lang
stringclasses
3 values
file_path
stringlengths
5
150
repo_name
stringlengths
6
110
commit
stringlengths
40
40
file_code
stringlengths
1.52k
18.9k
prefix
stringlengths
82
16.5k
suffix
stringlengths
0
15.1k
middle
stringlengths
121
8.18k
strategy
stringclasses
8 values
context_items
listlengths
0
100
Rust
src/lib.rs
ecies/rs
8acefe16f51de03b18fccb654d7b0d24f4548006
pub use libsecp256k1::{util::FULL_PUBLIC_KEY_SIZE, Error as SecpError, PublicKey, SecretKey}; pub mod consts; pub mod types; pub mod utils; #[cfg(feature = "openssl")] mod openssl_aes; #[cfg(feature = "pure")] mod pure_aes; use utils::{aes_decrypt, aes_encrypt, decapsulate, encapsulate, generate_keypair}; pub fn encrypt(receiver_pub: &[u8], msg: &[u8]) -> Result<Vec<u8>, SecpError> { let receiver_pk = PublicKey::parse_slice(receiver_pub, None)?; let (ephemeral_sk, ephemeral_pk) = generate_keypair(); let aes_key = encapsulate(&ephemeral_sk, &receiver_pk)?; let encrypted = aes_encrypt(&aes_key, msg).ok_or(SecpError::InvalidMessage)?; let mut cipher_text = Vec::with_capacity(FULL_PUBLIC_KEY_SIZE + encrypted.len()); cipher_text.extend(ephemeral_pk.serialize().iter()); cipher_text.extend(encrypted); Ok(cipher_text) } pub fn decrypt(receiver_sec: &[u8], msg: &[u8]) -> Result<Vec<u8>, SecpError> { let receiver_sk = SecretKey::parse_slice(receiver_sec)?; if msg.len() < FULL_PUBLIC_KEY_SIZE { return Err(SecpError::InvalidMessage); } let ephemeral_pk = PublicKey::parse_slice(&msg[..FULL_PUBLIC_KEY_SIZE], None)?; let encrypted = &msg[FULL_PUBLIC_KEY_SIZE..]; let aes_key = decapsulate(&ephemeral_pk, &receiver_sk)?; aes_decrypt(&aes_key, encrypted).ok_or(SecpError::InvalidMessage) } #[cfg(test)] mod tests { use super::*; use utils::generate_keypair; const MSG: &str = "helloworld"; const BIG_MSG_SIZE: usize = 2 * 1024 * 1024; const BIG_MSG: [u8; BIG_MSG_SIZE] = [1u8; BIG_MSG_SIZE]; pub(super) fn test_enc_dec(sk: &[u8], pk: &[u8]) { let msg = MSG.as_bytes(); assert_eq!(msg, decrypt(sk, &encrypt(pk, msg).unwrap()).unwrap().as_slice()); } pub(super) fn test_enc_dec_big(sk: &[u8], pk: &[u8]) { let msg = &BIG_MSG; assert_eq!(msg.to_vec(), decrypt(sk, &encrypt(pk, msg).unwrap()).unwrap()); } #[test] fn attempts_to_decrypt_with_another_key() { let (_, pk1) = generate_keypair(); let (sk2, _) = generate_keypair(); assert_eq!( decrypt( &sk2.serialize(), 
encrypt(&pk1.serialize_compressed(), b"text").unwrap().as_slice() ), Err(SecpError::InvalidMessage) ); } #[test] fn attempts_to_decrypt_incorrect_message() { let (sk, _) = generate_keypair(); assert_eq!(decrypt(&sk.serialize(), &[]), Err(SecpError::InvalidMessage)); assert_eq!(decrypt(&sk.serialize(), &[0u8; 65]), Err(SecpError::InvalidPublicKey)); } #[test] fn attempts_to_encrypt_with_invalid_key() { assert_eq!(encrypt(&[0u8; 33], b"text"), Err(SecpError::InvalidPublicKey)); } #[test] fn test_compressed_public() { let (sk, pk) = generate_keypair(); let (sk, pk) = (&sk.serialize(), &pk.serialize_compressed()); test_enc_dec(sk, pk); } #[test] fn test_uncompressed_public() { let (sk, pk) = generate_keypair(); let (sk, pk) = (&sk.serialize(), &pk.serialize()); test_enc_dec(sk, pk); } #[test] fn test_compressed_public_big_msg() { let (sk, pk) = generate_keypair(); let (sk, pk) = (&sk.serialize(), &pk.serialize_compressed()); test_enc_dec_big(sk, pk); } #[test] #[cfg(not(target_arch = "wasm32"))] fn test_against_python() { use futures_util::FutureExt; use hex::encode; use tokio::runtime::Runtime; use utils::tests::decode_hex; const PYTHON_BACKEND: &str = "https://eciespy.herokuapp.com/"; let (sk, pk) = generate_keypair(); let sk_hex = encode(&sk.serialize().to_vec()); let uncompressed_pk = &pk.serialize(); let pk_hex = encode(uncompressed_pk.to_vec()); let client = reqwest::Client::new(); let params = [("data", MSG), ("pub", pk_hex.as_str())]; let rt = Runtime::new().unwrap(); let res = rt .block_on( client .post(PYTHON_BACKEND) .form(&params) .send() .then(|r| r.unwrap().text()), ) .unwrap(); let server_encrypted = decode_hex(&res); let local_decrypted = decrypt(&sk.serialize(), server_encrypted.as_slice()).unwrap(); assert_eq!(local_decrypted, MSG.as_bytes()); let local_encrypted = encrypt(uncompressed_pk, MSG.as_bytes()).unwrap(); let params = [("data", encode(local_encrypted)), ("prv", sk_hex)]; let res = rt .block_on( client .post(PYTHON_BACKEND) .form(&params) 
.send() .then(|r| r.unwrap().text()), ) .unwrap(); assert_eq!(res, MSG); } } #[cfg(all(test, target_arch = "wasm32"))] mod wasm_tests { use super::generate_keypair; use super::tests::{test_enc_dec, test_enc_dec_big}; use wasm_bindgen_test::*; #[wasm_bindgen_test] fn test_wasm() { let (sk, pk) = generate_keypair(); let (sk, pk) = (&sk.serialize(), &pk.serialize()); test_enc_dec(sk, pk); test_enc_dec_big(sk, pk); } }
pub use libsecp256k1::{util::FULL_PUBLIC_KEY_SIZE, Error as SecpError, PublicKey, SecretKey}; pub mod consts; pub mod types; pub mod utils; #[cfg(feature = "openssl")] mod openssl_aes; #[cfg(feature = "pure")] mod pure_aes; use utils::{aes_decrypt, aes_encrypt, decapsulate, encapsulate, generate_keypair}; pub fn encrypt(receiver_pub: &[u8], msg: &[u8]) -> Result<Vec<u8>, SecpError> { let receiver_pk = PublicKey::parse_slice(receiver_pub, None)?; let (ephemeral_sk, ephemeral_pk) = generate_keypair(); let aes_key = encapsulate(&ephemeral_sk, &receiver_pk)?; let encrypted = aes_encrypt(&aes_key, msg).ok_or(SecpError::InvalidMessage)?; let mut cipher_text = Vec::with_capacity(FULL_PUBLIC_KEY_SIZE + encrypted.len()); cipher_text.extend(ephemeral_pk.serialize().iter()); cipher_text.extend(encrypted); Ok(cipher_text) } pub fn decrypt(receiver_sec: &[u8], msg: &[u8]) -> Result<Vec<u8>, SecpError> { let receiver_sk = SecretKey::parse_slice(receiver_sec)?; if msg.len() < FULL_PUBLIC_KEY_SIZE { return Err(SecpError::InvalidMessage); } let ephemeral_pk = PublicKey::parse_slice(&msg[..FULL_PUBLIC_KEY_SIZE], None)?; let encrypted = &msg[FULL_PUBLIC_KEY_SIZE..]; let aes_key = decapsulate(&ephemeral_pk,
alidMessage) ); } #[test] fn attempts_to_decrypt_incorrect_message() { let (sk, _) = generate_keypair(); assert_eq!(decrypt(&sk.serialize(), &[]), Err(SecpError::InvalidMessage)); assert_eq!(decrypt(&sk.serialize(), &[0u8; 65]), Err(SecpError::InvalidPublicKey)); } #[test] fn attempts_to_encrypt_with_invalid_key() { assert_eq!(encrypt(&[0u8; 33], b"text"), Err(SecpError::InvalidPublicKey)); } #[test] fn test_compressed_public() { let (sk, pk) = generate_keypair(); let (sk, pk) = (&sk.serialize(), &pk.serialize_compressed()); test_enc_dec(sk, pk); } #[test] fn test_uncompressed_public() { let (sk, pk) = generate_keypair(); let (sk, pk) = (&sk.serialize(), &pk.serialize()); test_enc_dec(sk, pk); } #[test] fn test_compressed_public_big_msg() { let (sk, pk) = generate_keypair(); let (sk, pk) = (&sk.serialize(), &pk.serialize_compressed()); test_enc_dec_big(sk, pk); } #[test] #[cfg(not(target_arch = "wasm32"))] fn test_against_python() { use futures_util::FutureExt; use hex::encode; use tokio::runtime::Runtime; use utils::tests::decode_hex; const PYTHON_BACKEND: &str = "https://eciespy.herokuapp.com/"; let (sk, pk) = generate_keypair(); let sk_hex = encode(&sk.serialize().to_vec()); let uncompressed_pk = &pk.serialize(); let pk_hex = encode(uncompressed_pk.to_vec()); let client = reqwest::Client::new(); let params = [("data", MSG), ("pub", pk_hex.as_str())]; let rt = Runtime::new().unwrap(); let res = rt .block_on( client .post(PYTHON_BACKEND) .form(&params) .send() .then(|r| r.unwrap().text()), ) .unwrap(); let server_encrypted = decode_hex(&res); let local_decrypted = decrypt(&sk.serialize(), server_encrypted.as_slice()).unwrap(); assert_eq!(local_decrypted, MSG.as_bytes()); let local_encrypted = encrypt(uncompressed_pk, MSG.as_bytes()).unwrap(); let params = [("data", encode(local_encrypted)), ("prv", sk_hex)]; let res = rt .block_on( client .post(PYTHON_BACKEND) .form(&params) .send() .then(|r| r.unwrap().text()), ) .unwrap(); assert_eq!(res, MSG); } } 
#[cfg(all(test, target_arch = "wasm32"))] mod wasm_tests { use super::generate_keypair; use super::tests::{test_enc_dec, test_enc_dec_big}; use wasm_bindgen_test::*; #[wasm_bindgen_test] fn test_wasm() { let (sk, pk) = generate_keypair(); let (sk, pk) = (&sk.serialize(), &pk.serialize()); test_enc_dec(sk, pk); test_enc_dec_big(sk, pk); } }
&receiver_sk)?; aes_decrypt(&aes_key, encrypted).ok_or(SecpError::InvalidMessage) } #[cfg(test)] mod tests { use super::*; use utils::generate_keypair; const MSG: &str = "helloworld"; const BIG_MSG_SIZE: usize = 2 * 1024 * 1024; const BIG_MSG: [u8; BIG_MSG_SIZE] = [1u8; BIG_MSG_SIZE]; pub(super) fn test_enc_dec(sk: &[u8], pk: &[u8]) { let msg = MSG.as_bytes(); assert_eq!(msg, decrypt(sk, &encrypt(pk, msg).unwrap()).unwrap().as_slice()); } pub(super) fn test_enc_dec_big(sk: &[u8], pk: &[u8]) { let msg = &BIG_MSG; assert_eq!(msg.to_vec(), decrypt(sk, &encrypt(pk, msg).unwrap()).unwrap()); } #[test] fn attempts_to_decrypt_with_another_key() { let (_, pk1) = generate_keypair(); let (sk2, _) = generate_keypair(); assert_eq!( decrypt( &sk2.serialize(), encrypt(&pk1.serialize_compressed(), b"text").unwrap().as_slice() ), Err(SecpError::Inv
random
[ { "content": "/// AES-256-GCM encryption wrapper\n\npub fn aes_encrypt(key: &[u8], msg: &[u8]) -> Option<Vec<u8>> {\n\n let key = GenericArray::from_slice(key);\n\n let aead = Aes256Gcm::new(key);\n\n\n\n let mut iv = [0u8; AES_IV_LENGTH];\n\n thread_rng().fill(&mut iv);\n\n\n\n let nonce = GenericArray::from_slice(&iv);\n\n\n\n let mut out = Vec::with_capacity(msg.len());\n\n out.extend(msg);\n\n\n\n if let Ok(tag) = aead.encrypt_in_place_detached(nonce, &EMPTY_BYTES, &mut out) {\n\n let mut output = Vec::with_capacity(AES_IV_PLUS_TAG_LENGTH + msg.len());\n\n output.extend(&iv);\n\n output.extend(tag);\n\n output.extend(out);\n\n Some(output)\n\n } else {\n\n None\n\n }\n\n}\n\n\n", "file_path": "src/pure_aes.rs", "rank": 1, "score": 130520.64861814951 }, { "content": "/// AES-256-GCM encryption wrapper\n\npub fn aes_encrypt(key: &[u8], msg: &[u8]) -> Option<Vec<u8>> {\n\n let cipher = Cipher::aes_256_gcm();\n\n\n\n let mut iv = [0u8; AES_IV_LENGTH];\n\n thread_rng().fill(&mut iv);\n\n\n\n let mut tag = [0u8; AES_TAG_LENGTH];\n\n\n\n if let Ok(encrypted) = encrypt_aead(cipher, key, Some(&iv), &EMPTY_BYTES, msg, &mut tag) {\n\n let mut output = Vec::with_capacity(AES_IV_PLUS_TAG_LENGTH + encrypted.len());\n\n output.extend(&iv);\n\n output.extend(&tag);\n\n output.extend(encrypted);\n\n\n\n Some(output)\n\n } else {\n\n None\n\n }\n\n}\n\n\n", "file_path": "src/openssl_aes.rs", "rank": 2, "score": 130520.64861814951 }, { "content": "/// AES-256-GCM decryption wrapper\n\npub fn aes_decrypt(key: &[u8], encrypted_msg: &[u8]) -> Option<Vec<u8>> {\n\n if encrypted_msg.len() < AES_IV_PLUS_TAG_LENGTH {\n\n return None;\n\n }\n\n\n\n let cipher = Cipher::aes_256_gcm();\n\n\n\n let iv = &encrypted_msg[..AES_IV_LENGTH];\n\n let tag = &encrypted_msg[AES_IV_LENGTH..AES_IV_PLUS_TAG_LENGTH];\n\n let encrypted = &encrypted_msg[AES_IV_PLUS_TAG_LENGTH..];\n\n\n\n decrypt_aead(cipher, key, Some(iv), &EMPTY_BYTES, encrypted, tag).ok()\n\n}\n", "file_path": "src/openssl_aes.rs", 
"rank": 4, "score": 107798.6500729607 }, { "content": "/// AES-256-GCM decryption wrapper\n\npub fn aes_decrypt(key: &[u8], encrypted_msg: &[u8]) -> Option<Vec<u8>> {\n\n if encrypted_msg.len() < AES_IV_PLUS_TAG_LENGTH {\n\n return None;\n\n }\n\n\n\n let key = GenericArray::from_slice(key);\n\n let aead = Aes256Gcm::new(key);\n\n\n\n let iv = GenericArray::from_slice(&encrypted_msg[..AES_IV_LENGTH]);\n\n let tag = GenericArray::from_slice(&encrypted_msg[AES_IV_LENGTH..AES_IV_PLUS_TAG_LENGTH]);\n\n\n\n let mut out = Vec::with_capacity(encrypted_msg.len() - AES_IV_PLUS_TAG_LENGTH);\n\n out.extend(&encrypted_msg[AES_IV_PLUS_TAG_LENGTH..]);\n\n\n\n if let Ok(_) = aead.decrypt_in_place_detached(iv, &EMPTY_BYTES, &mut out, tag) {\n\n Some(out)\n\n } else {\n\n None\n\n }\n\n}\n", "file_path": "src/pure_aes.rs", "rank": 5, "score": 107798.65007296074 }, { "content": "/// Calculate a shared AES key of our secret key and peer's public key by hkdf\n\npub fn encapsulate(sk: &SecretKey, peer_pk: &PublicKey) -> Result<AesKey, SecpError> {\n\n let mut shared_point = *peer_pk;\n\n shared_point.tweak_mul_assign(sk)?;\n\n\n\n let mut master = Vec::with_capacity(FULL_PUBLIC_KEY_SIZE * 2);\n\n master.extend(PublicKey::from_secret_key(sk).serialize().iter());\n\n master.extend(shared_point.serialize().iter());\n\n\n\n hkdf_sha256(master.as_slice())\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 6, "score": 97933.21394773325 }, { "content": "/// Calculate a shared AES key of our public key and peer's secret key by hkdf\n\npub fn decapsulate(pk: &PublicKey, peer_sk: &SecretKey) -> Result<AesKey, SecpError> {\n\n let mut shared_point = *pk;\n\n shared_point.tweak_mul_assign(peer_sk)?;\n\n\n\n let mut master = Vec::with_capacity(FULL_PUBLIC_KEY_SIZE * 2);\n\n master.extend(pk.serialize().iter());\n\n master.extend(shared_point.serialize().iter());\n\n\n\n hkdf_sha256(master.as_slice())\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 7, "score": 97933.21394773325 }, { "content": "/// 
Generate a `(SecretKey, PublicKey)` pair\n\npub fn generate_keypair() -> (SecretKey, PublicKey) {\n\n let sk = SecretKey::random(&mut thread_rng());\n\n (sk, PublicKey::from_secret_key(&sk))\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 8, "score": 97921.07660042393 }, { "content": "// private below\n\nfn hkdf_sha256(master: &[u8]) -> Result<AesKey, SecpError> {\n\n let h = Hkdf::<Sha256>::new(None, master);\n\n let mut out = [0u8; 32];\n\n h.expand(&EMPTY_BYTES, &mut out)\n\n .map_err(|_| SecpError::InvalidInputLength)?;\n\n Ok(out)\n\n}\n\n\n\n#[cfg(test)]\n\npub(crate) mod tests {\n\n use hex::decode;\n\n\n\n use libsecp256k1::Error;\n\n use rand::{thread_rng, Rng};\n\n\n\n use super::*;\n\n use crate::consts::{AES_IV_LENGTH, EMPTY_BYTES};\n\n\n\n /// Remove 0x prefix of a hex string\n\n pub fn remove0x(hex: &str) -> &str {\n", "file_path": "src/utils.rs", "rank": 9, "score": 79415.378003117 }, { "content": "fn criterion_benchmark(c: &mut Criterion) {\n\n let (sk, pk) = generate_keypair();\n\n let (sk, pk) = (&sk.serialize(), &pk.serialize());\n\n\n\n let big = &BIG_MSG;\n\n let big_encrypted = &encrypt(pk, big).unwrap();\n\n\n\n let bigger = &BIGGER_MSG;\n\n let bigger_encrypted = &encrypt(pk, bigger).unwrap();\n\n\n\n c.bench_function(\"encrypt 100M\", |b| b.iter(|| encrypt(pk, big).unwrap()));\n\n c.bench_function(\"encrypt 200M\", |b| b.iter(|| encrypt(pk, bigger).unwrap()));\n\n c.bench_function(\"decrypt 100M\", |b| b.iter(|| decrypt(sk, big_encrypted).unwrap()));\n\n c.bench_function(\"decrypt 200M\", |b| b.iter(|| decrypt(sk, bigger_encrypted).unwrap()));\n\n}\n\n\n\ncriterion_group! 
{\n\n name = benches;\n\n config = Criterion::default().sample_size(10).measurement_time(Duration::new(30, 0));\n\n targets = criterion_benchmark\n\n}\n\ncriterion_main!(benches);\n", "file_path": "bench/simple.rs", "rank": 10, "score": 53973.71848898832 }, { "content": "/// Type alias for `[u8; 32]`, which is a 256-bit key\n\npub type AesKey = [u8; 32];\n", "file_path": "src/types.rs", "rank": 11, "score": 21762.95335823589 }, { "content": "/// AES IV/nonce length\n\npub const AES_IV_LENGTH: usize = 16;\n\n/// AES tag length\n\npub const AES_TAG_LENGTH: usize = 16;\n\n/// AES IV + tag length\n\npub const AES_IV_PLUS_TAG_LENGTH: usize = AES_IV_LENGTH + AES_TAG_LENGTH;\n\n/// Empty bytes array\n\npub const EMPTY_BYTES: [u8; 0] = [];\n", "file_path": "src/consts.rs", "rank": 12, "score": 21759.71063693379 }, { "content": "use openssl::symm::{decrypt_aead, encrypt_aead, Cipher};\n\nuse rand::{thread_rng, Rng};\n\n\n\nuse crate::consts::{AES_IV_LENGTH, AES_IV_PLUS_TAG_LENGTH, AES_TAG_LENGTH, EMPTY_BYTES};\n\n\n\n/// AES-256-GCM encryption wrapper\n", "file_path": "src/openssl_aes.rs", "rank": 13, "score": 20258.63504403411 }, { "content": "use aes_gcm::aead::{generic_array::GenericArray, AeadInPlace, NewAead};\n\nuse aes_gcm::{aes::Aes256, AesGcm};\n\nuse rand::{thread_rng, Rng};\n\nuse typenum::consts::U16;\n\n\n\nuse crate::consts::{AES_IV_LENGTH, AES_IV_PLUS_TAG_LENGTH, EMPTY_BYTES};\n\n\n\n/// AES-256-GCM with 16 bytes Nonce/IV\n\npub type Aes256Gcm = AesGcm<Aes256, U16>;\n\n\n\n/// AES-256-GCM encryption wrapper\n", "file_path": "src/pure_aes.rs", "rank": 14, "score": 20257.505106496712 }, { "content": "use hkdf::Hkdf;\n\nuse libsecp256k1::{util::FULL_PUBLIC_KEY_SIZE, Error as SecpError, PublicKey, SecretKey};\n\nuse rand::thread_rng;\n\nuse sha2::Sha256;\n\n\n\nuse crate::consts::EMPTY_BYTES;\n\nuse crate::types::AesKey;\n\n\n\n#[cfg(feature = \"pure\")]\n\npub use crate::pure_aes::{aes_decrypt, aes_encrypt};\n\n\n\n#[cfg(feature = \"openssl\")]\n\npub use 
crate::openssl_aes::{aes_decrypt, aes_encrypt};\n\n\n\n/// Generate a `(SecretKey, PublicKey)` pair\n", "file_path": "src/utils.rs", "rank": 15, "score": 19970.883565861914 }, { "content": " );\n\n\n\n let mut two = [0u8; 32];\n\n let mut three = [0u8; 32];\n\n two[31] = 2u8;\n\n three[31] = 3u8;\n\n\n\n let sk2 = SecretKey::parse_slice(&two).unwrap();\n\n let pk2 = PublicKey::from_secret_key(&sk2);\n\n let sk3 = SecretKey::parse_slice(&three).unwrap();\n\n let pk3 = PublicKey::from_secret_key(&sk3);\n\n\n\n assert_eq!(encapsulate(&sk2, &pk3), decapsulate(&pk2, &sk3));\n\n assert_eq!(\n\n encapsulate(&sk2, &pk3).map(|v| v.to_vec()).unwrap(),\n\n decode_hex(\"6f982d63e8590c9d9b5b4c1959ff80315d772edd8f60287c9361d548d5200f82\")\n\n );\n\n }\n\n}\n", "file_path": "src/utils.rs", "rank": 16, "score": 19966.221203155015 }, { "content": "\n\n let group_order = decode_hex(\"fffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141\");\n\n assert_eq!(\n\n SecretKey::parse_slice(&group_order).err().unwrap(),\n\n Error::InvalidSecretKey\n\n );\n\n }\n\n\n\n #[test]\n\n fn test_hkdf() {\n\n let text = b\"secret\";\n\n\n\n let h = Hkdf::<Sha256>::new(None, text);\n\n let mut out = [0u8; 32];\n\n let r = h.expand(&EMPTY_BYTES, &mut out);\n\n\n\n assert!(r.is_ok());\n\n assert_eq!(\n\n out.to_vec(),\n\n decode_hex(\"2f34e5ff91ec85d53ca9b543683174d0cf550b60d5f52b24c97b386cfcf6cbbf\")\n", "file_path": "src/utils.rs", "rank": 17, "score": 19965.15638826556 }, { "content": " fn test_generate_keypair() {\n\n let (sk1, pk1) = generate_keypair();\n\n let (sk2, pk2) = generate_keypair();\n\n assert_ne!(sk1, sk2);\n\n assert_ne!(pk1, pk2);\n\n }\n\n\n\n #[test]\n\n fn test_attempt_to_decrypt_invalid_message() {\n\n assert!(aes_decrypt(&[], &[]).is_none());\n\n\n\n assert!(aes_decrypt(&[], &[0; AES_IV_LENGTH]).is_none());\n\n }\n\n\n\n #[test]\n\n fn test_aes_random_key() {\n\n let text = b\"this is a text\";\n\n let mut key = [0u8; 32];\n\n thread_rng().fill(&mut key);\n\n\n", 
"file_path": "src/utils.rs", "rank": 18, "score": 19964.067811373014 }, { "content": " let iv = decode_hex(\"f3e1ba810d2c8900b11312b7c725565f\");\n\n let tag = decode_hex(\"ec3b71e17c11dbe31484da9450edcf6c\");\n\n let encrypted = decode_hex(\"02d2ffed93b856f148b9\");\n\n\n\n let mut cipher_text = Vec::new();\n\n cipher_text.extend(iv);\n\n cipher_text.extend(tag);\n\n cipher_text.extend(encrypted);\n\n\n\n assert_eq!(text, aes_decrypt(&key, &cipher_text).unwrap().as_slice());\n\n }\n\n\n\n #[test]\n\n fn test_valid_secret() {\n\n // 0 < private key < group order int is valid\n\n let zero = [0u8; 32];\n\n assert_eq!(SecretKey::parse_slice(&zero).err().unwrap(), Error::InvalidSecretKey);\n\n\n\n let group_order_minus_1 = decode_hex(\"fffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364140\");\n\n SecretKey::parse_slice(&group_order_minus_1).unwrap();\n", "file_path": "src/utils.rs", "rank": 19, "score": 19963.231661064394 }, { "content": " if hex.starts_with(\"0x\") || hex.starts_with(\"0X\") {\n\n return &hex[2..];\n\n }\n\n hex\n\n }\n\n\n\n /// Convert hex string to u8 vector\n\n pub fn decode_hex(hex: &str) -> Vec<u8> {\n\n decode(remove0x(hex)).unwrap()\n\n }\n\n\n\n #[test]\n\n fn test_remove_0x_decode_hex() {\n\n assert_eq!(remove0x(\"0x0011\"), \"0011\");\n\n assert_eq!(remove0x(\"0X0011\"), \"0011\");\n\n assert_eq!(remove0x(\"0011\"), \"0011\");\n\n assert_eq!(decode_hex(\"0x0011\"), [0u8, 17u8]);\n\n }\n\n\n\n #[test]\n", "file_path": "src/utils.rs", "rank": 20, "score": 19962.663368367866 }, { "content": " assert_eq!(\n\n text,\n\n aes_decrypt(&key, aes_encrypt(&key, text).unwrap().as_slice())\n\n .unwrap()\n\n .as_slice()\n\n );\n\n\n\n let utf8_text = \"😀😀😀😀\".as_bytes();\n\n assert_eq!(\n\n utf8_text,\n\n aes_decrypt(&key, aes_encrypt(&key, utf8_text).unwrap().as_slice())\n\n .unwrap()\n\n .as_slice()\n\n );\n\n }\n\n\n\n #[test]\n\n fn test_aes_known_key() {\n\n let text = b\"helloworld\";\n\n let key = 
decode_hex(\"0000000000000000000000000000000000000000000000000000000000000000\");\n", "file_path": "src/utils.rs", "rank": 21, "score": 19960.696216171593 }, { "content": "use core::time::Duration;\n\n\n\nuse criterion::{criterion_group, criterion_main, Criterion};\n\nuse ecies::{decrypt, encrypt, utils::generate_keypair};\n\n\n\nconst BIG_MSG_SIZE: usize = 100 * 1024 * 1024;\n\nconst BIGGER_MSG_SIZE: usize = 200 * 1024 * 1024;\n\n\n\nconst BIG_MSG: [u8; BIG_MSG_SIZE] = [1u8; BIG_MSG_SIZE];\n\nconst BIGGER_MSG: [u8; BIGGER_MSG_SIZE] = [2u8; BIGGER_MSG_SIZE];\n\n\n", "file_path": "bench/simple.rs", "rank": 24, "score": 9.523087134556377 }, { "content": "# eciesrs\n\n\n\n[![Codacy Badge](https://api.codacy.com/project/badge/Grade/1c6d6ed949dd4836ab97421039e8be75)](https://www.codacy.com/app/ecies/rs)\n\n[![License](https://img.shields.io/github/license/ecies/rs.svg)](https://github.com/ecies/rs)\n\n[![CI](https://img.shields.io/github/workflow/status/ecies/rs/Build)](https://github.com/ecies/rs/actions)\n\n[![Crates](https://img.shields.io/crates/v/ecies)](https://crates.io/crates/ecies)\n\n[![Doc](https://docs.rs/ecies/badge.svg)](https://docs.rs/ecies/latest/ecies/)\n\n\n\nElliptic Curve Integrated Encryption Scheme for secp256k1 in Rust, based on [pure Rust implementation](https://github.com/paritytech/libsecp256k1) of secp256k1.\n\n\n\nECIES functionalities are built upon AES-GCM-256 and HKDF-SHA256.\n\n\n\nThis is the Rust version of [eciespy](https://github.com/ecies/py).\n\n\n\nThis library can be compiled to the WASM target at your option, see [WASM compatibility](#wasm-compatibility).\n\n\n\n## Quick Start\n\n\n\n```rust\n\nuse ecies::{decrypt, encrypt, utils::generate_keypair};\n\n\n\nconst MSG: &str = \"helloworld\";\n\nlet (sk, pk) = generate_keypair();\n\nlet (sk, pk) = (&sk.serialize(), &pk.serialize());\n\n\n\nlet msg = MSG.as_bytes();\n\nassert_eq!(\n\n msg,\n\n decrypt(sk, &encrypt(pk, msg).unwrap()).unwrap().as_slice()\n\n);\n\n```\n\n\n", 
"file_path": "README.md", "rank": 26, "score": 6.375990256562114 }, { "content": "### Pure Rust backend\n\n\n\n```bash\n\n$ export RUSTFLAGS=\"-Ctarget-cpu=sandybridge -Ctarget-feature=+aes,+sse2,+sse4.1,+ssse3\"\n\n$ cargo bench --no-default-features --features pure\n\nencrypt 100M time: [196.85 ms 201.97 ms 205.67 ms]\n\n change: [-9.8235% -7.9098% -5.9849%] (p = 0.00 < 0.05)\n\n Performance has improved.\n\nFound 1 outliers among 10 measurements (10.00%)\n\n 1 (10.00%) low severe\n\n\n\nencrypt 200M time: [554.62 ms 585.01 ms 599.71 ms]\n\n change: [-15.036% -11.698% -8.6460%] (p = 0.00 < 0.05)\n\n Performance has improved.\n\n\n\ndecrypt 100M time: [131.26 ms 134.39 ms 140.54 ms]\n\n change: [-3.9509% +2.9485% +10.198%] (p = 0.42 > 0.05)\n\n No change in performance detected.\n\n\n\ndecrypt 200M time: [288.13 ms 296.64 ms 311.78 ms]\n\n change: [-16.887% -13.038% -8.6679%] (p = 0.00 < 0.05)\n\n Performance has improved.\n\nFound 1 outliers among 10 measurements (10.00%)\n\n 1 (10.00%) high mild\n\n```\n\n\n\n## Release Notes\n\n\n\n### 0.2.2\n\n\n\n- Bump dependencies\n\n- Migrate to edition 2021\n\n\n\n### 0.2.1\n\n\n\n- Revamp error handling\n\n\n\n### 0.2.0\n\n\n\n- Revamp documentation\n\n- Optional pure Rust AES backend\n\n- WASM compatibility\n\n\n\n### 0.1.1 ~ 0.1.5\n\n\n\n- Bump dependencies\n\n- Update documentation\n\n- Fix error handling\n\n\n\n### 0.1.0\n\n\n\n- First beta version release\n", "file_path": "README.md", "rank": 33, "score": 4.040893331366903 }, { "content": "## Optional pure Rust AES backend\n\n\n\nYou can choose to use OpenSSL implementation or [pure Rust implementation](https://github.com/RustCrypto/AEADs) of AES-256-GCM:\n\n\n\n```toml\n\necies = {version = \"0.2\", default-features = false, features = [\"pure\"]}\n\n```\n\n\n\nDue to some [performance problem](https://github.com/RustCrypto/AEADs/issues/243), OpenSSL is the default backend.\n\n\n\nPure Rust implementation is sometimes useful, such as building on 
WASM:\n\n\n\n```bash\n\ncargo build --no-default-features --features pure --target=wasm32-unknown-unknown\n\n```\n\n\n\nIf you select the pure Rust backend on modern CPUs, consider building with\n\n\n\n```bash\n\nRUSTFLAGS=\"-Ctarget-cpu=sandybridge -Ctarget-feature=+aes,+sse2,+sse4.1,+ssse3\"\n\n```\n\n\n\nto speed up AES encryption/decryption. This would be no longer necessary when [`aes-gcm` supports automatic CPU detection](https://github.com/RustCrypto/AEADs/issues/243#issuecomment-738821935).\n\n\n\n## WASM compatibility\n\n\n\nIt's also possible to build to the `wasm32-unknown-unknown` target with the pure Rust backend. Check out [this repo](https://github.com/ecies/rs-wasm) for more details.\n\n\n\n## Security\n\n\n\n### Why AES-GCM-256 and HKDF-SHA256\n\n\n\nAEAD scheme like AES-GCM-256 should be your first option for symmetric ciphers, with unique IVs in each encryption.\n\n\n\nFor key derivation functions on shared points between two asymmetric keys, HKDFs are [proven](https://github.com/ecies/py/issues/82) to be more secure than simple hash functions like SHA256.\n\n\n\n### Cross-language compatibility\n\n\n\nAll functionalities are mutually checked among [different languages](https://github.com/ecies): Python, Rust, JavaScript and Golang.\n\n\n\n### Security audit\n\n\n\nFollowing dependencies are audited:\n\n\n\n- [aes-gcm](https://research.nccgroup.com/2020/02/26/public-report-rustcrypto-aes-gcm-and-chacha20poly1305-implementation-review/)\n\n- [OpenSSL](https://ostif.org/the-ostif-and-quarkslab-audit-of-openssl-is-complete/)\n\n\n", "file_path": "README.md", "rank": 35, "score": 3.642626854545785 }, { "content": "## Benchmark\n\n\n\nThe result shows that the pure Rust backend is around 20% ~ 50% slower compared to OpenSSL on MacBook Pro mid-2015 (2.8 GHz Quad-Core Intel Core i7).\n\n\n\n### OpenSSL backend\n\n\n\n```bash\n\n$ cargo bench --no-default-features --features openssl\n\nencrypt 100M time: [110.25 ms 115.77 ms 120.22 ms]\n\n change: 
[-10.123% -3.0504% +4.2342%] (p = 0.44 > 0.05)\n\n No change in performance detected.\n\n\n\nencrypt 200M time: [435.22 ms 450.50 ms 472.17 ms]\n\n change: [-7.5254% +3.6572% +14.508%] (p = 0.56 > 0.05)\n\n No change in performance detected.\n\nFound 1 outliers among 10 measurements (10.00%)\n\n 1 (10.00%) high mild\n\n\n\ndecrypt 100M time: [60.439 ms 66.276 ms 70.959 ms]\n\n change: [+0.1986% +7.7620% +15.995%] (p = 0.08 > 0.05)\n\n No change in performance detected.\n\n\n\ndecrypt 200M time: [182.10 ms 185.85 ms 190.63 ms]\n\n change: [-4.8452% +5.2114% +16.370%] (p = 0.40 > 0.05)\n\n No change in performance detected.\n\nFound 1 outliers among 10 measurements (10.00%)\n\n 1 (10.00%) high severe\n\n\n\n```\n\n\n", "file_path": "README.md", "rank": 36, "score": 3.0032235678199926 } ]
Rust
hphp/hack/src/rupro/hackrs/shallow_decl_provider/store.rs
leikahing/hhvm
26617c88ca35c5e078c3aef12c061d7996925375
use anyhow::Result; use datastore::Store; use pos::{ConstName, FunName, MethodName, ModuleName, PropName, TypeName}; use std::sync::Arc; use ty::decl::{ shallow::Decl, shallow::ModuleDecl, ConstDecl, FunDecl, ShallowClass, Ty, TypedefDecl, }; use ty::reason::Reason; #[derive(Debug)] pub struct ShallowDeclStore<R: Reason> { classes: Arc<dyn Store<TypeName, Arc<ShallowClass<R>>>>, typedefs: Arc<dyn Store<TypeName, Arc<TypedefDecl<R>>>>, funs: Arc<dyn Store<FunName, Arc<FunDecl<R>>>>, consts: Arc<dyn Store<ConstName, Arc<ConstDecl<R>>>>, modules: Arc<dyn Store<ModuleName, Arc<ModuleDecl<R>>>>, properties: Arc<dyn Store<(TypeName, PropName), Ty<R>>>, static_properties: Arc<dyn Store<(TypeName, PropName), Ty<R>>>, methods: Arc<dyn Store<(TypeName, MethodName), Ty<R>>>, static_methods: Arc<dyn Store<(TypeName, MethodName), Ty<R>>>, constructors: Arc<dyn Store<TypeName, Ty<R>>>, } impl<R: Reason> ShallowDeclStore<R> { pub fn new( classes: Arc<dyn Store<TypeName, Arc<ShallowClass<R>>>>, typedefs: Arc<dyn Store<TypeName, Arc<TypedefDecl<R>>>>, funs: Arc<dyn Store<FunName, Arc<FunDecl<R>>>>, consts: Arc<dyn Store<ConstName, Arc<ConstDecl<R>>>>, modules: Arc<dyn Store<ModuleName, Arc<ModuleDecl<R>>>>, properties: Arc<dyn Store<(TypeName, PropName), Ty<R>>>, static_properties: Arc<dyn Store<(TypeName, PropName), Ty<R>>>, methods: Arc<dyn Store<(TypeName, MethodName), Ty<R>>>, static_methods: Arc<dyn Store<(TypeName, MethodName), Ty<R>>>, constructors: Arc<dyn Store<TypeName, Ty<R>>>, ) -> Self { Self { classes, typedefs, funs, consts, modules, properties, static_properties, methods, static_methods, constructors, } } pub fn with_no_member_stores( classes: Arc<dyn Store<TypeName, Arc<ShallowClass<R>>>>, typedefs: Arc<dyn Store<TypeName, Arc<TypedefDecl<R>>>>, funs: Arc<dyn Store<FunName, Arc<FunDecl<R>>>>, consts: Arc<dyn Store<ConstName, Arc<ConstDecl<R>>>>, modules: Arc<dyn Store<ModuleName, Arc<ModuleDecl<R>>>>, ) -> Self { Self { properties: Arc::new(PropFinder { classes: 
Arc::clone(&classes), }), static_properties: Arc::new(StaticPropFinder { classes: Arc::clone(&classes), }), methods: Arc::new(MethodFinder { classes: Arc::clone(&classes), }), static_methods: Arc::new(StaticMethodFinder { classes: Arc::clone(&classes), }), constructors: Arc::new(ConstructorFinder { classes: Arc::clone(&classes), }), classes, typedefs, funs, consts, modules, } } pub fn add_decls(&self, decls: impl IntoIterator<Item = Decl<R>>) -> Result<()> { for decl in decls.into_iter() { match decl { Decl::Class(name, decl) => self.add_class(name, Arc::new(decl))?, Decl::Fun(name, decl) => self.funs.insert(name, Arc::new(decl))?, Decl::Typedef(name, decl) => self.typedefs.insert(name, Arc::new(decl))?, Decl::Const(name, decl) => self.consts.insert(name, Arc::new(decl))?, Decl::Module(name, decl) => self.modules.insert(name, Arc::new(decl))?, } } Ok(()) } pub fn get_fun(&self, name: FunName) -> Result<Option<Arc<FunDecl<R>>>> { self.funs.get(name) } pub fn get_const(&self, name: ConstName) -> Result<Option<Arc<ConstDecl<R>>>> { self.consts.get(name) } pub fn get_class(&self, name: TypeName) -> Result<Option<Arc<ShallowClass<R>>>> { self.classes.get(name) } pub fn get_typedef(&self, name: TypeName) -> Result<Option<Arc<TypedefDecl<R>>>> { self.typedefs.get(name) } pub fn get_property_type( &self, class_name: TypeName, property_name: PropName, ) -> Result<Option<Ty<R>>> { self.properties.get((class_name, property_name)) } pub fn get_static_property_type( &self, class_name: TypeName, property_name: PropName, ) -> Result<Option<Ty<R>>> { self.static_properties.get((class_name, property_name)) } pub fn get_method_type( &self, class_name: TypeName, method_name: MethodName, ) -> Result<Option<Ty<R>>> { self.methods.get((class_name, method_name)) } pub fn get_static_method_type( &self, class_name: TypeName, method_name: MethodName, ) -> Result<Option<Ty<R>>> { self.static_methods.get((class_name, method_name)) } pub fn get_constructor_type(&self, class_name: TypeName) -> 
Result<Option<Ty<R>>> { self.constructors.get(class_name) } fn add_class(&self, name: TypeName, cls: Arc<ShallowClass<R>>) -> Result<()> { let cid = cls.name.id(); for prop in cls.props.iter().rev() { if let Some(ty) = &prop.ty { self.properties.insert((cid, prop.name.id()), ty.clone())? } } for prop in cls.static_props.iter().rev() { if let Some(ty) = &prop.ty { self.static_properties .insert((cid, prop.name.id()), ty.clone())? } } for meth in cls.methods.iter().rev() { self.methods .insert((cid, meth.name.id()), meth.ty.clone())? } for meth in cls.static_methods.iter().rev() { self.static_methods .insert((cid, meth.name.id()), meth.ty.clone())? } if let Some(constructor) = &cls.constructor { self.constructors.insert(cid, constructor.ty.clone())? } self.classes.insert(name, cls)?; Ok(()) } } #[derive(Debug)] struct PropFinder<R: Reason> { classes: Arc<dyn Store<TypeName, Arc<ShallowClass<R>>>>, } impl<R: Reason> Store<(TypeName, PropName), Ty<R>> for PropFinder<R> { fn get(&self, (class_name, property_name): (TypeName, PropName)) -> Result<Option<Ty<R>>> { Ok(self.classes.get(class_name)?.and_then(|cls| { cls.props.iter().rev().find_map(|prop| { if prop.name.id() == property_name { prop.ty.clone() } else { None } }) })) } fn insert(&self, _: (TypeName, PropName), _: Ty<R>) -> Result<()> { Ok(()) } fn remove_batch(&self, _: &mut dyn Iterator<Item = (TypeName, PropName)>) -> Result<()> { Ok(()) } } #[derive(Debug)] struct StaticPropFinder<R: Reason> { classes: Arc<dyn Store<TypeName, Arc<ShallowClass<R>>>>, } impl<R: Reason> Store<(TypeName, PropName), Ty<R>> for StaticPropFinder<R> { fn get(&self, (class_name, property_name): (TypeName, PropName)) -> Result<Option<Ty<R>>> { Ok(self.classes.get(class_name)?.and_then(|cls| { cls.static_props.iter().rev().find_map(|prop| { if prop.name.id() == property_name { prop.ty.clone() } else { None } }) })) } fn insert(&self, _: (TypeName, PropName), _: Ty<R>) -> Result<()> { Ok(()) } fn remove_batch(&self, _: &mut dyn 
Iterator<Item = (TypeName, PropName)>) -> Result<()> { Ok(()) } } #[derive(Debug)] struct MethodFinder<R: Reason> { classes: Arc<dyn Store<TypeName, Arc<ShallowClass<R>>>>, } impl<R: Reason> Store<(TypeName, MethodName), Ty<R>> for MethodFinder<R> { fn get(&self, (class_name, method_name): (TypeName, MethodName)) -> Result<Option<Ty<R>>> { Ok(self.classes.get(class_name)?.and_then(|cls| { cls.methods.iter().rev().find_map(|meth| { if meth.name.id() == method_name { Some(meth.ty.clone()) } else { None } }) })) } fn insert(&self, _: (TypeName, MethodName), _: Ty<R>) -> Result<()> { Ok(()) } fn remove_batch(&self, _: &mut dyn Iterator<Item = (TypeName, MethodName)>) -> Result<()> { Ok(()) } } #[derive(Debug)] struct StaticMethodFinder<R: Reason> { classes: Arc<dyn Store<TypeName, Arc<ShallowClass<R>>>>, } impl<R: Reason> Store<(TypeName, MethodName), Ty<R>> for StaticMethodFinder<R> { fn get(&self, (class_name, method_name): (TypeName, MethodName)) -> Result<Option<Ty<R>>> { Ok(self.classes.get(class_name)?.and_then(|cls| { cls.static_methods.iter().rev().find_map(|meth| { if meth.name.id() == method_name { Some(meth.ty.clone()) } else { None } }) })) } fn insert(&self, _: (TypeName, MethodName), _: Ty<R>) -> Result<()> { Ok(()) } fn remove_batch(&self, _: &mut dyn Iterator<Item = (TypeName, MethodName)>) -> Result<()> { Ok(()) } } #[derive(Debug)] struct ConstructorFinder<R: Reason> { classes: Arc<dyn Store<TypeName, Arc<ShallowClass<R>>>>, } impl<R: Reason> Store<TypeName, Ty<R>> for ConstructorFinder<R> { fn get(&self, class_name: TypeName) -> Result<Option<Ty<R>>> { Ok(self .classes .get(class_name)? .and_then(|cls| cls.constructor.as_ref().map(|meth| meth.ty.clone()))) } fn insert(&self, _: TypeName, _: Ty<R>) -> Result<()> { Ok(()) } fn remove_batch(&self, _: &mut dyn Iterator<Item = TypeName>) -> Result<()> { Ok(()) } }
use anyhow::Result; use datastore::Store; use pos::{ConstName, FunName, MethodName, ModuleName, PropName, TypeName}; use std::sync::Arc; use ty::decl::{ shallow::Decl, shallow::ModuleDecl, ConstDecl, FunDecl, ShallowClass, Ty, TypedefDecl, }; use ty::reason::Reason; #[derive(Debug)] pub struct ShallowDeclStore<R: Reason> { classes: Arc<dyn Store<TypeName, Arc<ShallowClass<R>>>>, typedefs: Arc<dyn Store<TypeName, Arc<TypedefDecl<R>>>>, funs: Arc<dyn Store<FunName, Arc<FunDecl<R>>>>, consts: Arc<dyn Store<ConstName, Arc<ConstDecl<R>>>>, modules: Arc<dyn Store<ModuleName, Arc<ModuleDecl<R>>>>, properties: Arc<dyn Store<(TypeName, PropName), Ty<R>>>, static_properties: Arc<dyn Store<(TypeName, PropName), Ty<R>>>, methods: Arc<dyn Store<(TypeName, MethodName), Ty<R>>>, static_methods: Arc<dyn Store<(TypeName, MethodName), Ty<R>>>, constructors: Arc<dyn Store<TypeName, Ty<R>>>, } impl<R: Reason> ShallowDeclStore<R> { pub fn new( classes: Arc<dyn Store<TypeName, Arc<ShallowClass<R>>>>, typedefs: Arc<dyn Store<TypeName, Arc<TypedefDecl<R>>>>, funs: Arc<dyn Store<FunName, Arc<FunDecl<R>>>>, consts: Arc<dyn Store<ConstName, Arc<ConstDecl<R>>>>, modules: Arc<dyn Store<ModuleName, Arc<ModuleDecl<R>>>>, properties: Arc<dyn Store<(TypeName, PropName), Ty<R>>>, static_properties: Arc<dyn Store<(TypeName, PropName), Ty<R>>>, methods: Arc<dyn Store<(TypeName, MethodName), Ty<R>>>, static_methods: Arc<dyn Store<(TypeName, MethodName), Ty<R>>>, constructors: Arc<dyn Store<TypeName, Ty<R>>>, ) -> Self { Self { classes, typedefs, funs, consts, modules, properties, static_properties, methods, static_methods, constructors, } } pub fn with_no_member_stores( classes: Arc<dyn Store<TypeName, Arc<ShallowClass<R>>>>, typedefs: Arc<dyn Store<TypeName, Arc<TypedefDecl<R>>>>, funs: Arc<dyn Store<FunName, Arc<FunDecl<R>>>>, consts: Arc<dyn Store<ConstName, Arc<ConstDecl<R>>>>, modules: Arc<dyn Store<ModuleName, Arc<ModuleDecl<R>>>>, ) -> Self { Self { properties: Arc::new(PropFinder { classes: 
Arc::clone(&classes), }), static_properties: Arc::new(StaticPropFinder { classes: Arc::clone(&classes), }), methods: Arc::new(MethodFinder { classes: Arc::clone(&classes), }), static_methods: Arc::new(StaticMethodFinder { classes: Arc::clone(&classes), }), constructors: Arc::new(ConstructorFinder { classes: Arc::clone(&classes), }), classes, typedefs, funs, consts, modules, } } pub fn add_decls(&self, decls: impl IntoIterator<Item = Decl<R>>) -> Result<()> { for decl in decls.into_iter() { match decl { Decl::Class(name, decl) => self.add_class(name, Arc::new(decl))?, Decl::Fun(name, decl) => self.funs.insert(name, Arc::new(decl))?, Decl::Typedef(name, decl) => self.typedefs.insert(name, Arc::new(decl))?, Decl::Const(name, decl) => self.consts.insert(name, Arc::new(decl))?, Decl::Module(name, decl) => self.modules.insert(name, Arc::new(decl))?, } } Ok(()) } pub fn get_fun(&self, name: FunName) -> Result<Option<Arc<FunDecl<R>>>> { self.funs.get(name) } pub fn get_const(&self, name: ConstName) -> Result<Option<Arc<ConstDecl<R>>>> { self.consts.get(name) } pub fn get_class(&self, name: TypeName) -> Result<Option<Arc<ShallowClass<R>>>> { self.classes.get(name) } pub fn get_typedef(&self, name: TypeName) -> Result<Option<Arc<TypedefDecl<R>>>> { self.typedefs.get(name) }
pub fn get_static_property_type( &self, class_name: TypeName, property_name: PropName, ) -> Result<Option<Ty<R>>> { self.static_properties.get((class_name, property_name)) } pub fn get_method_type( &self, class_name: TypeName, method_name: MethodName, ) -> Result<Option<Ty<R>>> { self.methods.get((class_name, method_name)) } pub fn get_static_method_type( &self, class_name: TypeName, method_name: MethodName, ) -> Result<Option<Ty<R>>> { self.static_methods.get((class_name, method_name)) } pub fn get_constructor_type(&self, class_name: TypeName) -> Result<Option<Ty<R>>> { self.constructors.get(class_name) } fn add_class(&self, name: TypeName, cls: Arc<ShallowClass<R>>) -> Result<()> { let cid = cls.name.id(); for prop in cls.props.iter().rev() { if let Some(ty) = &prop.ty { self.properties.insert((cid, prop.name.id()), ty.clone())? } } for prop in cls.static_props.iter().rev() { if let Some(ty) = &prop.ty { self.static_properties .insert((cid, prop.name.id()), ty.clone())? } } for meth in cls.methods.iter().rev() { self.methods .insert((cid, meth.name.id()), meth.ty.clone())? } for meth in cls.static_methods.iter().rev() { self.static_methods .insert((cid, meth.name.id()), meth.ty.clone())? } if let Some(constructor) = &cls.constructor { self.constructors.insert(cid, constructor.ty.clone())? 
} self.classes.insert(name, cls)?; Ok(()) } } #[derive(Debug)] struct PropFinder<R: Reason> { classes: Arc<dyn Store<TypeName, Arc<ShallowClass<R>>>>, } impl<R: Reason> Store<(TypeName, PropName), Ty<R>> for PropFinder<R> { fn get(&self, (class_name, property_name): (TypeName, PropName)) -> Result<Option<Ty<R>>> { Ok(self.classes.get(class_name)?.and_then(|cls| { cls.props.iter().rev().find_map(|prop| { if prop.name.id() == property_name { prop.ty.clone() } else { None } }) })) } fn insert(&self, _: (TypeName, PropName), _: Ty<R>) -> Result<()> { Ok(()) } fn remove_batch(&self, _: &mut dyn Iterator<Item = (TypeName, PropName)>) -> Result<()> { Ok(()) } } #[derive(Debug)] struct StaticPropFinder<R: Reason> { classes: Arc<dyn Store<TypeName, Arc<ShallowClass<R>>>>, } impl<R: Reason> Store<(TypeName, PropName), Ty<R>> for StaticPropFinder<R> { fn get(&self, (class_name, property_name): (TypeName, PropName)) -> Result<Option<Ty<R>>> { Ok(self.classes.get(class_name)?.and_then(|cls| { cls.static_props.iter().rev().find_map(|prop| { if prop.name.id() == property_name { prop.ty.clone() } else { None } }) })) } fn insert(&self, _: (TypeName, PropName), _: Ty<R>) -> Result<()> { Ok(()) } fn remove_batch(&self, _: &mut dyn Iterator<Item = (TypeName, PropName)>) -> Result<()> { Ok(()) } } #[derive(Debug)] struct MethodFinder<R: Reason> { classes: Arc<dyn Store<TypeName, Arc<ShallowClass<R>>>>, } impl<R: Reason> Store<(TypeName, MethodName), Ty<R>> for MethodFinder<R> { fn get(&self, (class_name, method_name): (TypeName, MethodName)) -> Result<Option<Ty<R>>> { Ok(self.classes.get(class_name)?.and_then(|cls| { cls.methods.iter().rev().find_map(|meth| { if meth.name.id() == method_name { Some(meth.ty.clone()) } else { None } }) })) } fn insert(&self, _: (TypeName, MethodName), _: Ty<R>) -> Result<()> { Ok(()) } fn remove_batch(&self, _: &mut dyn Iterator<Item = (TypeName, MethodName)>) -> Result<()> { Ok(()) } } #[derive(Debug)] struct StaticMethodFinder<R: Reason> { classes: 
Arc<dyn Store<TypeName, Arc<ShallowClass<R>>>>, } impl<R: Reason> Store<(TypeName, MethodName), Ty<R>> for StaticMethodFinder<R> { fn get(&self, (class_name, method_name): (TypeName, MethodName)) -> Result<Option<Ty<R>>> { Ok(self.classes.get(class_name)?.and_then(|cls| { cls.static_methods.iter().rev().find_map(|meth| { if meth.name.id() == method_name { Some(meth.ty.clone()) } else { None } }) })) } fn insert(&self, _: (TypeName, MethodName), _: Ty<R>) -> Result<()> { Ok(()) } fn remove_batch(&self, _: &mut dyn Iterator<Item = (TypeName, MethodName)>) -> Result<()> { Ok(()) } } #[derive(Debug)] struct ConstructorFinder<R: Reason> { classes: Arc<dyn Store<TypeName, Arc<ShallowClass<R>>>>, } impl<R: Reason> Store<TypeName, Ty<R>> for ConstructorFinder<R> { fn get(&self, class_name: TypeName) -> Result<Option<Ty<R>>> { Ok(self .classes .get(class_name)? .and_then(|cls| cls.constructor.as_ref().map(|meth| meth.ty.clone()))) } fn insert(&self, _: TypeName, _: Ty<R>) -> Result<()> { Ok(()) } fn remove_batch(&self, _: &mut dyn Iterator<Item = TypeName>) -> Result<()> { Ok(()) } }
pub fn get_property_type( &self, class_name: TypeName, property_name: PropName, ) -> Result<Option<Ty<R>>> { self.properties.get((class_name, property_name)) }
function_block-full_function
[]
Rust
src/workers.rs
arthurprs/sucredb
c3cf1adddf74b7616aa6b6f10b334aba0e1b5a30
use crossbeam_channel as chan; use std::sync::atomic::{AtomicUsize, Ordering}; use std::sync::Arc; use std::{thread, time}; pub trait ExitMsg { fn exit_msg() -> Self; fn is_exit(&self) -> bool; } pub struct WorkerSender<T: ExitMsg + Send + 'static> { cursor: AtomicUsize, alive_threads: Arc<AtomicUsize>, channels: Vec<chan::Sender<T>>, } impl<T: ExitMsg + Send + 'static> Clone for WorkerSender<T> { fn clone(&self) -> Self { WorkerSender { cursor: Default::default(), channels: self.channels.clone(), alive_threads: self.alive_threads.clone(), } } } pub struct WorkerManager<T: ExitMsg + Send + 'static> { thread_count: usize, threads: Vec<thread::JoinHandle<()>>, name: String, alive_threads: Arc<AtomicUsize>, channels: Vec<chan::Sender<T>>, } impl<T: ExitMsg + Send + 'static> WorkerManager<T> { pub fn new(name: String, thread_count: usize) -> Self { assert!(thread_count > 0); WorkerManager { thread_count: thread_count, threads: Default::default(), name: name, alive_threads: Default::default(), channels: Default::default(), } } pub fn start<F>(&mut self, mut worker_fn_gen: F) where F: FnMut() -> Box<FnMut(T) + Send>, { assert!(self.channels.is_empty()); for i in 0..self.thread_count { let mut worker_fn = worker_fn_gen(); let (tx, rx) = chan::unbounded(); let alive_handle = self.alive_threads.clone(); self.channels.push(tx); self.threads.push( thread::Builder::new() .name(format!("Worker:{}:{}", i, self.name)) .spawn(move || { alive_handle.fetch_add(1, Ordering::SeqCst); for m in rx { if m.is_exit() { break; } worker_fn(m); } alive_handle.fetch_sub(1, Ordering::SeqCst); info!("Exiting worker"); }).unwrap(), ); } } pub fn sender(&self) -> WorkerSender<T> { assert!(!self.channels.is_empty()); WorkerSender { cursor: Default::default(), channels: self.channels.clone(), alive_threads: self.alive_threads.clone(), } } } impl<T: ExitMsg + Send + 'static> WorkerSender<T> { pub fn send(&self, msg: T) -> bool { let cursor = self.cursor.fetch_add(1, Ordering::Relaxed); 
self.send_to(cursor, msg) } pub fn send_to(&self, seed: usize, msg: T) -> bool { self.channels[seed % self.channels.len()].send(msg); self.alive_threads.load(Ordering::SeqCst) > 0 } } impl<T: ExitMsg + Send + 'static> Drop for WorkerManager<T> { fn drop(&mut self) { for c in &*self.channels { let _ = c.send(T::exit_msg()); } for t in self.threads.drain(..) { let _ = t.join(); } } } pub fn timer_fn<F>( name: String, interval: time::Duration, mut callback: F, ) -> thread::JoinHandle<()> where F: FnMut(time::Instant) -> bool + Send + 'static, { thread::Builder::new() .name(format!("Timer:{}", name)) .spawn(move || loop { thread::sleep(interval); if !callback(time::Instant::now()) { break; } }).expect("Can't start timer") }
use crossbeam_channel as chan; use std::sync::atomic::{AtomicUsize, Ordering}; use std::sync::Arc; use std::{thread, time}; pub trait ExitMsg { fn exit_msg() -> Self; fn is_exit(&self) -> bool; } pub struct WorkerSender<
<T> { assert!(!self.channels.is_empty()); WorkerSender { cursor: Default::default(), channels: self.channels.clone(), alive_threads: self.alive_threads.clone(), } } } impl<T: ExitMsg + Send + 'static> WorkerSender<T> { pub fn send(&self, msg: T) -> bool { let cursor = self.cursor.fetch_add(1, Ordering::Relaxed); self.send_to(cursor, msg) } pub fn send_to(&self, seed: usize, msg: T) -> bool { self.channels[seed % self.channels.len()].send(msg); self.alive_threads.load(Ordering::SeqCst) > 0 } } impl<T: ExitMsg + Send + 'static> Drop for WorkerManager<T> { fn drop(&mut self) { for c in &*self.channels { let _ = c.send(T::exit_msg()); } for t in self.threads.drain(..) { let _ = t.join(); } } } pub fn timer_fn<F>( name: String, interval: time::Duration, mut callback: F, ) -> thread::JoinHandle<()> where F: FnMut(time::Instant) -> bool + Send + 'static, { thread::Builder::new() .name(format!("Timer:{}", name)) .spawn(move || loop { thread::sleep(interval); if !callback(time::Instant::now()) { break; } }).expect("Can't start timer") }
T: ExitMsg + Send + 'static> { cursor: AtomicUsize, alive_threads: Arc<AtomicUsize>, channels: Vec<chan::Sender<T>>, } impl<T: ExitMsg + Send + 'static> Clone for WorkerSender<T> { fn clone(&self) -> Self { WorkerSender { cursor: Default::default(), channels: self.channels.clone(), alive_threads: self.alive_threads.clone(), } } } pub struct WorkerManager<T: ExitMsg + Send + 'static> { thread_count: usize, threads: Vec<thread::JoinHandle<()>>, name: String, alive_threads: Arc<AtomicUsize>, channels: Vec<chan::Sender<T>>, } impl<T: ExitMsg + Send + 'static> WorkerManager<T> { pub fn new(name: String, thread_count: usize) -> Self { assert!(thread_count > 0); WorkerManager { thread_count: thread_count, threads: Default::default(), name: name, alive_threads: Default::default(), channels: Default::default(), } } pub fn start<F>(&mut self, mut worker_fn_gen: F) where F: FnMut() -> Box<FnMut(T) + Send>, { assert!(self.channels.is_empty()); for i in 0..self.thread_count { let mut worker_fn = worker_fn_gen(); let (tx, rx) = chan::unbounded(); let alive_handle = self.alive_threads.clone(); self.channels.push(tx); self.threads.push( thread::Builder::new() .name(format!("Worker:{}:{}", i, self.name)) .spawn(move || { alive_handle.fetch_add(1, Ordering::SeqCst); for m in rx { if m.is_exit() { break; } worker_fn(m); } alive_handle.fetch_sub(1, Ordering::SeqCst); info!("Exiting worker"); }).unwrap(), ); } } pub fn sender(&self) -> WorkerSender
random
[ { "content": "pub trait Metadata\n\n : Serialize + DeserializeOwned + Clone + PartialEq + Send + fmt::Debug + 'static\n\n {\n\n}\n\n\n\nimpl<T: Serialize + DeserializeOwned + Clone + PartialEq + Send + fmt::Debug + 'static> Metadata\n\n for T {\n\n}\n\n\n", "file_path": "src/gossip.rs", "rank": 1, "score": 113012.15028052495 }, { "content": "// rwlock needs metadata to be sync, as it's read concurrently by multiple threads\n\npub trait Metadata:\n\n Clone + PartialEq + Serialize + DeserializeOwned + Send + Sync + fmt::Debug + 'static\n\n{\n\n}\n\n\n\nimpl<T: Clone + PartialEq + Serialize + DeserializeOwned + Send + Sync + fmt::Debug + 'static>\n\n Metadata for T\n\n{}\n\n\n\n// *pseudo* interval used to calculate aae msg rate\n\nconst DHT_AAE_INTERVAL_MS: u64 = 1_000;\n\n// interval for active anti entropy checks\n\nconst DHT_AAE_TRIGGER_INTERVAL_MS: u64 = 1_000;\n\n\n\n/// The Cluster controller, it knows how to map keys to their vnodes and\n\n/// whose nodes hold data for each vnodes.\n\n/// Calls a callback on cluster changes so the database can execute logic to converge\n\n/// it's state to match the DHT.\n\n/// Only knows about NodeIds (and their fabric addresses), Vnodes and other high level info,\n\n/// Extra (static) information is attached to NodeId through a Metadata type.\n\npub struct DHT<T: Metadata> {\n\n node: NodeId,\n\n inner: Arc<RwLock<Inner<T>>>,\n\n}\n\n\n", "file_path": "src/dht.rs", "rank": 2, "score": 113012.15028052495 }, { "content": "pub trait LoggerExt {\n\n fn log_error(&self, msg: &str);\n\n fn log_warn(&self, msg: &str);\n\n}\n\n\n\nimpl<T, U: fmt::Debug> LoggerExt for Result<T, U> {\n\n fn log_error(&self, msg: &str) {\n\n if let &Err(ref e) = self {\n\n error!(\"{}: {:?}\", msg, e);\n\n }\n\n }\n\n fn log_warn(&self, msg: &str) {\n\n if let &Err(ref e) = self {\n\n warn!(\"{}: {:?}\", msg, e);\n\n }\n\n }\n\n}\n", "file_path": "src/utils.rs", "rank": 4, "score": 109644.02019253114 }, { "content": "pub fn setup_default_logging() 
{\n\n let config = log4rs::config::Config::builder()\n\n .appender(\n\n log4rs::config::Appender::builder().build(\n\n \"console\",\n\n Box::new(\n\n log4rs::append::console::ConsoleAppender::builder()\n\n .target(log4rs::append::console::Target::Stderr)\n\n .build(),\n\n ),\n\n ),\n\n ).logger(\n\n log4rs::config::Logger::builder()\n\n .appender(\"console\")\n\n .build(\"sucredb\", log::LevelFilter::Info),\n\n ).build(log4rs::config::Root::builder().build(log::LevelFilter::Off))\n\n .expect(\"failed to setup default logging\");\n\n\n\n log4rs::init_config(config).expect(\"failed to init logging\");\n\n}\n", "file_path": "src/config.rs", "rank": 5, "score": 104534.74962819979 }, { "content": "pub trait CausalValue: Default {\n\n fn merge<VV: AbsVersionVector>(&mut self, other: &mut Self, s_vv: &VV, o_vv: &VV);\n\n fn is_empty(&self) -> bool;\n\n}\n\n\n\n#[derive(Debug, Default, Clone, PartialEq, Serialize, Deserialize)]\n\npub struct VersionVector(LinearMap<Id, Version>);\n\n\n\n// A VersionVector with exceptions (non continuous dots)\n\n#[derive(Debug, Default, Clone, Serialize, Deserialize)]\n\npub struct DeltaVersionVector(Vec<(Id, Version)>);\n\n\n\nimpl BitmappedVersion {\n\n fn int_to_bitmap(base: Version, bitmap: u32) -> RoaringTreemap {\n\n let mut result = RoaringTreemap::new();\n\n if bitmap != 0 {\n\n for i in 0..32 {\n\n if bitmap & (1 << i) != 0 {\n\n result.insert(base + 1 + i as Version);\n\n }\n", "file_path": "src/version_vector.rs", "rank": 6, "score": 99725.41589875324 }, { "content": "pub fn is_dir_empty_or_absent<P: AsRef<path::Path>>(path: P) -> io::Result<bool> {\n\n match fs::read_dir(path.as_ref()) {\n\n Ok(dir) => Ok(dir.count() == 0),\n\n Err(ref err) if err.kind() == io::ErrorKind::NotFound => Ok(true),\n\n Err(err) => Err(err),\n\n }\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 7, "score": 99418.408479762 }, { "content": "pub trait AbsVersionVector: Default {\n\n fn contains(&self, id: Id, version: Version) -> bool;\n\n}\n\n\n", 
"file_path": "src/version_vector.rs", "rank": 8, "score": 97219.58707113654 }, { "content": "#[cfg(test)]\n\npub fn sleep_ms(ms: u64) {\n\n ::std::thread::sleep(::std::time::Duration::from_millis(ms));\n\n}\n\n\n\n#[cfg(test)]\n\nmacro_rules! assert_eq_repr {\n\n ($left:expr, $right:expr) => {{\n\n match (format!(\"{:?}\", &$left), format!(\"{:?}\", &$right)) {\n\n (left_val, right_val) => {\n\n if !(left_val == right_val) {\n\n panic!(\n\n \"repr assertion failed: `(debug(left) == debug(right))` \\\n\n (left: `{:?}`, right: `{:?}`)\",\n\n left_val, right_val\n\n )\n\n }\n\n }\n\n }\n\n }};\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 9, "score": 94483.11005912823 }, { "content": "pub fn assume_str(bytes: &[u8]) -> &str {\n\n unsafe { ::std::str::from_utf8_unchecked(bytes) }\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 10, "score": 89138.48243676391 }, { "content": "pub fn render_set(cube: Cube) -> RespValue {\n\n match cube {\n\n Cube::Set(s) => {\n\n let array = s\n\n .values\n\n .into_iter()\n\n .map(|(v, _)| RespValue::Data(v))\n\n .collect();\n\n RespValue::Array(array)\n\n }\n\n Cube::Void(_) => RespValue::Array(vec![]),\n\n _ => CommandError::TypeError.into(),\n\n }\n\n}\n\n\n\n/*\n\nUsing the vv from cubes to track key dots (the latest version from each node) doesn't work, example:\n\n\n\n-> n3 is partitioned out\n\n-> n1 \"SET a v\" gets dot n1-1\n", "file_path": "src/cubes.rs", "rank": 11, "score": 86856.2201450992 }, { "content": "pub fn render_counter(cube: Cube) -> RespValue {\n\n match cube {\n\n Cube::Counter(c) => RespValue::Int(c.get()),\n\n Cube::Void(_vv) => RespValue::Nil,\n\n _ => CommandError::TypeError.into(),\n\n }\n\n}\n\n\n", "file_path": "src/cubes.rs", "rank": 12, "score": 86856.2201450992 }, { "content": "pub fn render_value(cube: Cube) -> RespValue {\n\n match cube {\n\n Cube::Value(v) => {\n\n let serialized_vv = bincode::serialize(&v.vv).unwrap();\n\n let mut values: Vec<_> = v\n\n .values\n\n .into_iter()\n\n 
.filter_map(|(_, ov)| ov.map(RespValue::Data))\n\n .collect();\n\n values.push(RespValue::Data(serialized_vv.into()));\n\n RespValue::Array(values)\n\n }\n\n Cube::Void(vv) => {\n\n let serialized_vv = bincode::serialize(&vv).unwrap();\n\n RespValue::Array(vec![RespValue::Data(serialized_vv.into())])\n\n }\n\n _ => CommandError::TypeError.into(),\n\n }\n\n}\n\n\n", "file_path": "src/cubes.rs", "rank": 13, "score": 86856.2201450992 }, { "content": "pub fn setup_logging(config_value: &yaml::Value) {\n\n let raw_config: log4rs::file::RawConfig =\n\n yaml::from_value(config_value.clone()).expect(\"failed to parse logging config\");\n\n\n\n let (appenders, errors) = raw_config.appenders_lossy(&Default::default());\n\n if !errors.is_empty() {\n\n panic!(\"failed to configure logging: {:?}\", errors);\n\n }\n\n\n\n let (config, errors) = log4rs::config::Config::builder()\n\n .appenders(appenders)\n\n .loggers(raw_config.loggers())\n\n .build_lossy(raw_config.root());\n\n\n\n if !errors.is_empty() {\n\n panic!(\"failed to configure logging: {:?}\", errors);\n\n }\n\n\n\n log4rs::init_config(config).expect(\"failed to init logging\");\n\n}\n\n\n", "file_path": "src/config.rs", "rank": 14, "score": 86856.2201450992 }, { "content": "pub fn render_map(cube: Cube) -> RespValue {\n\n match cube {\n\n Cube::Map(m) => {\n\n let mut array = Vec::with_capacity(m.values.len() * 2);\n\n for (k, v) in m.values.into_iter() {\n\n array.push(RespValue::Data(k));\n\n array.push(RespValue::Data(v.value));\n\n }\n\n RespValue::Array(array)\n\n }\n\n Cube::Void(_) => RespValue::Array(vec![]),\n\n _ => CommandError::TypeError.into(),\n\n }\n\n}\n\n\n", "file_path": "src/cubes.rs", "rank": 15, "score": 86856.2201450992 }, { "content": "pub fn render_type(cube: Cube) -> RespValue {\n\n use self::Cube::*;\n\n let ty = match cube {\n\n Counter(_) => \"counter\", // non-standard\n\n Value(_) => \"string\",\n\n Map(_) => \"hash\",\n\n Set(_) => \"set\",\n\n Void(_) => \"none\",\n\n };\n\n 
RespValue::Data(ty.into())\n\n}\n\n\n", "file_path": "src/cubes.rs", "rank": 16, "score": 86856.2201450992 }, { "content": "pub fn split_u64(uint: u64) -> (u32, u32) {\n\n ((uint >> 32) as u32, uint as u32)\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 17, "score": 84398.10696633196 }, { "content": "/// RedisCluster style partitioning\n\npub fn hash_slot(mut key: &[u8]) -> u16 {\n\n if let Some(open) = key.iter().position(|&x| x == b'{') {\n\n // note that close will be relative to open due to the skip()\n\n if let Some(close) = key[open + 1..].iter().position(|&x| x == b'}') {\n\n if close > 0 {\n\n // found { and } with something in between\n\n key = &key[open + 1..open + 1 + close];\n\n }\n\n }\n\n }\n\n crc16::State::<crc16::XMODEM>::calculate(key) % HASH_SLOTS\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n fn raw_hash(key: &[u8]) -> u16 {\n\n crc16::State::<crc16::XMODEM>::calculate(key) % HASH_SLOTS\n\n }\n", "file_path": "src/hash.rs", "rank": 18, "score": 84398.10696633196 }, { "content": "pub fn join_u64(hi: u32, lo: u32) -> u64 {\n\n ((hi as u64) << 32) | (lo as u64)\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 19, "score": 80160.52646919721 }, { "content": "pub fn read_config_file(path: &Path, config: &mut Config) {\n\n debug!(\"Reading config file\");\n\n let yaml = {\n\n let mut s = String::new();\n\n File::open(path)\n\n .and_then(|mut f| f.read_to_string(&mut s))\n\n .expect(\"Error reading config file\");\n\n yaml::from_str::<yaml::Value>(&s).expect(\"Error parsing config file\")\n\n };\n\n debug!(\"Done reading config file: {:?}\", config);\n\n\n\n cfi!(yaml, config, data_dir, as_str);\n\n cfi!(yaml, config, cluster_name, as_str);\n\n cfi!(yaml, config, listen_addr, as_str, SocketAddr::from_str);\n\n cfi!(yaml, config, fabric_addr, as_str, SocketAddr::from_str);\n\n // pub cmd_init: Option<InitCommand>,\n\n cfi!(yaml, config, worker_timer, as_str, parse_duration);\n\n cfi!(yaml, config, worker_count, as_u64, 
try_into);\n\n cfi!(yaml, config, sync_incomming_max, as_u64, try_into);\n\n cfi!(yaml, config, sync_outgoing_max, as_u64, try_into);\n", "file_path": "src/config.rs", "rank": 20, "score": 78231.15959863242 }, { "content": "pub fn parse_size(size_text: &str) -> Result<i64, GenericError> {\n\n let (number, suffix) = split_number_suffix(size_text)?;\n\n let scale = match suffix.to_lowercase().as_ref() {\n\n \"b\" => 1,\n\n \"k\" | \"kb\" => 1024,\n\n \"m\" | \"mb\" => 1024 * 1024,\n\n \"g\" | \"gb\" => 1024 * 1024 * 1024,\n\n _ => return Err(format!(\"Unknown size suffix `{}`\", suffix).into()),\n\n };\n\n number.checked_mul(scale).ok_or(\"Overflow error\".into())\n\n}\n\n\n\nmacro_rules! cfi {\n\n ($yaml:ident, $target:ident, $string:ident, $method:ident) => {\n\n if let Some(v) = $yaml.get(stringify!($string)) {\n\n let v = v\n\n .$method()\n\n .expect(concat!(\"Can't access field with\", stringify!($method)));\n\n $target.$string = v.into();\n\n }\n", "file_path": "src/config.rs", "rank": 21, "score": 76448.30357189555 }, { "content": "pub fn parse_duration(duration_text: &str) -> Result<i64, GenericError> {\n\n let (number, suffix) = split_number_suffix(duration_text)?;\n\n let scale = match suffix.to_lowercase().as_ref() {\n\n \"ms\" => 1,\n\n \"s\" => 1000,\n\n \"m\" => 1000 * 60,\n\n \"h\" => 1000 * 60 * 60,\n\n _ => return Err(format!(\"Unknown duration suffix `{}`\", suffix).into()),\n\n };\n\n number.checked_mul(scale).ok_or(\"Overflow error\".into())\n\n}\n\n\n", "file_path": "src/config.rs", "rank": 22, "score": 76448.30357189555 }, { "content": "pub fn replace_default<T: Default>(subject: &mut T) -> T {\n\n ::std::mem::replace(subject, Default::default())\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 23, "score": 76346.54559887541 }, { "content": "pub fn into_io_error<E: Error + Send + Sync + 'static>(e: E) -> io::Error {\n\n io::Error::new(io::ErrorKind::Other, e)\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 24, "score": 68411.56518496042 }, { 
"content": "pub fn deserialize_bitmap<'de, D>(deserializer: D) -> Result<RoaringTreemap, D::Error>\n\nwhere\n\n D: serde::Deserializer<'de>,\n\n{\n\n use serde::de::{Error, Visitor};\n\n use std::fmt;\n\n\n\n struct ByteBufVisitor;\n\n\n\n impl<'de> Visitor<'de> for ByteBufVisitor {\n\n type Value = Vec<u8>;\n\n\n\n fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {\n\n formatter.write_str(\"byte array\")\n\n }\n\n\n\n #[inline]\n\n fn visit_bytes<E>(self, v: &[u8]) -> Result<Self::Value, E>\n\n where\n\n E: Error,\n", "file_path": "src/version_vector.rs", "rank": 25, "score": 66783.24340854984 }, { "content": "pub fn serialize_bitmap<S>(value: &RoaringTreemap, serializer: S) -> Result<S::Ok, S::Error>\n\nwhere\n\n S: serde::Serializer,\n\n{\n\n use serde::ser::Error;\n\n\n\n let mut bitmap_count = 0;\n\n let mut buffer_len = 4;\n\n for (_, b) in value.bitmaps() {\n\n bitmap_count += 1;\n\n buffer_len += 4 + b.serialized_size();\n\n }\n\n let mut buffer = Vec::with_capacity(buffer_len);\n\n buffer\n\n .write_u32::<LittleEndian>(bitmap_count)\n\n .map_err(Error::custom)?;\n\n for (p, b) in value.bitmaps() {\n\n buffer.write_u32::<LittleEndian>(p).map_err(Error::custom)?;\n\n b.serialize_into(&mut buffer).map_err(Error::custom)?;\n\n }\n\n serializer.serialize_bytes(&buffer)\n\n}\n\n\n", "file_path": "src/version_vector.rs", "rank": 26, "score": 61647.88279410364 }, { "content": "#[derive(Default)]\n\nstruct Stats {\n\n incomming_syncs: u16,\n\n outgoing_syncs: u16,\n\n}\n\n\n\npub struct ContextRead {\n\n pub cube: Cube,\n\n // first key contains the render_fn for all keys\n\n pub response: Option<ResponseFn>,\n\n}\n\n\n\npub struct ContextWrite {\n\n pub version: Version,\n\n pub mutator_fn: Option<MutatorFn>,\n\n pub key: Bytes,\n\n pub cube: Cube,\n\n pub reply_result: bool,\n\n pub response: Option<RespValue>,\n\n pub response_fn: Option<ResponseFn>,\n\n}\n", "file_path": "src/database.rs", "rank": 27, "score": 60430.88423618293 }, { "content": 
"struct Context {\n\n context: Rc<SharedContext>,\n\n token: Token,\n\n requests: VecDeque<RespValue>,\n\n db_context: Option<DbContext>,\n\n}\n\n\n", "file_path": "src/server.rs", "rank": 28, "score": 60430.88423618293 }, { "content": "#[cfg(not(test))]\n\nfn main() {\n\n let server = server::Server::new(configure());\n\n server.run();\n\n}\n", "file_path": "src/main.rs", "rank": 29, "score": 59514.127785222576 }, { "content": "struct GenericIterator {\n\n db: Arc<rocksdb::DB>,\n\n iterator: rocksdb::rocksdb::DBIterator<Arc<rocksdb::DB>>,\n\n first: bool,\n\n}\n\n\n\npub struct StorageIterator(GenericIterator);\n\n\n\npub struct LogStorageIterator(GenericIterator);\n\n\n\nunsafe impl Send for GenericIterator {}\n\n\n\nimpl StorageManager {\n\n pub fn new<P: AsRef<Path>>(path: P) -> Result<StorageManager, GenericError> {\n\n let mut opts = rocksdb::DBOptions::new();\n\n opts.create_if_missing(true);\n\n opts.set_max_background_jobs(4);\n\n opts.enable_pipelined_write(true);\n\n let mut def_cf_opts = rocksdb::ColumnFamilyOptions::new();\n\n def_cf_opts\n", "file_path": "src/storage.rs", "rank": 30, "score": 58555.56136210484 }, { "content": "struct WriterContext {\n\n context: Arc<SharedContext>,\n\n peer: NodeId,\n\n connection_id: usize,\n\n}\n\n\n", "file_path": "src/fabric.rs", "rank": 31, "score": 58555.56136210484 }, { "content": "#[derive(Clone, Debug, Default, Serialize, Deserialize)]\n\nstruct MapValue {\n\n dots: DotSet,\n\n value: Bytes,\n\n timestamp: u64, // millis since epoch\n\n}\n\n\n\nimpl MapValue {\n\n fn new(dot: (Id, Version), value: Bytes) -> Self {\n\n let timestamp = time::UNIX_EPOCH.elapsed().unwrap();\n\n MapValue {\n\n dots: DotSet::from_dot(dot),\n\n value,\n\n timestamp: timestamp.as_secs() * 1_000 + (timestamp.subsec_nanos() / 1_000_000) as u64,\n\n }\n\n }\n\n}\n\n\n\nimpl CausalValue for MapValue {\n\n fn merge<VV: AbsVersionVector>(&mut self, other: &mut Self, s_vv: &VV, o_vv: &VV) {\n\n self.dots.merge(&mut other.dots, s_vv, 
o_vv);\n", "file_path": "src/cubes.rs", "rank": 32, "score": 58555.56136210484 }, { "content": "struct RespCodec;\n\n\n\nimpl codec::Decoder for RespCodec {\n\n type Item = RespValue;\n\n type Error = io::Error;\n\n\n\n fn decode(&mut self, src: &mut BytesMut) -> io::Result<Option<Self::Item>> {\n\n let (consumed, result) = resp::Parser::new(&*src)\n\n .and_then(|mut p| match p.parse() {\n\n Ok(v) => Ok((p.consumed(), Ok(Some(v)))),\n\n Err(e) => Err(e),\n\n }).unwrap_or_else(|e| match e {\n\n resp::RespError::Incomplete => (0, Ok(None)),\n\n _ => (0, Err(io::ErrorKind::InvalidData.into())),\n\n });\n\n src.split_to(consumed);\n\n result\n\n }\n\n}\n\n\n", "file_path": "src/server.rs", "rank": 33, "score": 58555.56136210484 }, { "content": "struct ReaderContext {\n\n context: Arc<SharedContext>,\n\n peer: NodeId,\n\n}\n\n\n", "file_path": "src/fabric.rs", "rank": 34, "score": 58555.56136210484 }, { "content": "#[derive(Debug, Default, Serialize, Deserialize, Clone)]\n\nstruct VNode {\n\n // nodes with ownership\n\n owners: LinearMap<NodeId, VNodeNodeStatus>,\n\n // vv\n\n version: VersionVector,\n\n}\n\n\n", "file_path": "src/dht.rs", "rank": 35, "score": 58555.56136210484 }, { "content": "struct ReqState {\n\n replies: u8,\n\n succesfull: u8,\n\n required: u8,\n\n total: u8,\n\n context: Context,\n\n}\n\n\n\n#[cfg(test)]\n\nmacro_rules! assert_any {\n\n ($value: expr, $($status:pat)|*) => {\n\n match $value {\n\n $($status)|* => (),\n\n _ => panic!(\"{:?} is not any of {}\", $value, stringify!($($status)|*))\n\n }\n\n }\n\n}\n\n\n\nmacro_rules! 
check_status {\n\n ($this: expr, $($status:pat)|*, $db: expr, $from: expr, $msg: expr, $emsg: ident, $col: ident) => {\n", "file_path": "src/vnode.rs", "rank": 36, "score": 58555.56136210484 }, { "content": "struct SharedContext {\n\n node: NodeId,\n\n addr: SocketAddr,\n\n loop_remote: tokio::reactor::Remote,\n\n msg_handlers: RwLock<LinearMap<u8, FabricMsgFn>>,\n\n con_handlers: RwLock<Vec<FabricConFn>>,\n\n // TODO: unify nodes_addr and connections maps\n\n nodes_addr: RwLock<IdHashMap<NodeId, SocketAddr>>,\n\n connections: RwLock<IdHashMap<NodeId, Vec<(usize, SenderChan)>>>,\n\n connection_gen: AtomicUsize,\n\n}\n\n\n\nimpl SharedContext {\n\n fn register_node(&self, peer: NodeId, peer_addr: SocketAddr) -> Option<SocketAddr> {\n\n self.nodes_addr.write().unwrap().insert(peer, peer_addr)\n\n }\n\n\n\n fn remove_node(&self, peer: NodeId) -> Option<SocketAddr> {\n\n self.nodes_addr.write().unwrap().remove(&peer)\n\n }\n", "file_path": "src/fabric.rs", "rank": 37, "score": 58555.56136210484 }, { "content": "struct SharedContext {\n\n database: Arc<Database>,\n\n db_sender: RefCell<WorkerSender<WorkerMsg>>,\n\n token_chans: Arc<Mutex<IdHashMap<Token, fmpsc::UnboundedSender<DbContext>>>>,\n\n}\n\n\n\npub struct Server {\n\n config: Config,\n\n}\n\n\n\nimpl Context {\n\n fn new(\n\n context: Rc<SharedContext>,\n\n token: Token,\n\n chan_tx: fmpsc::UnboundedSender<DbContext>,\n\n ) -> Self {\n\n metrics::CLIENT_CONNECTION.inc();\n\n context.token_chans.lock().unwrap().insert(token, chan_tx);\n\n Context {\n\n context: context,\n", "file_path": "src/server.rs", "rank": 38, "score": 58555.56136210484 }, { "content": "// u32(le) payload len + bincode payload\n\nstruct FramedBincodeCodec;\n\n\n\nimpl codec::Decoder for FramedBincodeCodec {\n\n type Item = FabricMsg;\n\n type Error = io::Error;\n\n\n\n fn decode(&mut self, src: &mut BytesMut) -> io::Result<Option<Self::Item>> {\n\n let (consumed, result) = {\n\n let mut bytes: &[u8] = &*src;\n\n if let Ok(msg_len) = 
bytes.read_u32::<LittleEndian>() {\n\n if bytes.len() >= msg_len as usize {\n\n match bincode::deserialize_from(&mut bytes) {\n\n Ok(v) => (4 + msg_len as usize, Ok(Some(v))),\n\n Err(e) => (0, Err(into_io_error(e))),\n\n }\n\n } else {\n\n (0, Ok(None))\n\n }\n\n } else {\n\n (0, Ok(None))\n", "file_path": "src/fabric.rs", "rank": 39, "score": 56874.19530426996 }, { "content": "struct U16BeSuffixTransform;\n\n\n\nimpl rocksdb::SliceTransform for U16BeSuffixTransform {\n\n fn transform<'a>(&mut self, key: &'a [u8]) -> &'a [u8] {\n\n &key[..2]\n\n }\n\n\n\n fn in_domain(&mut self, _key: &[u8]) -> bool {\n\n true\n\n }\n\n}\n\n\n\npub struct StorageManager {\n\n db: Arc<rocksdb::DB>,\n\n}\n\n\n", "file_path": "src/storage.rs", "rank": 40, "score": 56874.19530426996 }, { "content": "struct SyncKeysIterator {\n\n dots_delta: BitmappedVersionVectorDelta,\n\n keys: hash_set::IntoIter<Bytes>,\n\n}\n\n\n\n// TODO: Refactor into trait objects\n\n// trait Synchronization { fn on_.., .. }\n\n// new_sync_sender -> Box<Synchronization>\n\npub enum Synchronization {\n\n SyncSender {\n\n // bvv in peer at the time of sync start\n\n clocks_in_peer: BitmappedVersionVector,\n\n // partial copy of the local bvv at the time of sync start\n\n clocks_snapshot: BitmappedVersionVector,\n\n iterator: IteratorFn,\n\n // TODO: only store keys as resends should be rare\n\n inflight: InFlightSyncMsgMap,\n\n cookie: Cookie,\n\n peer: NodeId,\n\n // count of sent keys (includes inflight)\n", "file_path": "src/vnode_sync.rs", "rank": 41, "score": 55358.175388254414 }, { "content": "#[derive(Debug, Serialize, Deserialize)]\n\nstruct SavedVNodeState {\n\n id: NodeId,\n\n clocks: BitmappedVersionVector,\n\n log_clocks: BitmappedVersionVector,\n\n clean_shutdown: bool,\n\n}\n\n\n", "file_path": "src/vnode.rs", "rank": 42, "score": 55358.175388254414 }, { "content": "#[derive(Debug, Serialize, Deserialize, Clone)]\n\n#[serde(bound = \"T: DeserializeOwned\")]\n\nstruct Node<T: Metadata> {\n\n addr: 
SocketAddr,\n\n status: NodeStatus,\n\n meta: T,\n\n version: VersionVector,\n\n}\n\n\n", "file_path": "src/dht.rs", "rank": 43, "score": 52382.23582715145 }, { "content": "#[derive(Debug, Serialize, Deserialize, Clone)]\n\n#[serde(bound = \"T: DeserializeOwned\")]\n\nstruct Ring<T: Metadata> {\n\n vnodes: Vec<VNode>,\n\n nodes: IdHashMap<NodeId, Node<T>>,\n\n replication_factor: usize,\n\n // Note: For the version vector to be \"valid\" all proposedchanges by a node must be serialized\n\n version: VersionVector,\n\n cluster: String,\n\n}\n\n\n\npub struct RingDescription {\n\n pub replication_factor: u8,\n\n pub partitions: u16,\n\n}\n\n\n\nimpl RingDescription {\n\n pub fn new(replication_factor: u8, partitions: u16) -> Self {\n\n RingDescription {\n\n replication_factor: replication_factor,\n\n partitions: partitions,\n\n }\n", "file_path": "src/dht.rs", "rank": 44, "score": 52382.23582715145 }, { "content": "#[derive(Debug)]\n\nstruct Node<T: Metadata> {\n\n incarnation: Seq,\n\n status_change: Instant,\n\n status: NodeStatus,\n\n meta: T,\n\n}\n\n\n", "file_path": "src/gossip.rs", "rank": 45, "score": 52382.23582715145 }, { "content": "struct Inner<T: Metadata> {\n\n addr: SocketAddr,\n\n seq: Seq,\n\n incarnation: Seq,\n\n meta: T,\n\n nodes: HashMap<SocketAddr, Node<T>>,\n\n next_alive_probe: Instant,\n\n next_dead_probe: Instant,\n\n pingreq_inflight: InFlightMap<Seq, (SocketAddr, SocketAddr), Instant>,\n\n ping_inflight: InFlightMap<Seq, SocketAddr, Instant>,\n\n suspect_inflight: InFlightMap<SocketAddr, Instant, Instant>,\n\n send_queue: fmpsc::UnboundedSender<(SocketAddr, Message<T>)>,\n\n broadcast_queue: Vec<(u32, Message<T>)>,\n\n callback: GossiperCallback<T>,\n\n leaving: bool,\n\n bootstraping: bool,\n\n}\n\n\n", "file_path": "src/gossip.rs", "rank": 46, "score": 52382.23582715145 }, { "content": "struct Inner<T: Metadata> {\n\n node: NodeId,\n\n ring: Ring<T>,\n\n callback: Option<DHTChangeFn>,\n\n fabric: Arc<Fabric>,\n\n next_req_broadcast: 
Instant,\n\n sync_on_connect: bool,\n\n sync_aae: bool,\n\n}\n\n\n", "file_path": "src/dht.rs", "rank": 47, "score": 52382.23582715145 }, { "content": "fn configure() -> config::Config {\n\n use clap::{App, Arg, SubCommand};\n\n use config::*;\n\n use std::path::Path;\n\n\n\n let matches = App::new(\"SucreDB\")\n\n .version(\"0.1\")\n\n .about(\"A database made of sugar cubes\")\n\n .arg(\n\n Arg::with_name(\"config_file\")\n\n .short(\"c\")\n\n .long(\"config\")\n\n .takes_value(true)\n\n .help(\".yaml config file\")\n\n .long_help(\n\n \"Path to the .yaml config file. Note that configuration \\\n\n set through the command line will take precedence \\\n\n over the config file.\",\n\n ).display_order(0),\n\n ).arg(\n", "file_path": "src/main.rs", "rank": 48, "score": 51616.47152639438 }, { "content": "#[derive(Debug)]\n\nstruct Pair<T, V>(T, V);\n\n\n\nimpl<T: PartialEq, V> PartialEq<Pair<T, V>> for Pair<T, V> {\n\n fn eq(&self, other: &Pair<T, V>) -> bool {\n\n other.0.eq(&self.0)\n\n }\n\n}\n\n\n\nimpl<T: Eq, V> Eq for Pair<T, V> {}\n\n\n\nimpl<T: PartialOrd, V> PartialOrd<Pair<T, V>> for Pair<T, V> {\n\n fn partial_cmp(&self, other: &Pair<T, V>) -> Option<Ordering> {\n\n other.0.partial_cmp(&self.0)\n\n }\n\n}\n\n\n\nimpl<T: Ord, V> Ord for Pair<T, V> {\n\n fn cmp(&self, other: &Pair<T, V>) -> Ordering {\n\n other.0.cmp(&self.0)\n\n }\n\n}\n", "file_path": "src/inflightmap.rs", "rank": 49, "score": 46321.98762698597 }, { "content": "struct UdpCodec<T: Metadata>(PhantomData<T>);\n\n\n\nimpl<T: Metadata> tokio::net::UdpCodec for UdpCodec<T> {\n\n type In = (SocketAddr, Message<T>);\n\n type Out = (SocketAddr, Message<T>);\n\n\n\n fn decode(&mut self, addr: &SocketAddr, buf: &[u8]) -> io::Result<Self::In> {\n\n trace!(\"decoding {:?}\", buf);\n\n match bincode::deserialize(buf) {\n\n Ok(msg) => Ok((*addr, msg)),\n\n Err(err) => {\n\n warn!(\"decode err: {:?}\", err);\n\n Err(into_io_error(err))\n\n }\n\n }\n\n }\n\n\n\n fn encode(&mut self, addr_msg: Self::Out, 
buf: &mut Vec<u8>) -> SocketAddr {\n\n let (addr, msg) = addr_msg;\n\n trace!(\"encoding {:?}\", msg);\n", "file_path": "src/gossip.rs", "rank": 50, "score": 43927.3760638381 }, { "content": "fn parse_int<T: ::std::str::FromStr + Default>(\n\n try: bool,\n\n args: &[&Bytes],\n\n i: usize,\n\n) -> Result<T, CommandError> {\n\n if try {\n\n assume_str(&args[i])\n\n .parse()\n\n .map_err(|_| CommandError::InvalidIntValue)\n\n } else {\n\n Ok(Default::default())\n\n }\n\n}\n\n\n", "file_path": "src/command.rs", "rank": 51, "score": 42056.524905664344 }, { "content": "fn check_value_len(value_len: usize) -> Result<(), CommandError> {\n\n if value_len > config::MAX_VALUE_LEN {\n\n Err(CommandError::InvalidKey)\n\n } else {\n\n Ok(())\n\n }\n\n}\n\n\n\nimpl Database {\n\n pub fn handler_cmd(&self, mut context: Context) {\n\n let cmd = context.commands.pop().unwrap();\n\n if let Err(e) = self.handle_cmd(&mut context, cmd) {\n\n context.clear();\n\n self.respond_error(&mut context, e);\n\n }\n\n }\n\n\n\n fn handle_cmd(&self, context: &mut Context, cmd: RespValue) -> Result<(), CommandError> {\n\n debug!(\"Processing ({:?}) {:?}\", context.token, cmd);\n\n let mut args = Vec::new();\n", "file_path": "src/command.rs", "rank": 52, "score": 41265.99894786852 }, { "content": "fn check_key_len(key_len: usize) -> Result<(), CommandError> {\n\n if key_len > config::MAX_KEY_LEN {\n\n Err(CommandError::InvalidKey)\n\n } else {\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "src/command.rs", "rank": 53, "score": 41265.99894786852 }, { "content": "fn split_number_suffix(s: &str) -> Result<(i64, &str), GenericError> {\n\n let digits_end = s\n\n .trim()\n\n .chars()\n\n .position(|c| !c.is_digit(10))\n\n .unwrap_or(s.len());\n\n let (digits, suffix) = s.split_at(digits_end);\n\n Ok((digits.parse::<i64>()?, suffix.trim_left()))\n\n}\n\n\n", "file_path": "src/config.rs", "rank": 54, "score": 38982.47621482232 }, { "content": "fn check_arg_count(count: usize, min: usize, max: usize) -> 
Result<(), CommandError> {\n\n if count < min || count > max {\n\n Err(CommandError::InvalidArgCount)\n\n } else {\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "src/command.rs", "rank": 55, "score": 34734.42523579293 }, { "content": "#[inline]\n\nfn build_key<'a>(buffer: &'a mut [u8], num: u16, key: &[u8]) -> &'a [u8] {\n\n (&mut buffer[..2]).write_u16::<BigEndian>(num).unwrap();\n\n (&mut buffer[2..]).write_all(key).unwrap();\n\n &buffer[..2 + key.len()]\n\n}\n\n\n", "file_path": "src/storage.rs", "rank": 56, "score": 34323.27420019868 }, { "content": "#[inline]\n\nfn build_log_prefix<'a>(buffer: &'a mut [u8], num: u16, prefix: u64) -> &'a [u8] {\n\n (&mut buffer[..2]).write_u16::<BigEndian>(num).unwrap();\n\n (&mut buffer[2..2 + 8])\n\n .write_u64::<BigEndian>(prefix)\n\n .unwrap();\n\n &buffer[..2 + 8]\n\n}\n\n\n\n// TODO: support TTL\n\n// TODO: specific comparator for log cf\n\n// TODO: merge operator could be a big win\n\npub struct Storage {\n\n db: Arc<rocksdb::DB>,\n\n cf: &'static rocksdb::CFHandle,\n\n log_cf: &'static rocksdb::CFHandle,\n\n num: u16,\n\n}\n\n\n\nunsafe impl Sync for Storage {}\n\nunsafe impl Send for Storage {}\n", "file_path": "src/storage.rs", "rank": 57, "score": 33497.68764315889 }, { "content": "#[inline]\n\nfn build_log_key<'a>(buffer: &'a mut [u8], num: u16, log_key: (u64, u64)) -> &'a [u8] {\n\n (&mut buffer[..2]).write_u16::<BigEndian>(num).unwrap();\n\n (&mut buffer[2..2 + 8])\n\n .write_u64::<BigEndian>(log_key.0)\n\n .unwrap();\n\n (&mut buffer[2 + 8..2 + 8 + 8])\n\n .write_u64::<BigEndian>(log_key.1)\n\n .unwrap();\n\n &buffer[..2 + 8 + 8]\n\n}\n\n\n", "file_path": "src/storage.rs", "rank": 58, "score": 31471.07756711547 }, { "content": "type IteratorFn = Box<FnMut(&VNodeState) -> Result<Option<(Bytes, Cube)>, ()> + Send>;\n\n\n", "file_path": "src/vnode_sync.rs", "rank": 59, "score": 29849.640501802598 }, { "content": "type SenderChan = fmpsc::UnboundedSender<Bytes>;\n", "file_path": "src/fabric.rs", "rank": 60, "score": 
26857.20219596626 }, { "content": "use bincode;\n\nuse bytes::Bytes;\n\nuse command::CommandError;\n\nuse linear_map::{Entry as LMEntry, LinearMap};\n\nuse resp::RespValue;\n\nuse std::boxed::FnBox;\n\nuse std::time;\n\nuse version_vector::*;\n\n\n\npub type MutatorFn =\n\n Box<FnBox(Id, Version, Cube) -> Result<(Cube, Option<RespValue>), CommandError> + Send>;\n\npub type ResponseFn = Box<FnMut(Cube) -> RespValue + Send>;\n\n\n\n#[derive(Clone, Debug, Serialize, Deserialize)]\n\npub enum Cube {\n\n // the order is used to merge different types in a deterministic way\n\n Counter(Counter),\n\n Value(Value),\n\n Map(Map),\n\n Set(Set),\n", "file_path": "src/cubes.rs", "rank": 61, "score": 16.969724887505095 }, { "content": "// require sync as it can be called from any worker thread\n\npub type DatabaseResponseFn = Box<Fn(Context) + Send + Sync>;\n\n\n\npub enum WorkerMsg {\n\n Fabric(NodeId, FabricMsg),\n\n Command(Context),\n\n Tick(time::Instant),\n\n DHTFabric(NodeId, FabricMsg),\n\n DHTChange,\n\n Exit,\n\n}\n\n\n\nimpl ExitMsg for WorkerMsg {\n\n fn exit_msg() -> Self {\n\n WorkerMsg::Exit\n\n }\n\n fn is_exit(&self) -> bool {\n\n if let WorkerMsg::Exit = self {\n\n true\n\n } else {\n\n false\n\n }\n\n }\n\n}\n\n\n\n#[derive(Default)]\n", "file_path": "src/database.rs", "rank": 63, "score": 14.102824510302785 }, { "content": " }\n\n\n\n pub fn contained(&self, bvv: &BitmappedVersionVector) -> bool {\n\n self.0.iter().all(|(&i, &v)| bvv.contains_all(i, v))\n\n }\n\n}\n\n\n\nimpl AbsVersionVector for VersionVector {\n\n fn contains(&self, id: Id, version: Version) -> bool {\n\n Self::contains(self, id, version)\n\n }\n\n}\n\n\n\nimpl DotSet {\n\n pub fn new() -> Self {\n\n DotSet(Default::default())\n\n }\n\n\n\n pub fn from_dot(dot: (Id, Version)) -> Self {\n\n let mut result = Self::new();\n", "file_path": "src/version_vector.rs", "rank": 65, "score": 12.43032873301796 }, { "content": " true\n\n }\n\n}\n\n\n\nimpl AbsVersionVector for DeltaVersionVector {\n\n 
fn contains(&self, id: Id, version: Version) -> bool {\n\n Self::contains(self, id, version)\n\n }\n\n}\n\n\n\nimpl VersionVector {\n\n pub fn new() -> Self {\n\n VersionVector(Default::default())\n\n }\n\n\n\n pub fn contains(&self, id: Id, version: Version) -> bool {\n\n self.0.get(&id).map(|&x| x >= version).unwrap_or(false)\n\n }\n\n\n\n pub fn merge(&mut self, other: &Self) {\n", "file_path": "src/version_vector.rs", "rank": 66, "score": 12.093689986916251 }, { "content": " for (&id, &version) in &other.0 {\n\n self.add(id, version);\n\n }\n\n }\n\n\n\n pub fn descends(&self, other: &Self) -> bool {\n\n other\n\n .0\n\n .iter()\n\n .all(|(k, ov)| self.0.get(k).map(|v| v >= ov).unwrap_or(false))\n\n }\n\n\n\n pub fn strict_descends(&self, other: &Self) -> bool {\n\n self.descends(other) && self.0 != other.0\n\n }\n\n\n\n pub fn add(&mut self, id: Id, version: Version) {\n\n match self.0.entry(id) {\n\n LMEntry::Vacant(vac) => {\n\n vac.insert(version);\n", "file_path": "src/version_vector.rs", "rank": 67, "score": 11.976344650595665 }, { "content": "\n\n fn encode(&mut self, item: Self::Item, dst: &mut BytesMut) -> io::Result<()> {\n\n dst.reserve(item.len());\n\n dst.put(&item);\n\n Ok(())\n\n }\n\n}\n\n\n\npub type FabricMsgFn = Box<Fn(NodeId, FabricMsg) + Sync + Send>;\n\npub type FabricConFn = Box<Fn(NodeId) + Sync + Send>;\n\n\n", "file_path": "src/fabric.rs", "rank": 68, "score": 11.184369120864083 }, { "content": "use std::cmp::Ordering;\n\nuse std::collections::hash_map::{Entry, RandomState};\n\nuse std::collections::{BinaryHeap, HashMap};\n\nuse std::fmt;\n\nuse std::hash::{BuildHasher, Hash};\n\nuse std::ops::Deref;\n\n\n\n// TODO: need a more efficient implementation and possibly more flexibility\n\n\n\n#[derive(Debug)]\n\npub struct InFlightMap<K: Hash + Eq + Copy, V, T: Ord + Copy, H: BuildHasher = RandomState> {\n\n map: HashMap<K, V, H>,\n\n heap: BinaryHeap<Pair<T, K>>,\n\n}\n\n\n\nimpl<K: Hash + Eq + Copy + fmt::Debug, V, T: Ord + Copy, H: 
BuildHasher + Default>\n\n InFlightMap<K, V, T, H>\n\n{\n\n pub fn new() -> Self {\n\n InFlightMap {\n", "file_path": "src/inflightmap.rs", "rank": 69, "score": 10.83518198944193 }, { "content": "use bytes::Bytes;\n\nuse std::error::Error;\n\nuse std::io::{self, Write};\n\nuse std::{fmt, str};\n\nuse utils::assume_str;\n\n\n\n#[derive(Eq, PartialEq, Debug)]\n\npub enum RespError {\n\n Incomplete,\n\n Invalid(&'static str),\n\n}\n\n\n\nimpl From<&'static str> for RespError {\n\n fn from(from: &'static str) -> Self {\n\n RespError::Invalid(from)\n\n }\n\n}\n\n\n\npub type RespResult<T> = Result<T, RespError>;\n\n\n", "file_path": "src/resp.rs", "rank": 70, "score": 10.80276876825542 }, { "content": "\n\n pub fn contains(&self, version: Version) -> bool {\n\n self.base >= version || self.bitmap.contains(version)\n\n }\n\n\n\n pub fn contains_all(&self, version: Version) -> bool {\n\n self.base >= version\n\n }\n\n\n\n /// self - other\n\n pub fn delta(&self, other: &Self) -> BitmappedVersionDelta {\n\n if self.base < other.base {\n\n return Default::default();\n\n }\n\n let last_version = cmp::max(self.bitmap.max().unwrap_or(0), self.base);\n\n BitmappedVersionDelta {\n\n from: other.clone(),\n\n to: self.clone(),\n\n pos: 0,\n\n len: last_version - other.base,\n", "file_path": "src/version_vector.rs", "rank": 71, "score": 10.521008068542082 }, { "content": "use std::cmp::min;\n\nuse std::collections::hash_map::Entry as HMEntry;\n\nuse std::collections::BTreeMap;\n\nuse std::net::SocketAddr;\n\nuse std::sync::{Arc, RwLock};\n\nuse std::time::{Duration, Instant};\n\nuse std::{fmt, thread};\n\n\n\nuse bincode;\n\nuse bytes::Bytes;\n\nuse linear_map::{Entry as LMEntry, LinearMap};\n\nuse rand::{thread_rng, Rng};\n\nuse serde::de::DeserializeOwned;\n\nuse serde::Serialize;\n\n\n\nuse config::Config;\n\nuse database::{NodeId, VNodeNo};\n\nuse fabric::{Fabric, FabricMsg, FabricMsgRef, FabricMsgType};\n\nuse hash::{hash_slot, HASH_SLOTS};\n\nuse types::PhysicalNodeId;\n\nuse 
utils::{split_u64, GenericError, IdHashMap, IdHashSet};\n\nuse version_vector::VersionVector;\n\n\n\n// can be called by the network thread or a worker doing a dht mutation\n\npub type DHTChangeFn = Box<Fn() + Send + Sync>;\n\n\n\n// rwlock needs metadata to be sync, as it's read concurrently by multiple threads\n", "file_path": "src/dht.rs", "rank": 72, "score": 10.516621610789521 }, { "content": "\n\n#[derive(Default)]\n\npub struct Context {\n\n pub token: Token,\n\n pub is_multi_cmd: bool,\n\n pub is_exec_cmd: bool,\n\n // response queue\n\n // if not multi the first element is threated as a standalone response\n\n // if multi it contains an array response\n\n pub response: Vec<RespValue>,\n\n // commands queue\n\n // if multi it may contain pending commands\n\n // the latest command is always at the back\n\n pub commands: Vec<RespValue>,\n\n pub reads: Vec<ContextRead>,\n\n pub writes: Vec<ContextWrite>,\n\n}\n\n\n\nimpl Context {\n\n pub fn new(token: Token) -> Self {\n", "file_path": "src/database.rs", "rank": 73, "score": 10.462298328318028 }, { "content": " pub fn for_each_dot<CB: FnMut(Id, Version)>(&self, mut cb: CB) {\n\n use self::Cube::*;\n\n match *self {\n\n Counter(ref a) => a.values.iter().for_each(|(&i, &(v, _))| cb(i, v)),\n\n Value(ref a) => a.values.iter().for_each(|(&(i, v), _)| cb(i, v)),\n\n Map(ref a) => a.dots.iter().for_each(|(i, v)| cb(i, v)),\n\n Set(ref a) => a.dots.iter().for_each(|(i, v)| cb(i, v)),\n\n Void(_) => unreachable!(),\n\n }\n\n }\n\n\n\n pub fn new(bvv: &BitmappedVersionVector) -> Cube {\n\n let mut vv = VersionVector::new();\n\n for (&n, bv) in bvv.iter() {\n\n vv.add(n, bv.base());\n\n }\n\n Cube::Void(vv)\n\n }\n\n\n\n pub fn del(&mut self, id: Id, version: Version, vv: &VersionVector) -> bool {\n", "file_path": "src/cubes.rs", "rank": 74, "score": 10.371789985513134 }, { "content": "\n\n pub fn contains(&self, id: Id, v: Version) -> bool {\n\n self.0.get(&id).map_or(false, |bv| bv.contains(v))\n\n }\n\n\n\n pub fn 
contains_all(&self, id: Id, v: Version) -> bool {\n\n self.0.get(&id).map_or(false, |bv| bv.contains_all(v))\n\n }\n\n\n\n pub fn iter_mut(&mut self) -> linear_map::IterMut<Id, BitmappedVersion> {\n\n self.0.iter_mut()\n\n }\n\n\n\n pub fn iter(&self) -> linear_map::Iter<Id, BitmappedVersion> {\n\n self.0.iter()\n\n }\n\n\n\n pub fn delta(&self, other: &Self) -> BitmappedVersionVectorDelta {\n\n let min_versions: Vec<_> = self\n\n .0\n", "file_path": "src/version_vector.rs", "rank": 75, "score": 10.288564891562764 }, { "content": "use std::convert::TryFrom;\n\nuse std::fmt;\n\nuse std::str::FromStr;\n\n\n\n/// Identifier for a Database instance\n\n/// node id should be a positive i64 to work nicelly with the RESP protocol\n\npub type NodeId = u64;\n\n/// Identifier for physical node (high u32 of NodeId)\n\npub type PhysicalNodeId = u32;\n\n/// Identifier for connection with client\n\npub type Token = u64;\n\n/// Identifier for a vnode\n\npub type VNodeNo = u16;\n\n\n\n/// Identifier for communication between nodes\n\n#[derive(PartialEq, Eq, Hash, Serialize, Deserialize, Default, Copy, Clone)]\n\npub struct Cookie(u64, u64);\n\n\n\nimpl Cookie {\n\n pub fn new(a: u64, b: u64) -> Self {\n", "file_path": "src/types.rs", "rank": 76, "score": 10.286263703157767 }, { "content": "use bincode;\n\nuse bytes::Bytes;\n\nuse cubes::Cube;\n\nuse database::*;\n\nuse fabric::*;\n\nuse inflightmap::InFlightMap;\n\nuse metrics::{self, Meter};\n\nuse std::collections::{hash_set, HashSet};\n\nuse std::time::{Duration, Instant};\n\nuse utils::IdHasherBuilder;\n\nuse version_vector::*;\n\nuse vnode::VNodeState;\n\n\n\n#[derive(Debug, Copy, Clone, PartialEq)]\n\n#[must_use]\n\npub enum SyncResult {\n\n Continue,\n\n Done,\n\n Error,\n\n}\n", "file_path": "src/vnode_sync.rs", "rank": 77, "score": 10.172969177013872 }, { "content": " }\n\n\n\n pub fn run(self) {\n\n let mut core = tokio::reactor::Core::new().unwrap();\n\n\n\n let token_chans: Arc<Mutex<IdHashMap<Token, 
fmpsc::UnboundedSender<_>>>> =\n\n Default::default();\n\n let token_chans_cloned = token_chans.clone();\n\n let response_fn = Box::new(move |context: DbContext| {\n\n let token = context.token;\n\n if let Some(chan) = token_chans_cloned.lock().unwrap().get_mut(&token) {\n\n if let Err(e) = chan.unbounded_send(context) {\n\n warn!(\"Can't send to token {} chan: {:?}\", token, e);\n\n }\n\n } else {\n\n debug!(\"Can't find response channel for token {:?}\", token);\n\n }\n\n });\n\n\n\n let database = Database::new(&self.config, response_fn);\n", "file_path": "src/server.rs", "rank": 78, "score": 10.16186901975294 }, { "content": "use std::collections::hash_map::Entry as HMEntry;\n\nuse std::net::SocketAddr;\n\nuse std::sync::atomic::{AtomicUsize, Ordering};\n\nuse std::sync::{mpsc, Arc, RwLock};\n\nuse std::time::Duration;\n\nuse std::{io, thread};\n\n\n\nuse bincode;\n\nuse byteorder::{LittleEndian, ReadBytesExt, WriteBytesExt};\n\nuse bytes::{BufMut, Bytes, BytesMut};\n\nuse linear_map::LinearMap;\n\nuse rand::{thread_rng, Rng};\n\n\n\nuse futures::future::Either;\n\nuse futures::sync::mpsc as fmpsc;\n\nuse futures::sync::oneshot as foneshot;\n\nuse futures::{Future, Sink, Stream};\n\nuse tokio_codec as codec;\n\nuse tokio_core as tokio;\n\nuse tokio_io::{io as tokio_io, AsyncRead};\n\n\n\nuse config::Config;\n\nuse database::NodeId;\n\npub use fabric_msg::*;\n\nuse utils::{into_io_error, GenericError, IdHashMap};\n\n\n\n// u32(le) payload len + bincode payload\n", "file_path": "src/fabric.rs", "rank": 80, "score": 9.969052465023683 }, { "content": " pub sync_msg_inflight: u32,\n\n pub dht_sync_on_connect: bool,\n\n pub dht_sync_aae: bool,\n\n pub fabric_timeout: u32,\n\n pub request_timeout: u32,\n\n pub client_connection_max: u32,\n\n pub value_version_max: u16,\n\n pub seed_nodes: Vec<SocketAddr>,\n\n // TODO: these should be in the cluster config instead\n\n pub consistency_read: ConsistencyLevel,\n\n pub consistency_write: ConsistencyLevel,\n\n}\n\n\n\nimpl 
Default for Config {\n\n fn default() -> Self {\n\n // Remember to update defaults in sucre.yaml!\n\n Config {\n\n data_dir: DEFAULT_DATA_DIR.into(),\n\n cluster_name: DEFAULT_CLUSTER_NAME.into(),\n\n listen_addr: DEFAULT_LISTEN_ADDR.parse().unwrap(),\n", "file_path": "src/config.rs", "rank": 81, "score": 9.868875934931204 }, { "content": " .send_msg(node_id, FabricMsgRef::DHTAE(&inner.ring.version));\n\n }\n\n }\n\n }\n\n\n\n pub fn handler_fabric_msg(&self, from: NodeId, msg: FabricMsg) {\n\n Self::on_message(&mut *self.inner.write().unwrap(), from, msg);\n\n }\n\n\n\n pub fn handler_tick(&self, time: Instant) {\n\n let r_inner = self.inner.read().unwrap();\n\n if r_inner.sync_aae && time >= r_inner.next_req_broadcast {\n\n trace!(\"Triggered AAE\");\n\n Self::broadcast_req(&*r_inner);\n\n drop(r_inner);\n\n self.inner.write().unwrap().next_req_broadcast +=\n\n Duration::from_millis(DHT_AAE_TRIGGER_INTERVAL_MS);\n\n }\n\n }\n\n\n", "file_path": "src/dht.rs", "rank": 82, "score": 9.723569038215647 }, { "content": " Ok(self.respond_resp(context, RespValue::Array(Default::default())))\n\n }\n\n }\n\n\n\n pub fn set(\n\n &self,\n\n context: &mut Context,\n\n key: &Bytes,\n\n mutator_fn: MutatorFn,\n\n consistency: ConsistencyLevel,\n\n reply_result: bool,\n\n response_fn: Option<ResponseFn>,\n\n ) -> Result<(), CommandError> {\n\n debug_assert!(!context.is_exec_cmd);\n\n context.writes.push(ContextWrite {\n\n version: 0,\n\n mutator_fn: Some(mutator_fn),\n\n key: key.clone(),\n\n cube: Default::default(),\n\n reply_result: reply_result,\n", "file_path": "src/database.rs", "rank": 83, "score": 9.542422939520577 }, { "content": "use bytes::Bytes;\n\nuse command::CommandError;\n\nuse config::Config;\n\nuse cubes::*;\n\nuse dht::{RingDescription, DHT};\n\nuse fabric::*;\n\nuse metrics::{self, Gauge};\n\nuse rand::{thread_rng, Rng};\n\nuse resp::RespValue;\n\nuse std::sync::{Arc, Mutex, RwLock};\n\nuse std::{net, time};\n\nuse storage::{Storage, StorageManager};\n\npub 
use types::*;\n\nuse utils::LoggerExt;\n\nuse utils::{assume_str, is_dir_empty_or_absent, join_u64, replace_default, split_u64};\n\nuse version_vector::Version;\n\nuse vnode::*;\n\nuse vnode_sync::SyncDirection;\n\nuse workers::*;\n\n\n", "file_path": "src/database.rs", "rank": 84, "score": 9.53569976729573 }, { "content": "impl Map {\n\n fn with(vv: VersionVector) -> Self {\n\n Map {\n\n values: Default::default(),\n\n dots: Default::default(),\n\n vv,\n\n }\n\n }\n\n\n\n pub fn insert(&mut self, node: Id, version: Version, key: Bytes, value: Bytes) -> bool {\n\n let result = self\n\n .values\n\n .insert(key, MapValue::new((node, version), value))\n\n .is_none();\n\n self.vv.add(node, version);\n\n self.dots.add(node, version);\n\n result\n\n }\n\n\n\n pub fn remove(&mut self, node: Id, version: Version, key: &[u8]) -> bool {\n", "file_path": "src/cubes.rs", "rank": 85, "score": 9.261176761880591 }, { "content": " pub fn set_callback(&self, callback: DHTChangeFn) {\n\n self.inner.write().unwrap().callback = Some(callback);\n\n }\n\n\n\n pub fn node(&self) -> NodeId {\n\n self.node\n\n }\n\n\n\n pub fn partitions(&self) -> usize {\n\n // FIXME: should not lock\n\n self.inner.read().unwrap().ring.vnodes.len()\n\n }\n\n\n\n pub fn replication_factor(&self) -> usize {\n\n self.inner.read().unwrap().ring.replication_factor\n\n }\n\n\n\n pub fn key_vnode(&self, key: &[u8]) -> VNodeNo {\n\n // use / instead of % to get continuous hash slots for each vnode\n\n (hash_slot(key) / (HASH_SLOTS / self.partitions() as VNodeNo)) as VNodeNo\n", "file_path": "src/dht.rs", "rank": 86, "score": 9.06815995114639 }, { "content": "use std::cmp::max;\n\nuse std::convert::TryInto;\n\nuse std::fs::File;\n\nuse std::io::Read;\n\nuse std::net::SocketAddr;\n\nuse std::path::{Path, PathBuf};\n\nuse std::str::FromStr;\n\n\n\nuse log;\n\nuse log4rs;\n\nuse num_cpus;\n\nuse serde_yaml as yaml;\n\n\n\nuse types::ConsistencyLevel;\n\nuse utils::GenericError;\n\n\n\n// Remember to update defaults 
in sucredb.yaml!\n\npub const DEFAULT_LISTEN_ADDR: &str = \"127.0.0.1:6379\";\n\npub const DEFAULT_FABRIC_ADDR: &str = \"127.0.0.1:16379\";\n\npub const DEFAULT_CLUSTER_NAME: &str = \"default\";\n", "file_path": "src/config.rs", "rank": 87, "score": 8.991621853649349 }, { "content": " use self::Cube::*;\n\n match *self {\n\n Counter(ref mut a) => a.clear(id, version),\n\n Value(ref mut a) => a.set(id, version, None, vv),\n\n Map(ref mut a) => a.clear(id, version),\n\n Set(ref mut a) => a.clear(id, version),\n\n Void(_) => return false,\n\n }\n\n true\n\n }\n\n\n\n pub fn merge(self, other: Self) -> Self {\n\n use self::Cube::*;\n\n match (self, other) {\n\n (Counter(a), Counter(b)) => Counter(a.merge(b)),\n\n (Value(a), Value(b)) => Value(a.merge(b)),\n\n (Map(a), Map(b)) => Map(a.merge(b)),\n\n (Set(a), Set(b)) => Set(a.merge(b)),\n\n (Void(vv), a) | (a, Void(vv)) => match a {\n\n Counter(a) => Counter(a.merge(self::Counter::with(vv))),\n", "file_path": "src/cubes.rs", "rank": 88, "score": 8.7721000322986 }, { "content": "use byteorder::{LittleEndian, ReadBytesExt, WriteBytesExt};\n\nuse linear_map::set::LinearSet;\n\nuse linear_map::{self, Entry as LMEntry, LinearMap};\n\nuse roaring::{RoaringBitmap, RoaringTreemap};\n\nuse serde;\n\nuse std::hash::Hash;\n\nuse std::{cmp, str};\n\nuse types::NodeId;\n\n\n\npub type Id = NodeId;\n\npub type Version = u64;\n\n/// A Dot is a Node Version Pair\n\n// pub type Dot = (NodeId, Version);\n\n\n\n#[derive(Debug, Clone, PartialEq, Default, Serialize, Deserialize)]\n\npub struct BitmappedVersion {\n\n base: Version,\n\n #[serde(\n\n serialize_with = \"serialize_bitmap\",\n\n deserialize_with = \"deserialize_bitmap\"\n", "file_path": "src/version_vector.rs", "rank": 89, "score": 8.768814143238597 }, { "content": "use std::{cmp, thread, io, time, fmt};\n\nuse std::time::{Duration, Instant};\n\nuse std::net::SocketAddr;\n\nuse std::marker::PhantomData;\n\nuse std::sync::{mpsc, Arc, Mutex};\n\nuse 
std::collections::HashMap;\n\n\n\nuse rand::{thread_rng, Rng};\n\nuse serde::Serialize;\n\nuse serde::de::DeserializeOwned;\n\nuse bincode;\n\nuse futures::{Future, Stream, Sink};\n\nuse futures::sync::mpsc as fmpsc;\n\nuse futures::sync::oneshot as foneshot;\n\nuse tokio_core as tokio;\n\n\n\nuse inflightmap::InFlightMap;\n\nuse utils::into_io_error;\n\n\n\nconst PACKET_SIZE: usize = 1400;\n", "file_path": "src/gossip.rs", "rank": 91, "score": 8.532059513025558 }, { "content": "\n\n pub fn from_vv(vv: VersionVector) -> Self {\n\n DeltaVersionVector(unsafe { ::std::mem::transmute::<LinearMap<_, _>, Vec<_>>(vv.0) })\n\n }\n\n\n\n fn contains(&self, id: Id, version: Version) -> bool {\n\n assert!(version != 0, \"Can't check for version 0\");\n\n let mut p = if let Some(p) = self.0.iter().position(|x| x.0 == id) {\n\n p\n\n } else {\n\n return false;\n\n };\n\n if self.0[p].1 <= version {\n\n return true;\n\n }\n\n p += 1;\n\n while p < self.0.len() {\n\n if self.0[p].0 != id {\n\n return false;\n\n }\n", "file_path": "src/version_vector.rs", "rank": 92, "score": 8.526799002060814 }, { "content": "\n\n#[derive(Debug, Serialize, Deserialize)]\n\npub struct MsgRemoteSet {\n\n pub vnode: VNodeNo,\n\n pub cookie: Cookie,\n\n pub writes: Vec<(Bytes, Cube, bool)>,\n\n pub reply: bool,\n\n}\n\n\n\n#[derive(Debug, Serialize, Deserialize)]\n\npub struct MsgRemoteSetAck {\n\n pub vnode: VNodeNo,\n\n pub cookie: Cookie,\n\n pub result: Result<Vec<Option<Cube>>, FabricError>,\n\n}\n\n\n\n#[derive(Debug, Serialize, Deserialize)]\n\npub struct MsgSyncStart {\n\n pub vnode: VNodeNo,\n\n pub cookie: Cookie,\n", "file_path": "src/fabric_msg.rs", "rank": 93, "score": 8.457457672572362 }, { "content": " inner.ring = proposal(inner.ring.clone())?;\n\n info!(\"Proposing new ring version {:?}\", inner.ring.version);\n\n Self::broadcast(&mut *inner);\n\n Self::call_callback(&mut *inner);\n\n Ok(())\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use config::Config;\n\n 
use env_logger;\n\n use fabric::Fabric;\n\n use rand::{thread_rng, Rng};\n\n use utils::join_u64;\n\n use utils::sleep_ms;\n\n\n\n #[test]\n\n fn test_ring_dup_join() {\n", "file_path": "src/dht.rs", "rank": 94, "score": 8.451344871970157 }, { "content": "use bytes::Bytes;\n\nuse cubes::Cube;\n\nuse database::*;\n\nuse version_vector::*;\n\n\n\n#[derive(Debug, Copy, Clone)]\n\npub enum FabricMsgType {\n\n Crud,\n\n Synch,\n\n DHT,\n\n Unknown,\n\n}\n\n\n\n#[derive(Debug, Serialize, Deserialize, PartialEq, Eq)]\n\npub enum FabricError {\n\n NoRoute,\n\n CookieNotFound,\n\n BadVNodeStatus,\n\n NotReady,\n\n SyncInterrupted,\n", "file_path": "src/fabric_msg.rs", "rank": 95, "score": 8.287937320202559 }, { "content": "use std::collections::{HashMap, HashSet};\n\nuse std::error::Error;\n\nuse std::hash::{BuildHasherDefault, Hasher};\n\nuse std::{fmt, fs, io, path};\n\n\n\npub type GenericError = Box<Error + Send + Sync + 'static>;\n\n\n\npub type IdHasherBuilder = BuildHasherDefault<IdHasher>;\n\npub type IdHashMap<K, V> = HashMap<K, V, IdHasherBuilder>;\n\npub type IdHashSet<K> = HashSet<K, IdHasherBuilder>;\n\npub struct IdHasher(u64);\n\n\n\nimpl Default for IdHasher {\n\n #[inline]\n\n fn default() -> IdHasher {\n\n IdHasher(0)\n\n }\n\n}\n\n\n\nimpl Hasher for IdHasher {\n", "file_path": "src/utils.rs", "rank": 96, "score": 8.132527911909435 }, { "content": " pub fn add(&mut self, version: Version) -> bool {\n\n if version == self.base + 1 {\n\n self.base += 1;\n\n } else if version > self.base {\n\n if !self.bitmap.insert(version) {\n\n return false;\n\n }\n\n } else {\n\n return false;\n\n };\n\n self.norm();\n\n true\n\n }\n\n\n\n pub fn add_all(&mut self, version: Version) -> bool {\n\n if version > self.base {\n\n self.base = version;\n\n self.norm();\n\n true\n\n } else {\n", "file_path": "src/version_vector.rs", "rank": 97, "score": 8.105095119542748 }, { "content": " }\n\n }\n\n result\n\n }\n\n\n\n pub fn new(base: Version, bitmap: u32) -> BitmappedVersion 
{\n\n BitmappedVersion {\n\n base: base,\n\n bitmap: Self::int_to_bitmap(base, bitmap),\n\n }\n\n }\n\n\n\n pub fn merge(&mut self, other: &Self) {\n\n self.bitmap |= &other.bitmap;\n\n if self.base < other.base {\n\n self.base = other.base;\n\n }\n\n self.norm();\n\n }\n\n\n", "file_path": "src/version_vector.rs", "rank": 98, "score": 8.057007815683189 }, { "content": " let p = buffer.read_u32::<LittleEndian>().map_err(Error::custom)?;\n\n let b = RoaringBitmap::deserialize_from(&mut buffer).map_err(Error::custom)?;\n\n bitmaps.push((p, b));\n\n }\n\n Ok(RoaringTreemap::from_bitmaps(bitmaps))\n\n })\n\n}\n\n\n\nimpl BitmappedVersionVector {\n\n pub fn new() -> Self {\n\n BitmappedVersionVector(Default::default())\n\n }\n\n\n\n pub fn clear(&mut self) {\n\n self.0.clear();\n\n }\n\n\n\n pub fn from_version(id: Id, bv: BitmappedVersion) -> Self {\n\n let mut bvv = Self::new();\n\n bvv.0.insert(id, bv);\n", "file_path": "src/version_vector.rs", "rank": 99, "score": 7.873943194033969 } ]
Rust
src/lib.rs
jonas-schievink/vmem
bc31326d5e202c34444243802cb6ffab07dcff02
#![doc(html_root_url = "https://docs.rs/vmem/0.1.0")] #![warn(missing_debug_implementations)] #![warn(missing_docs)] #[cfg(unix)] #[path = "unix.rs"] mod imp; #[cfg(windows)] #[path = "win.rs"] mod imp; use failure::{Backtrace, Fail}; use std::marker::PhantomData; use std::ops::Range; use std::sync::Mutex; use std::{fmt, io}; #[derive(Debug)] pub struct ReservedMemory { addr: usize, len: usize, allocations: Mutex<Allocations>, } impl ReservedMemory { pub fn reserve(bytes: usize) -> Self { Self::try_reserve(bytes).expect("failed to reserve address space") } pub fn try_reserve(bytes: usize) -> Result<Self, Error> { match imp::reserve(bytes) { Ok(ptr) => Ok(Self { addr: ptr as usize, len: bytes, allocations: Mutex::new(Allocations { list: Vec::new() }), }), Err(e) => Err(ErrorKind::Os(e).into()), } } pub fn addr(&self) -> usize { self.addr } pub fn page_size(&self) -> usize { page_size::get() } pub fn allocate(&self, offset: usize, bytes: usize) -> Result<AllocatedMemory, Error> { self.addr .checked_add(offset) .and_then(|sum| sum.checked_add(bytes)) .ok_or_else(|| ErrorKind::TooLarge)?; if offset + bytes > self.len { return Err(ErrorKind::TooLarge.into()); } if bytes == 0 { return Err(ErrorKind::ZeroSize.into()); } if offset & (self.page_size() - 1) != 0 { return Err(ErrorKind::NotAligned(offset).into()); } let bytes = bytes + self.page_size() - 1; let bytes = bytes & !(self.page_size() - 1); let mut allocs = self.allocations.lock().unwrap(); if allocs.find_allocation_overlapping(offset).is_ok() { return Err(ErrorKind::Overlap.into()); } let addr = self.addr + offset; imp::alloc(addr, bytes).map_err(ErrorKind::Os)?; allocs.register_allocation(offset, bytes); Ok(AllocatedMemory { addr, len: bytes, _p: PhantomData, }) } } impl Drop for ReservedMemory { fn drop(&mut self) { unsafe { imp::unreserve(self.addr, self.len).expect("failed to deallocate memory"); } } } #[derive(Debug)] struct Allocations { list: Vec<Range<usize>>, } impl Allocations { fn 
register_allocation(&mut self, offset: usize, size: usize) { let idx = match self.find_allocation_overlapping(offset) { Ok(_) => { panic!( "new allocation at offset {} and size {} overlaps existing one", offset, size ); } Err(idx) => idx, }; self.list.insert(idx, offset..offset + size); } fn find_allocation_overlapping(&self, offset: usize) -> Result<usize, usize> { use std::cmp::Ordering; self.list.binary_search_by(|alloc| { if alloc.end <= offset { Ordering::Less } else if alloc.start > offset { Ordering::Greater } else { assert!(alloc.start <= offset && alloc.end > offset); Ordering::Equal } }) } } #[derive(Debug)] pub struct AllocatedMemory<'a> { addr: usize, len: usize, _p: PhantomData<&'a ()>, } impl<'a> AllocatedMemory<'a> { pub fn addr(&self) -> usize { self.addr } pub fn len(&self) -> usize { self.len } pub fn set_protection(&mut self, prot: Protection) { imp::protect(self.addr, self.len, prot).expect("could not change protection") } } #[derive(Debug, Copy, Clone)] pub enum Protection { ReadOnly, ReadWrite, ReadExecute, } #[derive(Debug)] pub struct Error { inner: ErrorKind, } impl Fail for Error { fn cause(&self) -> Option<&Fail> { self.inner.cause() } fn backtrace(&self) -> Option<&Backtrace> { self.inner.backtrace() } } impl fmt::Display for Error { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { self.inner.fmt(f) } } impl From<ErrorKind> for Error { fn from(e: ErrorKind) -> Self { Self { inner: e } } } #[derive(Fail, Debug)] enum ErrorKind { #[fail(display = "operating system reported error: {}", _0)] Os(#[cause] io::Error), #[fail(display = "requested size is too large")] TooLarge, #[fail(display = "zero-sized allocation requested")] ZeroSize, #[fail(display = "requested allocation overlaps an existing one")] Overlap, #[fail(display = "requested location {:#X} is not page-aligned", _0)] NotAligned(usize), } /* Tests: * Test for leaks * `mem::forget` AllocatedMemory, then drop ReservedMemory normally - should not leak anything! 
*/ #[cfg(test)] mod tests { use super::*; use std::ptr; #[test] fn reserve() { let mem = ReservedMemory::reserve(1024 * 1024); drop(mem); } #[test] fn alloc() { let mem = ReservedMemory::reserve(1024 * 1024); mem.allocate(0, 1).expect("failed to allocate page"); mem.allocate(0, 1).expect_err("allocated page twice"); mem.allocate(page_size::get() - 1, 1) .expect_err("allocated first page twice (at end)"); mem.allocate(page_size::get(), 1) .expect("failed to allocate second page"); mem.allocate(page_size::get(), 1) .expect_err("allocated second page twice"); mem.allocate(0, 1).expect_err("allocated page twice"); let mem = ReservedMemory::reserve(1024 * 1024); mem.allocate(0, page_size::get()) .expect("failed to allocate"); mem.allocate(page_size::get(), 1) .expect("failed to allocate second page"); } #[test] fn alloc_same_page_different_offset() { let mem = ReservedMemory::reserve(1024 * 1024); mem.allocate(0, 1).expect("failed to allocate page"); mem.allocate(1, 1).expect_err("allocated page twice"); mem.allocate(page_size::get() - 10, 1) .expect_err("allocated page twice"); } #[test] fn doesnt_fit() { let mem = ReservedMemory::reserve(1024 * 1024); mem.allocate(1024 * 1024 - page_size::get(), page_size::get() + 1) .expect_err("allocated more than last page"); mem.allocate(1024 * 1024, 1) .expect_err("allocated past last page"); mem.allocate(1024 * 1024 * 256, 1) .expect_err("allocated past last page"); mem.allocate(1024 * 1024 - page_size::get(), page_size::get()) .expect("couldn't allocate last page"); } #[test] fn page_boundary() { let mem = ReservedMemory::reserve(1024 * 1024); mem.allocate(1024 + 1, 1) .expect_err("allocation not on page boundary succeeded"); } #[test] fn absurdly_large() { ReservedMemory::try_reserve(1024 * 1024 * 1024 * 1024 * 500).unwrap_err(); } #[test] fn access() { let mem = ReservedMemory::reserve(1024 * 1024); let alloc = mem .allocate(0, page_size::get()) .expect("failed to allocate"); for addr in alloc.addr()..alloc.addr() + 
alloc.len() { unsafe { ptr::read(addr as *const u8); } } } }
#![doc(html_root_url = "https://docs.rs/vmem/0.1.0")] #![warn(missing_debug_implementations)] #![warn(missing_docs)] #[cfg(unix)] #[path = "unix.rs"] mod imp; #[cfg(windows)] #[path = "win.rs"] mod imp; use failure::{Backtrace, Fail}; use std::marker::PhantomData; use std::ops::Range; use std::sync::Mutex; use std::{fmt, io}; #[derive(Debug)] pub struct ReservedMemory { addr: usize, len: usize, allocations: Mutex<Allocations>, } impl ReservedMemory { pub fn reserve(bytes: usize) -> Self { Self::try_reserve(bytes).expect("failed to reserve address space") } pub fn try_reserve(bytes: usize) -> Result<Self, Error> {
} pub fn addr(&self) -> usize { self.addr } pub fn page_size(&self) -> usize { page_size::get() } pub fn allocate(&self, offset: usize, bytes: usize) -> Result<AllocatedMemory, Error> { self.addr .checked_add(offset) .and_then(|sum| sum.checked_add(bytes)) .ok_or_else(|| ErrorKind::TooLarge)?; if offset + bytes > self.len { return Err(ErrorKind::TooLarge.into()); } if bytes == 0 { return Err(ErrorKind::ZeroSize.into()); } if offset & (self.page_size() - 1) != 0 { return Err(ErrorKind::NotAligned(offset).into()); } let bytes = bytes + self.page_size() - 1; let bytes = bytes & !(self.page_size() - 1); let mut allocs = self.allocations.lock().unwrap(); if allocs.find_allocation_overlapping(offset).is_ok() { return Err(ErrorKind::Overlap.into()); } let addr = self.addr + offset; imp::alloc(addr, bytes).map_err(ErrorKind::Os)?; allocs.register_allocation(offset, bytes); Ok(AllocatedMemory { addr, len: bytes, _p: PhantomData, }) } } impl Drop for ReservedMemory { fn drop(&mut self) { unsafe { imp::unreserve(self.addr, self.len).expect("failed to deallocate memory"); } } } #[derive(Debug)] struct Allocations { list: Vec<Range<usize>>, } impl Allocations { fn register_allocation(&mut self, offset: usize, size: usize) { let idx = match self.find_allocation_overlapping(offset) { Ok(_) => { panic!( "new allocation at offset {} and size {} overlaps existing one", offset, size ); } Err(idx) => idx, }; self.list.insert(idx, offset..offset + size); } fn find_allocation_overlapping(&self, offset: usize) -> Result<usize, usize> { use std::cmp::Ordering; self.list.binary_search_by(|alloc| { if alloc.end <= offset { Ordering::Less } else if alloc.start > offset { Ordering::Greater } else { assert!(alloc.start <= offset && alloc.end > offset); Ordering::Equal } }) } } #[derive(Debug)] pub struct AllocatedMemory<'a> { addr: usize, len: usize, _p: PhantomData<&'a ()>, } impl<'a> AllocatedMemory<'a> { pub fn addr(&self) -> usize { self.addr } pub fn len(&self) -> usize { self.len } pub 
fn set_protection(&mut self, prot: Protection) { imp::protect(self.addr, self.len, prot).expect("could not change protection") } } #[derive(Debug, Copy, Clone)] pub enum Protection { ReadOnly, ReadWrite, ReadExecute, } #[derive(Debug)] pub struct Error { inner: ErrorKind, } impl Fail for Error { fn cause(&self) -> Option<&Fail> { self.inner.cause() } fn backtrace(&self) -> Option<&Backtrace> { self.inner.backtrace() } } impl fmt::Display for Error { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { self.inner.fmt(f) } } impl From<ErrorKind> for Error { fn from(e: ErrorKind) -> Self { Self { inner: e } } } #[derive(Fail, Debug)] enum ErrorKind { #[fail(display = "operating system reported error: {}", _0)] Os(#[cause] io::Error), #[fail(display = "requested size is too large")] TooLarge, #[fail(display = "zero-sized allocation requested")] ZeroSize, #[fail(display = "requested allocation overlaps an existing one")] Overlap, #[fail(display = "requested location {:#X} is not page-aligned", _0)] NotAligned(usize), } /* Tests: * Test for leaks * `mem::forget` AllocatedMemory, then drop ReservedMemory normally - should not leak anything! 
*/ #[cfg(test)] mod tests { use super::*; use std::ptr; #[test] fn reserve() { let mem = ReservedMemory::reserve(1024 * 1024); drop(mem); } #[test] fn alloc() { let mem = ReservedMemory::reserve(1024 * 1024); mem.allocate(0, 1).expect("failed to allocate page"); mem.allocate(0, 1).expect_err("allocated page twice"); mem.allocate(page_size::get() - 1, 1) .expect_err("allocated first page twice (at end)"); mem.allocate(page_size::get(), 1) .expect("failed to allocate second page"); mem.allocate(page_size::get(), 1) .expect_err("allocated second page twice"); mem.allocate(0, 1).expect_err("allocated page twice"); let mem = ReservedMemory::reserve(1024 * 1024); mem.allocate(0, page_size::get()) .expect("failed to allocate"); mem.allocate(page_size::get(), 1) .expect("failed to allocate second page"); } #[test] fn alloc_same_page_different_offset() { let mem = ReservedMemory::reserve(1024 * 1024); mem.allocate(0, 1).expect("failed to allocate page"); mem.allocate(1, 1).expect_err("allocated page twice"); mem.allocate(page_size::get() - 10, 1) .expect_err("allocated page twice"); } #[test] fn doesnt_fit() { let mem = ReservedMemory::reserve(1024 * 1024); mem.allocate(1024 * 1024 - page_size::get(), page_size::get() + 1) .expect_err("allocated more than last page"); mem.allocate(1024 * 1024, 1) .expect_err("allocated past last page"); mem.allocate(1024 * 1024 * 256, 1) .expect_err("allocated past last page"); mem.allocate(1024 * 1024 - page_size::get(), page_size::get()) .expect("couldn't allocate last page"); } #[test] fn page_boundary() { let mem = ReservedMemory::reserve(1024 * 1024); mem.allocate(1024 + 1, 1) .expect_err("allocation not on page boundary succeeded"); } #[test] fn absurdly_large() { ReservedMemory::try_reserve(1024 * 1024 * 1024 * 1024 * 500).unwrap_err(); } #[test] fn access() { let mem = ReservedMemory::reserve(1024 * 1024); let alloc = mem .allocate(0, page_size::get()) .expect("failed to allocate"); for addr in alloc.addr()..alloc.addr() + 
alloc.len() { unsafe { ptr::read(addr as *const u8); } } } }
match imp::reserve(bytes) { Ok(ptr) => Ok(Self { addr: ptr as usize, len: bytes, allocations: Mutex::new(Allocations { list: Vec::new() }), }), Err(e) => Err(ErrorKind::Os(e).into()), }
if_condition
[]
Rust
notedata/src/sm_parser.rs
martensm/rustmania
c5f40dcd5f8bf7a555cd8a7d470594409a7ccc8f
use crate::{ parser_generic::{beat_pair, comma_separated, stepmania_tag, ws_trimmed}, Chart, DisplayBpm, Measure, Note, NoteData, NoteRow, NoteType, }; use nom::{ branch::alt, bytes::complete::{tag, take_until}, character::complete::{char, multispace1, none_of, not_line_ending}, combinator::map, error::ErrorKind, multi::{count, fold_many0, fold_many1, many0, separated_nonempty_list}, number::complete::double, sequence::{preceded, separated_pair, terminated}, Err, IResult, }; use num_rational::Rational32; fn display_bpm(input: &str) -> IResult<&str, DisplayBpm> { alt(( map( separated_pair(double, ws_trimmed(char(':')), double), |(min, max)| DisplayBpm::Range(min, max), ), map(double, DisplayBpm::Static), map(char('*'), |_| DisplayBpm::Random), ))(input) } fn notetype(input: &str) -> IResult<&str, Option<NoteType>> { map(none_of("\r\n,"), into_sm_notetype)(input) } fn into_sm_notetype(sm_char: char) -> Option<NoteType> { match sm_char { '1' => Some(NoteType::Tap), '2' => Some(NoteType::Hold), '3' => Some(NoteType::HoldEnd), '4' => Some(NoteType::Roll), 'M' => Some(NoteType::Mine), 'L' => Some(NoteType::Lift), 'F' => Some(NoteType::Fake), _ => None, } } fn noterow(input: &str) -> IResult<&str, NoteRow> { map( fold_many1(notetype, (vec![], 0), |(mut noterow, mut index), item| { if let Some(item) = item { noterow.push(Note::new(item, index)) } index += 1; (noterow, index) }), |(noterow, _)| noterow, )(input) } fn measure(input: &str) -> IResult<&str, Measure> { map( fold_many0( terminated(noterow, multispace1), (vec![], 0), |(mut noterows, mut index), item| { if !item.is_empty() { noterows.push((item, index)) } index += 1; (noterows, index) }, ), |(noterows, total)| { noterows .into_iter() .map(|(item, index)| (item, Rational32::new(index, total))) .collect() }, )(input) } fn chart(input: &str) -> IResult<&str, Chart> { preceded( terminated( count(terminated(take_until(":"), char(':')), 5), many0(alt((comment, multispace1))), ), separated_nonempty_list( preceded( 
many0(alt((comment, multispace1))), terminated(char(','), many0(alt((comment, multispace1)))), ), measure, ), )(input) } fn comment(input: &str) -> IResult<&str, &str> { preceded(tag("//"), not_line_ending)(input) } fn notedata(input: &str) -> IResult<&str, NoteData> { let mut input = input; let mut nd = NoteData::new(); while let Ok((output, (tag, value))) = preceded(take_until("#"), stepmania_tag)(input) { input = output; if !value.trim().is_empty() { match tag { "TITLE" => nd.meta.title = Some(value.to_owned()), "SUBTITLE" => nd.meta.subtitle = Some(value.to_owned()), "ARTIST" => nd.meta.artist = Some(value.to_owned()), "TITLETRANSLIT" => nd.meta.title_translit = Some(value.to_owned()), "SUBTITLETRANSLIT" => nd.meta.subtitle_translit = Some(value.to_owned()), "ARTISTTRANSLIT" => nd.meta.artist_translit = Some(value.to_owned()), "GENRE" => nd.meta.genre = Some(value.to_owned()), "CREDIT" => nd.meta.credit = Some(value.to_owned()), "BANNER" => nd.meta.banner_path = Some(value.to_owned()), "BACKGROUND" => nd.meta.background_path = Some(value.to_owned()), "LYRICSPATH" => nd.meta.lyrics_path = Some(value.to_owned()), "CDTITLE" => nd.meta.cd_title = Some(value.to_owned()), "MUSIC" => nd.meta.music_path = Some(value.to_owned()), "SAMPLESTART" => nd.meta.sample_start = Some(ws_trimmed(double)(value)?.1), "SAMPLELENGTH" => nd.meta.sample_length = Some(ws_trimmed(double)(value)?.1), "OFFSET" => nd.structure.offset = Some(-ws_trimmed(double)(value)?.1), "DISPLAYBPM" => nd.meta.display_bpm = Some(ws_trimmed(display_bpm)(value)?.1), "BPMS" => { nd.structure.bpms = ws_trimmed(comma_separated(beat_pair(double, 4.0)))(value)?.1 } "STOPS" => { nd.structure.stops = Some(ws_trimmed(comma_separated(beat_pair(double, 4.0)))(value)?.1) } "NOTES" => nd.charts.push(chart(value)?.1), _ => {} } } } Ok((input, nd)) } pub fn parse(input: &str) -> Result<NoteData, Err<(&str, ErrorKind)>> { notedata(input).map(|notedata| notedata.1) } #[cfg(test)] mod tests { use super::*; use 
crate::{BeatPair, ChartMetadata, StructureData}; use nom::Err::Error; #[test] fn parse_display_bpm() { assert_eq!( display_bpm("1.2 : 3.4 foo"), Ok((" foo", DisplayBpm::Range(1.2, 3.4))) ); assert_eq!(display_bpm("1.2foo"), Ok(("foo", DisplayBpm::Static(1.2)))); assert_eq!(display_bpm("*"), Ok(("", DisplayBpm::Random))); } #[test] fn parse_notetype() { assert_eq!(notetype("1foo"), Ok(("foo", Some(NoteType::Tap)))); assert_eq!(notetype("2foo"), Ok(("foo", Some(NoteType::Hold)))); assert_eq!(notetype("3foo"), Ok(("foo", Some(NoteType::HoldEnd)))); assert_eq!(notetype("4foo"), Ok(("foo", Some(NoteType::Roll)))); assert_eq!(notetype("Mfoo"), Ok(("foo", Some(NoteType::Mine)))); assert_eq!(notetype("Lfoo"), Ok(("foo", Some(NoteType::Lift)))); assert_eq!(notetype("Ffoo"), Ok(("foo", Some(NoteType::Fake)))); assert_eq!(notetype("0foo"), Ok(("foo", None))); assert_eq!(notetype("\rfoo"), Err(Error(("\rfoo", ErrorKind::NoneOf)))); assert_eq!(notetype("\nfoo"), Err(Error(("\nfoo", ErrorKind::NoneOf)))); assert_eq!(notetype(",foo"), Err(Error((",foo", ErrorKind::NoneOf)))); } #[test] fn parse_noterow() { assert_eq!( noterow("0101\n"), Ok(( "\n", vec![Note::new(NoteType::Tap, 1), Note::new(NoteType::Tap, 3)] )) ); } #[test] fn parse_measure() { assert_eq!( measure( "0000\n \ 0100\n \ 0000\n \ 0010\n \ 0000\n" ), Ok(( "", vec![ (vec![Note::new(NoteType::Tap, 1)], Rational32::new(1, 5)), (vec![Note::new(NoteType::Tap, 2)], Rational32::new(3, 5)), ] )) ); } #[test] fn parse_comment() { assert_eq!(comment("// foo\nbar"), Ok(("\nbar", " foo"))); } #[test] fn parse_chart() { assert_eq!( chart( " foo:: bar :: 0.000,0.000 :\n\n \ 0000\n \ 0100\n \ 0000\n \ 0000\n \ , // baz\n 0000\n \ 0000\n \ 0010\n \ 0000\n" ), Ok(( "", vec![ vec![(vec![Note::new(NoteType::Tap, 1)], Rational32::new(1, 4))], vec![(vec![Note::new(NoteType::Tap, 2)], Rational32::new(1, 2))], ] )) ); } #[test] fn parse_notedata() { assert_eq!( notedata( "content that is #TITLE:bar1; not part of a tag is discarded 
#SUBTITLE:bar2;#ARTIST:bar3;#TITLETRANSLIT:bar4;#SUBTITLETRANSLIT:bar5; #ARTISTTRANSLIT:bar6;#GENRE:bar7;#CREDIT:bar8;#BANNER:bar9; #BACKGROUND:bar10;#LYRICSPATH:bar11;#CDTITLE:bar12;#MUSIC:bar13; #SAMPLESTART: 1.2 ;#SAMPLELENGTH: 3.4 ;#BPMS: 1.0=2 ; #STOPS: 3.0=4 ;#OFFSET: 1 ;#DISPLAYBPM: * ;#STOPS: ; #NOTES: ::::: \ 0000\n \ 0100\n \ 0000\n \ 0000\n \ ; #NOTES: ::::: \ 0000\n \ 0000\n \ 0010\n \ 0000\n \ ;" ), Ok(( "", NoteData { meta: ChartMetadata { title: Some("bar1".to_owned()), subtitle: Some("bar2".to_owned()), artist: Some("bar3".to_owned()), title_translit: Some("bar4".to_owned()), subtitle_translit: Some("bar5".to_owned()), artist_translit: Some("bar6".to_owned()), genre: Some("bar7".to_owned()), credit: Some("bar8".to_owned()), banner_path: Some("bar9".to_owned()), background_path: Some("bar10".to_owned()), lyrics_path: Some("bar11".to_owned()), cd_title: Some("bar12".to_owned()), music_path: Some("bar13".to_owned()), sample_start: Some(1.2), sample_length: Some(3.4), display_bpm: Some(DisplayBpm::Random), background_changes: None, foreground_changes: None, selectable: None, }, structure: StructureData { bpms: vec![BeatPair::from_pair(1. / 4.0, 2.0).unwrap()], stops: Some(vec![BeatPair::from_pair(3. / 4.0, 4.0).unwrap()]), offset: Some(-1.0), }, charts: vec![ vec![vec![( vec![Note::new(NoteType::Tap, 1)], Rational32::new(1, 4), )]], vec![vec![( vec![Note::new(NoteType::Tap, 2)], Rational32::new(2, 4), )]], ], } )) ); } }
use crate::{ parser_generic::{beat_pair, comma_separated, stepmania_tag, ws_trimmed}, Chart, DisplayBpm, Measure, Note, NoteData, NoteRow, NoteType, }; use nom::{ branch::alt, bytes::complete::{tag, take_until}, character::complete::{char, multispace1, none_of, not_line_ending}, combinator::map, error::ErrorKind, multi::{count, fold_many0, fold_many1, many0, separated_nonempty_list}, number::complete::double, sequence::{preceded, separat
IT" => nd.meta.subtitle_translit = Some(value.to_owned()), "ARTISTTRANSLIT" => nd.meta.artist_translit = Some(value.to_owned()), "GENRE" => nd.meta.genre = Some(value.to_owned()), "CREDIT" => nd.meta.credit = Some(value.to_owned()), "BANNER" => nd.meta.banner_path = Some(value.to_owned()), "BACKGROUND" => nd.meta.background_path = Some(value.to_owned()), "LYRICSPATH" => nd.meta.lyrics_path = Some(value.to_owned()), "CDTITLE" => nd.meta.cd_title = Some(value.to_owned()), "MUSIC" => nd.meta.music_path = Some(value.to_owned()), "SAMPLESTART" => nd.meta.sample_start = Some(ws_trimmed(double)(value)?.1), "SAMPLELENGTH" => nd.meta.sample_length = Some(ws_trimmed(double)(value)?.1), "OFFSET" => nd.structure.offset = Some(-ws_trimmed(double)(value)?.1), "DISPLAYBPM" => nd.meta.display_bpm = Some(ws_trimmed(display_bpm)(value)?.1), "BPMS" => { nd.structure.bpms = ws_trimmed(comma_separated(beat_pair(double, 4.0)))(value)?.1 } "STOPS" => { nd.structure.stops = Some(ws_trimmed(comma_separated(beat_pair(double, 4.0)))(value)?.1) } "NOTES" => nd.charts.push(chart(value)?.1), _ => {} } } } Ok((input, nd)) } pub fn parse(input: &str) -> Result<NoteData, Err<(&str, ErrorKind)>> { notedata(input).map(|notedata| notedata.1) } #[cfg(test)] mod tests { use super::*; use crate::{BeatPair, ChartMetadata, StructureData}; use nom::Err::Error; #[test] fn parse_display_bpm() { assert_eq!( display_bpm("1.2 : 3.4 foo"), Ok((" foo", DisplayBpm::Range(1.2, 3.4))) ); assert_eq!(display_bpm("1.2foo"), Ok(("foo", DisplayBpm::Static(1.2)))); assert_eq!(display_bpm("*"), Ok(("", DisplayBpm::Random))); } #[test] fn parse_notetype() { assert_eq!(notetype("1foo"), Ok(("foo", Some(NoteType::Tap)))); assert_eq!(notetype("2foo"), Ok(("foo", Some(NoteType::Hold)))); assert_eq!(notetype("3foo"), Ok(("foo", Some(NoteType::HoldEnd)))); assert_eq!(notetype("4foo"), Ok(("foo", Some(NoteType::Roll)))); assert_eq!(notetype("Mfoo"), Ok(("foo", Some(NoteType::Mine)))); assert_eq!(notetype("Lfoo"), Ok(("foo", 
Some(NoteType::Lift)))); assert_eq!(notetype("Ffoo"), Ok(("foo", Some(NoteType::Fake)))); assert_eq!(notetype("0foo"), Ok(("foo", None))); assert_eq!(notetype("\rfoo"), Err(Error(("\rfoo", ErrorKind::NoneOf)))); assert_eq!(notetype("\nfoo"), Err(Error(("\nfoo", ErrorKind::NoneOf)))); assert_eq!(notetype(",foo"), Err(Error((",foo", ErrorKind::NoneOf)))); } #[test] fn parse_noterow() { assert_eq!( noterow("0101\n"), Ok(( "\n", vec![Note::new(NoteType::Tap, 1), Note::new(NoteType::Tap, 3)] )) ); } #[test] fn parse_measure() { assert_eq!( measure( "0000\n \ 0100\n \ 0000\n \ 0010\n \ 0000\n" ), Ok(( "", vec![ (vec![Note::new(NoteType::Tap, 1)], Rational32::new(1, 5)), (vec![Note::new(NoteType::Tap, 2)], Rational32::new(3, 5)), ] )) ); } #[test] fn parse_comment() { assert_eq!(comment("// foo\nbar"), Ok(("\nbar", " foo"))); } #[test] fn parse_chart() { assert_eq!( chart( " foo:: bar :: 0.000,0.000 :\n\n \ 0000\n \ 0100\n \ 0000\n \ 0000\n \ , // baz\n 0000\n \ 0000\n \ 0010\n \ 0000\n" ), Ok(( "", vec![ vec![(vec![Note::new(NoteType::Tap, 1)], Rational32::new(1, 4))], vec![(vec![Note::new(NoteType::Tap, 2)], Rational32::new(1, 2))], ] )) ); } #[test] fn parse_notedata() { assert_eq!( notedata( "content that is #TITLE:bar1; not part of a tag is discarded #SUBTITLE:bar2;#ARTIST:bar3;#TITLETRANSLIT:bar4;#SUBTITLETRANSLIT:bar5; #ARTISTTRANSLIT:bar6;#GENRE:bar7;#CREDIT:bar8;#BANNER:bar9; #BACKGROUND:bar10;#LYRICSPATH:bar11;#CDTITLE:bar12;#MUSIC:bar13; #SAMPLESTART: 1.2 ;#SAMPLELENGTH: 3.4 ;#BPMS: 1.0=2 ; #STOPS: 3.0=4 ;#OFFSET: 1 ;#DISPLAYBPM: * ;#STOPS: ; #NOTES: ::::: \ 0000\n \ 0100\n \ 0000\n \ 0000\n \ ; #NOTES: ::::: \ 0000\n \ 0000\n \ 0010\n \ 0000\n \ ;" ), Ok(( "", NoteData { meta: ChartMetadata { title: Some("bar1".to_owned()), subtitle: Some("bar2".to_owned()), artist: Some("bar3".to_owned()), title_translit: Some("bar4".to_owned()), subtitle_translit: Some("bar5".to_owned()), artist_translit: Some("bar6".to_owned()), genre: Some("bar7".to_owned()), credit: 
Some("bar8".to_owned()), banner_path: Some("bar9".to_owned()), background_path: Some("bar10".to_owned()), lyrics_path: Some("bar11".to_owned()), cd_title: Some("bar12".to_owned()), music_path: Some("bar13".to_owned()), sample_start: Some(1.2), sample_length: Some(3.4), display_bpm: Some(DisplayBpm::Random), background_changes: None, foreground_changes: None, selectable: None, }, structure: StructureData { bpms: vec![BeatPair::from_pair(1. / 4.0, 2.0).unwrap()], stops: Some(vec![BeatPair::from_pair(3. / 4.0, 4.0).unwrap()]), offset: Some(-1.0), }, charts: vec![ vec![vec![( vec![Note::new(NoteType::Tap, 1)], Rational32::new(1, 4), )]], vec![vec![( vec![Note::new(NoteType::Tap, 2)], Rational32::new(2, 4), )]], ], } )) ); } }
ed_pair, terminated}, Err, IResult, }; use num_rational::Rational32; fn display_bpm(input: &str) -> IResult<&str, DisplayBpm> { alt(( map( separated_pair(double, ws_trimmed(char(':')), double), |(min, max)| DisplayBpm::Range(min, max), ), map(double, DisplayBpm::Static), map(char('*'), |_| DisplayBpm::Random), ))(input) } fn notetype(input: &str) -> IResult<&str, Option<NoteType>> { map(none_of("\r\n,"), into_sm_notetype)(input) } fn into_sm_notetype(sm_char: char) -> Option<NoteType> { match sm_char { '1' => Some(NoteType::Tap), '2' => Some(NoteType::Hold), '3' => Some(NoteType::HoldEnd), '4' => Some(NoteType::Roll), 'M' => Some(NoteType::Mine), 'L' => Some(NoteType::Lift), 'F' => Some(NoteType::Fake), _ => None, } } fn noterow(input: &str) -> IResult<&str, NoteRow> { map( fold_many1(notetype, (vec![], 0), |(mut noterow, mut index), item| { if let Some(item) = item { noterow.push(Note::new(item, index)) } index += 1; (noterow, index) }), |(noterow, _)| noterow, )(input) } fn measure(input: &str) -> IResult<&str, Measure> { map( fold_many0( terminated(noterow, multispace1), (vec![], 0), |(mut noterows, mut index), item| { if !item.is_empty() { noterows.push((item, index)) } index += 1; (noterows, index) }, ), |(noterows, total)| { noterows .into_iter() .map(|(item, index)| (item, Rational32::new(index, total))) .collect() }, )(input) } fn chart(input: &str) -> IResult<&str, Chart> { preceded( terminated( count(terminated(take_until(":"), char(':')), 5), many0(alt((comment, multispace1))), ), separated_nonempty_list( preceded( many0(alt((comment, multispace1))), terminated(char(','), many0(alt((comment, multispace1)))), ), measure, ), )(input) } fn comment(input: &str) -> IResult<&str, &str> { preceded(tag("//"), not_line_ending)(input) } fn notedata(input: &str) -> IResult<&str, NoteData> { let mut input = input; let mut nd = NoteData::new(); while let Ok((output, (tag, value))) = preceded(take_until("#"), stepmania_tag)(input) { input = output; if 
!value.trim().is_empty() { match tag { "TITLE" => nd.meta.title = Some(value.to_owned()), "SUBTITLE" => nd.meta.subtitle = Some(value.to_owned()), "ARTIST" => nd.meta.artist = Some(value.to_owned()), "TITLETRANSLIT" => nd.meta.title_translit = Some(value.to_owned()), "SUBTITLETRANSL
random
[]
Rust
src/client.rs
erikjohnston/matrix-hyper-federation-client
9939cf12f9bc8437a376370a135c2ee4eddb8c33
use std::convert::TryInto; use std::sync::Arc; use anyhow::{bail, format_err, Context, Error}; use ed25519_dalek::Keypair; use http::header::{AUTHORIZATION, CONTENT_TYPE}; use http::request::{Builder, Parts}; use http::{HeaderValue, Uri}; use hyper::body::{to_bytes, HttpBody}; use hyper::client::connect::Connect; use hyper::{Body, Client, Request, Response}; use serde::Serialize; use serde_json::value::RawValue; use signed_json::{Canonical, Signed}; use crate::server_resolver::MatrixConnector; pub type FederationClient = hyper::Client<MatrixConnector>; pub async fn new_federation_client() -> Result<FederationClient, Error> { let connector = MatrixConnector::with_default_resolver().await?; Ok(Client::builder().build(connector)) } #[derive(Debug, Clone)] pub struct SigningFederationClient<C = MatrixConnector> { client: Client<C>, server_name: String, key_id: String, secret_key: Arc<Keypair>, } impl SigningFederationClient<MatrixConnector> { pub async fn new( server_name: impl ToString, key_id: impl ToString, secret_key: Keypair, ) -> Result<Self, Error> { let connector = MatrixConnector::with_default_resolver().await?; Ok(SigningFederationClient { client: Client::builder().build(connector), server_name: server_name.to_string(), key_id: key_id.to_string(), secret_key: Arc::new(secret_key), }) } } impl<C> SigningFederationClient<C> { pub fn with_client( client: Client<C>, server_name: String, key_name: String, secret_key: Keypair, ) -> Self { SigningFederationClient { client, server_name, key_id: key_name, secret_key: Arc::new(secret_key), } } } impl<C> SigningFederationClient<C> where C: Connect + Clone + Send + Sync + 'static, { pub async fn get(&self, uri: Uri) -> Result<Response<Body>, Error> { let body = Body::default(); let mut req = Request::new(body); *req.uri_mut() = uri; Ok(self.request(req).await?) } pub async fn request(&self, req: Request<Body>) -> Result<Response<Body>, Error> { if req.uri().scheme() != Some(&"matrix".parse()?) 
{ return Ok(self.client.request(req).await?); } if !req.body().is_end_stream() && req.headers().get(CONTENT_TYPE) != Some(&HeaderValue::from_static("application/json")) { bail!("Request has a non-JSON body") } let (mut parts, body) = req.into_parts(); let content = if body.is_end_stream() { None } else { let bytes = to_bytes(body).await?; let json_string = String::from_utf8(bytes.to_vec())?; Some(RawValue::from_string(json_string)?) }; let auth_header = make_auth_header_from_parts( &self.server_name, &self.key_id, &self.secret_key, &parts, content.as_ref(), ) .context("Failed to sign request")?; parts.headers.insert(AUTHORIZATION, auth_header.parse()?); let new_body = if let Some(raw_value) = content { raw_value.to_string().into() } else { Body::default() }; let new_req = Request::from_parts(parts, new_body); Ok(self.client.request(new_req).await?) } } pub fn make_auth_header<T: serde::Serialize>( server_name: &str, key_id: &str, secret_key: &Keypair, method: &str, path: &str, destination: &str, content: Option<T>, ) -> Result<String, Error> { let request_json = RequestJson { method, uri: path, origin: server_name, destination, content, }; let signed: Signed<_> = Signed::wrap(request_json).context("Failed to serialize content")?; let sig = signed.sign_detached(secret_key); let b64_sig = base64::encode_config(&sig, base64::STANDARD_NO_PAD); Ok(format!( r#"X-Matrix origin={},key="{}",sig="{}""#, server_name, key_id, b64_sig, )) } pub fn make_auth_header_from_parts<T: serde::Serialize>( server_name: &str, key_id: &str, secret_key: &Keypair, parts: &Parts, content: Option<T>, ) -> Result<String, Error> { make_auth_header( server_name, key_id, secret_key, parts.method.as_str(), parts .uri .path_and_query() .ok_or_else(|| format_err!("Path is required"))? 
.as_str(), parts .uri .host() .ok_or_else(|| format_err!("Host is required"))?, content, ) } pub fn sign_and_build_json_request<T: serde::Serialize>( server_name: &str, key_id: &str, secret_key: &Keypair, mut request_builder: Builder, content: Option<T>, ) -> Result<Request<Body>, Error> { let uri = request_builder .uri_ref() .ok_or_else(|| format_err!("URI must be set"))?; let host = uri .host() .ok_or_else(|| format_err!("Host is required in URI"))?; let path = uri .path_and_query() .ok_or_else(|| format_err!("Path is required in URI"))? .as_str(); let method = request_builder .method_ref() .ok_or_else(|| format_err!("Method must be set"))?; let canonical_content = if let Some(content) = content { Some(Canonical::wrap(content).context("Failed to serialize content")?) } else { None }; let header_string = make_auth_header( server_name, key_id, secret_key, method.as_str(), path, host, canonical_content.as_ref(), )?; let header_value = header_string.try_into()?; let body = if let Some(c) = canonical_content { Body::from(c.into_canonical()) } else { Body::default() }; request_builder .headers_mut() .map(|header_map| header_map.insert(AUTHORIZATION, header_value)); let request = request_builder.body(body)?; Ok(request) } #[derive(Serialize)] pub struct RequestJson<'a, T> { method: &'a str, uri: &'a str, origin: &'a str, destination: &'a str, #[serde(skip_serializing_if = "Option::is_none")] content: Option<T>, } pub trait SignedRequestBuilderExt { fn signed( self, server_name: &str, key_id: &str, secret_key: &Keypair, ) -> Result<Request<Body>, Error>; fn signed_json<T: Serialize>( self, server_name: &str, key_id: &str, secret_key: &Keypair, content: T, ) -> Result<Request<Body>, Error>; fn signed_json_opt<T: Serialize>( self, server_name: &str, key_id: &str, secret_key: &Keypair, content: Option<T>, ) -> Result<Request<Body>, Error>; } impl SignedRequestBuilderExt for Builder { fn signed( self, server_name: &str, key_id: &str, secret_key: &Keypair, ) -> 
Result<Request<Body>, Error> { sign_and_build_json_request::<()>(server_name, key_id, secret_key, self, None) } fn signed_json<T: Serialize>( self, server_name: &str, key_id: &str, secret_key: &Keypair, content: T, ) -> Result<Request<Body>, Error> { sign_and_build_json_request(server_name, key_id, secret_key, self, Some(content)) } fn signed_json_opt<T: Serialize>( self, server_name: &str, key_id: &str, secret_key: &Keypair, content: Option<T>, ) -> Result<Request<Body>, Error> { if let Some(content) = content { self.signed_json(server_name, key_id, secret_key, content) } else { self.signed(server_name, key_id, secret_key) } } } pub struct AuthHeader<'a> { pub origin: &'a str, pub key_id: &'a str, pub signature: &'a str, } pub fn parse_auth_header(header: &str) -> Option<AuthHeader> { let header = header.strip_prefix("X-Matrix ")?; let mut origin = None; let mut key_id = None; let mut signature = None; for item in header.split(',') { let (key, value) = item.split_at(item.find('=')?); let value = value.trim_matches('='); let value = if value.starts_with('"') && value.ends_with('"') { &value[1..value.len() - 1] } else { value }; match key { "origin" => origin = Some(value), "key" => key_id = Some(value), "sig" => signature = Some(value), _ => {} } } Some(AuthHeader { origin: origin?, key_id: key_id?, signature: signature?, }) } #[cfg(test)] mod test { use std::collections::BTreeMap; use ed25519_dalek::{PublicKey, SecretKey}; use super::*; #[test] fn test_parse_auth_header() { let header = parse_auth_header(r#"X-Matrix origin=foo.com,key="key_id",sig="some_signature""#) .unwrap(); assert_eq!(header.origin, "foo.com"); assert_eq!(header.key_id, "key_id"); assert_eq!(header.signature, "some_signature"); } #[tokio::test] async fn auth_header_no_content() { let secret = SecretKey::from_bytes(&[0u8; 32]).unwrap(); let public = PublicKey::from(&secret); let secret_key = Keypair { secret, public }; let header = make_auth_header::<()>( "localhost", "ed25519:test", 
&secret_key, "GET", "/test", "matrix.org", None, ) .unwrap(); assert_eq!( header, r#"X-Matrix origin=localhost,key="ed25519:test",sig="aemgn56SKst12mSbh2X0l3pBuzyWmAkURVknrTqz/ev2p8KDnKHXnFw/UsLOfwbD6V/om4Lh+DzeyE0MlJ1GBA""# ); } #[tokio::test] async fn auth_header_content() { let secret = SecretKey::from_bytes(&[0u8; 32]).unwrap(); let public = PublicKey::from(&secret); let secret_key = Keypair { secret, public }; let mut map = BTreeMap::new(); map.insert("foo", "bar"); let header = make_auth_header( "localhost", "ed25519:test", &secret_key, "GET", "/test", "matrix.org", Some(map), ) .unwrap(); assert_eq!( header, r#"X-Matrix origin=localhost,key="ed25519:test",sig="JwOvw9q9rGU1FOX+nVqZkXL9P6WhsKE3aNV2Q+Ftj0urJHv8olv7r7gOMZM3nITm0gVwYBN8s0FBGJymeQt9DA""# ); } }
use std::convert::TryInto; use std::sync::Arc; use anyhow::{bail, format_err, Context, Error}; use ed25519_dalek::Keypair; use http::header::{AUTHORIZATION, CONTENT_TYPE}; use http::request::{Builder, Parts}; use http::{HeaderValue, Uri}; use hyper::body::{to_bytes, HttpBody}; use hyper::client::connect::Connect; use hyper::{Body, Client, Request, Response}; use serde::Serialize; use serde_json::value::RawValue; use signed_json::{Canonical, Signed}; use crate::server_resolver::MatrixConnector; pub type FederationClient = hyper::Client<MatrixConnector>; pub async fn new_federation_client() -> Result<FederationClient, Error> { let connector = MatrixConnector::with_default_resolver().await?; Ok(Client::builder().build(connector)) } #[derive(Debug, Clone)] pub struct SigningFederationClient<C = MatrixConnector> { client: Client<C>, server_name: String, key_id: String, secret_key: Arc<Keypair>, } impl SigningFederationClient<MatrixConnector> { pub async fn new( server_name: impl ToString, key_id: impl ToString, secret_key: Keypair, ) -> Result<Self, Error> { let connector = MatrixConnector::with_default_resolver().await?; Ok(SigningFederationClient { client: Client::builder().build(connector), server_name: server_name.to_string(), key_id: key_id.to_string(), secret_key: Arc::new(secret_key), }) } } impl<C> SigningFederationClient<C> { pub fn with_client( client: Client<C>, server_name: String, key_name: String, secret_key: Keypair, ) -> Self { SigningFederationClient { client, server_name, key_id: key_name, secret_key: Arc::new(secret_key), } } } impl<C> SigningFederationClient<C> where C: Connect + Clone + Send + Sync + 'static, { pub async fn get(&self, uri: Uri) -> Result<Response<Body>, Error> { let body = Body::default(); let mut req = Request::new(body); *req.uri_mut() = uri; Ok(self.request(req).await?) } pub async fn request(&self, req: Request<Body>) -> Result<Response<Body>, Error> { if req.uri().scheme() != Some(&"matrix".parse()?) 
{ return Ok(self.client.request(req).await?); } if !req.body().is_end_stream() && req.headers().get(CONTENT_TYPE) != Some(&HeaderValue::from_static("application/json")) { bail!("Request has a non-JSON body") } let (mut parts, body) = req.into_parts(); let content = if body.is_end_stream() { None } else { let bytes = to_bytes(body).await?; let json_string = String::from_utf8(bytes.to_vec())?; Some(RawValue::from_string(json_string)?) }; let auth_header = make_auth_header_from_parts( &self.server_name, &self.key_id, &self.secret_key, &parts, content.as_ref(), ) .context("Failed to sign request")?; parts.headers.insert(AUTHORIZATION, auth_header.parse()?); let new_body = if let Some(raw_value) = content { raw_value.to_string().into() } else { Body::default() }; let new_req = Request::from_parts(parts, new_body); Ok(self.client.request(new_req).await?) } } pub fn make_auth_header<T: serde::Serialize>( server_name: &str, key_id: &str, secret_key: &Keypair, method: &str, path: &str, destination: &str, content: Option<T>, ) -> Result<String, Error> { let request_json = RequestJson { method, uri: path, origin: server_name, destination, content, }; let signed: Signed<_> = Signed::wrap(request_json).context("Failed to serialize content")?; let sig = signed.sign_detached(secret_key); let b64_sig = base64::encode_config(&sig, base64::STANDARD_NO_PAD); Ok(format!( r#"X-Matrix origin={},key="{}",sig="{}""#, server_name, key_id, b64_sig, )) } pub fn make_auth_header_from_parts<T: serde::Serialize>( server_name: &str, key_id: &str, secret_key: &Keypair, parts: &Parts, content: Option<T>, ) -> Result<String, Error> { make_auth_header( server_name, key_id, secret_key, parts.method.as_str(), parts .uri .path_and_query() .ok_or_else(|| format_err!("Path is required"))? 
.as_str(), parts .uri .host() .ok_or_else(|| format_err!("Host is required"))?, content, ) } pub fn sign_and_build_json_request<T: serde::Serialize>( server_name: &str, key_id: &str, secret_key: &Keypair, mut request_builder: Builder, content: Option<T>, ) -> Result<Request<Body>, Error> { let uri = request_builder .uri_ref() .ok_or_else(|| format_err!("URI must be set"))?; let host = uri .host() .ok_or_else(|| format_err!("Host is required in URI"))?; let path = uri .path_and_query() .ok_or_else(|| format_err!("Path is required in URI"))? .as_str(); let method = request_builder .method_ref() .ok_or_else(|| format_err!("Method must be set"))?; let canonical_content = if let Some(content) = content { Some(Canonical::wrap(content).context("Failed to serialize content")?) } else { None }; let header_string = make_auth_header( server_name, key_id, secret_key, method.as_str(), path, host, canonical_content.as_ref(), )?; let header_value = header_string.try_into()?; let body = if let Some(c) = canonical_content { Body::from(c.into_canonical()) } else { Body::default() }; request_builder .headers_mut() .map(|header_map| header_map.insert(AUTHORIZATION, header_value)); let request = request_builder.body(body)?; Ok(request) } #[derive(Serialize)] pub struct RequestJson<'a, T> { method: &'a str, uri: &'a str, origin: &'a str, destination: &'a str, #[serde(skip_serializing_if = "Option::is_none")] content: Option<T>, } pub trait SignedRequestBuilderExt { fn signed( self, server_name: &str, key_id: &str, secret_key: &Keypair, ) -> Result<Request<Body>, Error>; fn signed_json<T: Serialize>( self, server_name: &str, key_id: &str, secret_key: &Keypair, content: T, ) -> Result<Request<Body>, Error>; fn signed_json_opt<T: Serialize>( self, server_name: &str, key_id: &str, secret_key: &Keypair, content: Option<T>, ) -> Result<Request<Body>, Error>; } impl SignedRequestBuilderExt for Builder { fn signed( self, server_name: &str, key_id: &str, secret_key: &Keypair, ) -> 
Result<Request<Body>, Error> { sign_and_build_json_request::<()>(server_name, key_id, secret_key, self, None) } fn signed_json<T: Serialize>( self, server_name: &str, key_id: &str, secret_key: &Keypair, content: T, ) -> Result<Request<Body>, Error> { sign_and_build_json_request(server_name, key_id, secret_key, self, Some(content)) } fn signed_json_opt<T: Serialize>( self, server_name: &str, key_id: &str,
} pub struct AuthHeader<'a> { pub origin: &'a str, pub key_id: &'a str, pub signature: &'a str, } pub fn parse_auth_header(header: &str) -> Option<AuthHeader> { let header = header.strip_prefix("X-Matrix ")?; let mut origin = None; let mut key_id = None; let mut signature = None; for item in header.split(',') { let (key, value) = item.split_at(item.find('=')?); let value = value.trim_matches('='); let value = if value.starts_with('"') && value.ends_with('"') { &value[1..value.len() - 1] } else { value }; match key { "origin" => origin = Some(value), "key" => key_id = Some(value), "sig" => signature = Some(value), _ => {} } } Some(AuthHeader { origin: origin?, key_id: key_id?, signature: signature?, }) } #[cfg(test)] mod test { use std::collections::BTreeMap; use ed25519_dalek::{PublicKey, SecretKey}; use super::*; #[test] fn test_parse_auth_header() { let header = parse_auth_header(r#"X-Matrix origin=foo.com,key="key_id",sig="some_signature""#) .unwrap(); assert_eq!(header.origin, "foo.com"); assert_eq!(header.key_id, "key_id"); assert_eq!(header.signature, "some_signature"); } #[tokio::test] async fn auth_header_no_content() { let secret = SecretKey::from_bytes(&[0u8; 32]).unwrap(); let public = PublicKey::from(&secret); let secret_key = Keypair { secret, public }; let header = make_auth_header::<()>( "localhost", "ed25519:test", &secret_key, "GET", "/test", "matrix.org", None, ) .unwrap(); assert_eq!( header, r#"X-Matrix origin=localhost,key="ed25519:test",sig="aemgn56SKst12mSbh2X0l3pBuzyWmAkURVknrTqz/ev2p8KDnKHXnFw/UsLOfwbD6V/om4Lh+DzeyE0MlJ1GBA""# ); } #[tokio::test] async fn auth_header_content() { let secret = SecretKey::from_bytes(&[0u8; 32]).unwrap(); let public = PublicKey::from(&secret); let secret_key = Keypair { secret, public }; let mut map = BTreeMap::new(); map.insert("foo", "bar"); let header = make_auth_header( "localhost", "ed25519:test", &secret_key, "GET", "/test", "matrix.org", Some(map), ) .unwrap(); assert_eq!( header, r#"X-Matrix 
origin=localhost,key="ed25519:test",sig="JwOvw9q9rGU1FOX+nVqZkXL9P6WhsKE3aNV2Q+Ftj0urJHv8olv7r7gOMZM3nITm0gVwYBN8s0FBGJymeQt9DA""# ); } }
secret_key: &Keypair, content: Option<T>, ) -> Result<Request<Body>, Error> { if let Some(content) = content { self.signed_json(server_name, key_id, secret_key, content) } else { self.signed(server_name, key_id, secret_key) } }
function_block-function_prefix_line
[ { "content": "type ConnectorFuture =\n\n Pin<Box<dyn Future<Output = Result<MaybeHttpsStream<TcpStream>, Error>> + Send>>;\n\n\n\nimpl Service<Uri> for MatrixConnector {\n\n type Response = MaybeHttpsStream<TcpStream>;\n\n type Error = Error;\n\n type Future = ConnectorFuture;\n\n\n\n fn poll_ready(&mut self, _: &mut task::Context<'_>) -> Poll<Result<(), Self::Error>> {\n\n // This connector is always ready, but others might not be.\n\n Poll::Ready(Ok(()))\n\n }\n\n\n\n fn call(&mut self, dst: Uri) -> Self::Future {\n\n let resolver = self.resolver.clone();\n\n async move {\n\n let endpoints = resolver\n\n .resolve_server_name_from_host_port(\n\n dst.host().expect(\"hostname\").to_string(),\n\n dst.port_u16(),\n", "file_path": "src/server_resolver.rs", "rank": 5, "score": 46552.87096641003 }, { "content": "# Matrix Hyper Federation Client\n\n\n\nA hyper client for connecting over Matrix federation.\n\n\n\n\n\n## Example\n\n\n\n```rust,no_run\n\nuse ed25519_dalek::Keypair;\n\nuse matrix_hyper_federation_client::SigningFederationClient;\n\n\n\nasync fn run(secret_key: Keypair) -> Result<(), anyhow::Error> {\n\n let client = SigningFederationClient::new(\"local_server\", \"ed25519:sg5Sa\", secret_key).await?;\n\n\n\n let resp = client.get(\"matrix://matrix.org/_matrix/federation/v1/version\".parse()?).await?;\n\n\n\n assert_eq!(resp.status(), 200);\n\n\n\n Ok(())\n\n}\n\n```\n", "file_path": "README.md", "rank": 6, "score": 20140.686521257554 }, { "content": " }\n\n}\n\n\n\n/// Check if there is a `.well-known` file present on the given host.\n\npub async fn get_well_known<C>(http_client: &Client<C>, host: &str) -> Option<WellKnownServer>\n\nwhere\n\n C: Service<Uri> + Clone + Sync + Send + 'static,\n\n C::Error: Into<Box<dyn std::error::Error + Send + Sync>>,\n\n C::Future: Unpin + Send,\n\n C::Response: AsyncRead + AsyncWrite + Connection + Unpin + Send + 'static,\n\n{\n\n // TODO: Add timeout and cache result\n\n\n\n let uri = hyper::Uri::builder()\n\n 
.scheme(\"https\")\n\n .authority(host)\n\n .path_and_query(\"/.well-known/matrix/server\")\n\n .build()\n\n .ok()?;\n\n\n", "file_path": "src/server_resolver.rs", "rank": 24, "score": 26.131168840445337 }, { "content": " #[derive(Clone)]\n\n pub struct TestConnector;\n\n\n\n impl Service<Uri> for TestConnector {\n\n type Response = TestConnection;\n\n type Error = Error;\n\n type Future = TestConnectorFuture;\n\n\n\n fn poll_ready(&mut self, _: &mut task::Context<'_>) -> Poll<Result<(), Self::Error>> {\n\n // This connector is always ready, but others might not be.\n\n Poll::Ready(Ok(()))\n\n }\n\n\n\n fn call(&mut self, _dst: Uri) -> Self::Future {\n\n let (client, server) = TestConnection::double_ended();\n\n\n\n {\n\n let service = hyper::service::service_fn(|_| async move {\n\n Ok(hyper::Response::new(hyper::Body::from(\"Hello World\")))\n\n as Result<_, hyper::http::Error>\n", "file_path": "src/server_resolver.rs", "rank": 25, "score": 23.443711749779943 }, { "content": "//! # Ok(())\n\n//! # }\n\n//! ```\n\n//!\n\n//! Note however, that this incurs some overhead due to have to deserialize the\n\n//! request body back into JSON so that it can be signed.\n\n//!\n\n//! # [`FederationClient`]\n\n//!\n\n//! The [`FederationClient`] is just a standard [`hyper::Client`] with a\n\n//! [`MatrixConnector`] that can route `matrix://` URIs, but does *not* sign the\n\n//! requests automatically:\n\n//!\n\n//! ```no_run\n\n//! # use matrix_hyper_federation_client::client::{new_federation_client, sign_and_build_json_request};\n\n//! # use hyper::Request;\n\n//! use matrix_hyper_federation_client::SignedRequestBuilderExt;\n\n//! # use ed25519_dalek::Keypair;\n\n//! #\n\n//! 
# async fn run(secret_key: &Keypair) -> Result<(), anyhow::Error> {\n", "file_path": "src/lib.rs", "rank": 26, "score": 18.96077165695197 }, { "content": " ///\n\n /// *Note*: This can be different from the `host_header` field if the server\n\n /// name has been delegated to a different server name using a `.well-known`\n\n /// file.\n\n pub tls_name: String,\n\n}\n\n\n\n/// A resolver for Matrix server names.\n\n#[derive(Debug, Clone)]\n\npub struct MatrixResolver {\n\n resolver: trust_dns_resolver::TokioAsyncResolver,\n\n http_client: Client<HttpsConnector<HttpConnector>>,\n\n}\n\n\n\nimpl MatrixResolver {\n\n /// Create a new [`MatrixResolver`] with a default HTTP client.\n\n pub async fn new() -> Result<MatrixResolver, Error> {\n\n let http_client = hyper::Client::builder().build(HttpsConnector::new());\n\n\n\n MatrixResolver::with_http_client(http_client).await\n", "file_path": "src/server_resolver.rs", "rank": 27, "score": 18.258466551444272 }, { "content": "//! A crate for sending Matrix federation HTTP requests using [`hyper`].\n\n//!\n\n//! # [`SigningFederationClient`]\n\n//!\n\n//! The [`SigningFederationClient`] correctly routes `matrix://` URIs and\n\n//! automatically signs such requests:\n\n//!\n\n//! ```no_run\n\n//! # use ed25519_dalek::Keypair;\n\n//! # use matrix_hyper_federation_client::SigningFederationClient;\n\n//! #\n\n//! # async fn run(secret_key: Keypair) -> Result<(), anyhow::Error> {\n\n//! #\n\n//! let client = SigningFederationClient::new(\"local_server\", \"ed25519:sg5Sa\", secret_key).await?;\n\n//!\n\n//! let uri = \"matrix://matrix.org/_matrix/federation/v1/version\".parse()?;\n\n//! let resp = client.get(uri).await?;\n\n//!\n\n//! 
assert_eq!(resp.status(), 200);\n\n//!\n", "file_path": "src/lib.rs", "rank": 28, "score": 17.185780558208524 }, { "content": "use std::net::IpAddr;\n\nuse std::pin::Pin;\n\nuse std::str::FromStr;\n\nuse std::task::{self, Poll};\n\n\n\n/// A resolved host for a Matrix server.\n\n#[derive(Debug, Clone)]\n\npub struct Endpoint {\n\n /// The host name to connect to.\n\n ///\n\n /// These can be resolved as normal `A`/`AAAA` records.\n\n pub host: String,\n\n\n\n /// The port to connect to.\n\n pub port: u16,\n\n\n\n /// The value to use in the `Host` header of requests to this endpoint.\n\n pub host_header: String,\n\n\n\n /// The TLS server name to use when connecting to this endpoint.\n", "file_path": "src/server_resolver.rs", "rank": 29, "score": 16.864335169357016 }, { "content": " if let Ok(parsed_port) = maybe_port.parse::<u16>() {\n\n host = maybe_host.to_string();\n\n port = Some(parsed_port);\n\n } else {\n\n host = server_name.to_string();\n\n port = None;\n\n }\n\n } else {\n\n host = server_name.to_string();\n\n port = None;\n\n }\n\n\n\n self.resolve_server_name_from_host_port(host, port).await\n\n }\n\n\n\n /// Create a new [`MatrixResolver`] with a given HTTP client.\n\n pub async fn with_http_client(\n\n http_client: Client<HttpsConnector<HttpConnector>>,\n\n ) -> Result<MatrixResolver, Error> {\n\n let resolver = trust_dns_resolver::TokioAsyncResolver::tokio_from_system_conf()?;\n", "file_path": "src/server_resolver.rs", "rank": 30, "score": 16.33821701920676 }, { "content": "mod test {\n\n use anyhow::Error;\n\n use futures::FutureExt;\n\n use http::Uri;\n\n use hyper::client::connect::Connected;\n\n use hyper::client::connect::Connection;\n\n use hyper::server::conn::Http;\n\n use hyper::service::Service;\n\n use std::future::Future;\n\n use std::pin::Pin;\n\n use std::{\n\n io::Cursor,\n\n sync::{Arc, Mutex},\n\n task::{self, Poll},\n\n };\n\n use tokio::io::{AsyncRead, AsyncWrite};\n\n\n\n type TestConnectorFuture = Pin<Box<dyn Future<Output = 
Result<TestConnection, Error>> + Send>>;\n\n\n\n /// A connector that returns a connection which returns 200 OK to all connections.\n", "file_path": "src/server_resolver.rs", "rank": 31, "score": 16.288895447720773 }, { "content": "/// A connector that can be used with a [`hyper::Client`] that correctly\n\n/// resolves and connects to `matrix://` URIs.\n\n#[derive(Debug, Clone)]\n\npub struct MatrixConnector {\n\n resolver: MatrixResolver,\n\n}\n\n\n\nimpl MatrixConnector {\n\n /// Create new [`MatrixConnector`] with the given [`MatrixResolver`].\n\n pub fn with_resolver(resolver: MatrixResolver) -> MatrixConnector {\n\n MatrixConnector { resolver }\n\n }\n\n\n\n /// Create new [`MatrixConnector`] with a default [`MatrixResolver`].\n\n pub async fn with_default_resolver() -> Result<MatrixConnector, Error> {\n\n let resolver = MatrixResolver::new().await?;\n\n\n\n Ok(MatrixConnector { resolver })\n\n }\n\n}\n\n\n", "file_path": "src/server_resolver.rs", "rank": 32, "score": 16.037593432795497 }, { "content": "//! #\n\n//! let client = new_federation_client().await?;\n\n//!\n\n//! let request = Request::builder()\n\n//! .method(\"GET\")\n\n//! .uri(\"matrix://matrix.org/_matrix/federation/v1/version\")\n\n//! .signed(\"localhost\", \"ed25519:sg5Sa\", &secret_key)?;\n\n//!\n\n//! let resp = client.request(request).await?;\n\n//!\n\n//! assert_eq!(resp.status(), 200);\n\n//!\n\n//! # Ok(())\n\n//! # }\n\n//! 
```\n\n\n\npub mod client;\n\npub mod server_resolver;\n\n\n\n#[doc(inline)]\n", "file_path": "src/lib.rs", "rank": 33, "score": 15.029974720551937 }, { "content": " let mut body = http_client.get(uri).await.ok()?.into_body();\n\n\n\n let mut vec = Vec::new();\n\n while let Some(next) = body.next().await {\n\n // TODO: Limit size of body.\n\n let chunk = next.ok()?;\n\n vec.extend(chunk);\n\n }\n\n\n\n serde_json::from_slice(&vec).ok()?\n\n}\n\n\n\n/// A parsed Matrix `.well-known` file.\n\n#[derive(Debug, Clone, Serialize, Deserialize)]\n\n#[non_exhaustive]\n\npub struct WellKnownServer {\n\n #[serde(rename = \"m.server\")]\n\n pub server: String,\n\n}\n\n\n", "file_path": "src/server_resolver.rs", "rank": 34, "score": 14.817418059345083 }, { "content": "pub use client::{FederationClient, SignedRequestBuilderExt, SigningFederationClient};\n\n#[doc(inline)]\n\npub use server_resolver::MatrixConnector;\n\n\n\n/// Uses the [`doc-comment`] crate to run mark the examples in the README as doc\n\n/// tests.\n\n#[cfg(doctest)]\n\nmod readme_tests {\n\n use doc_comment::doctest;\n\n doctest!(\"../README.md\");\n\n}\n", "file_path": "src/lib.rs", "rank": 35, "score": 14.440909473657813 }, { "content": "//! 
Module for resolving Matrix server names.\n\n\n\nuse anyhow::{bail, format_err, Error};\n\nuse futures::FutureExt;\n\nuse futures_util::stream::StreamExt;\n\nuse http::Uri;\n\nuse hyper::client::connect::{Connection, HttpConnector};\n\nuse hyper::service::Service;\n\nuse hyper::Client;\n\nuse hyper_tls::{HttpsConnector, MaybeHttpsStream};\n\nuse log::{debug, trace};\n\nuse native_tls::TlsConnector;\n\nuse serde::{Deserialize, Serialize};\n\nuse tokio::io::{AsyncRead, AsyncWrite};\n\nuse tokio::net::TcpStream;\n\nuse tokio_native_tls::TlsConnector as AsyncTlsConnector;\n\nuse trust_dns_resolver::error::ResolveErrorKind;\n\n\n\nuse std::collections::BTreeMap;\n\nuse std::future::Future;\n", "file_path": "src/server_resolver.rs", "rank": 36, "score": 14.006082263237445 }, { "content": " fn poll_shutdown(\n\n self: Pin<&mut Self>,\n\n cx: &mut task::Context<'_>,\n\n ) -> Poll<Result<(), std::io::Error>> {\n\n let mut conn = self.inner.lock().expect(\"mutex\");\n\n\n\n if self.direction {\n\n Pin::new(&mut conn.outbound_buffer).poll_shutdown(cx)\n\n } else {\n\n Pin::new(&mut conn.inbound_buffer).poll_shutdown(cx)\n\n }\n\n }\n\n }\n\n\n\n impl Connection for TestConnection {\n\n fn connected(&self) -> Connected {\n\n Connected::new()\n\n }\n\n }\n\n\n", "file_path": "src/server_resolver.rs", "rank": 37, "score": 13.97894944733332 }, { "content": "\n\n Ok(MatrixResolver {\n\n resolver,\n\n http_client,\n\n })\n\n }\n\n\n\n /// Resolves a [`Uri`] to a list of [`Endpoint`]s to try.\n\n ///\n\n /// See [`MatrixResolver::resolve_server_name`].\n\n pub async fn resolve_server_name_from_uri(&self, uri: &Uri) -> Result<Vec<Endpoint>, Error> {\n\n let host = uri.host().expect(\"URI has no host\").to_string();\n\n let port = uri.port_u16();\n\n\n\n self.resolve_server_name_from_host_port(host, port).await\n\n }\n\n\n\n /// Resolves a host and optional port to a list of [`Endpoint`]s to try.\n\n ///\n\n /// *Note*: The host *must not* contain a port.\n", "file_path": 
"src/server_resolver.rs", "rank": 38, "score": 13.598744194034339 }, { "content": " #[tokio::test]\n\n async fn test_memory_connection() {\n\n // TODO: Flesh out tests.\n\n let client: hyper::Client<_, hyper::Body> = hyper::Client::builder().build(TestConnector);\n\n\n\n let response = client\n\n .get(\"http://localhost\".parse().unwrap())\n\n .await\n\n .unwrap();\n\n\n\n assert!(response.status().is_success());\n\n\n\n let bytes = hyper::body::to_bytes(response.into_body()).await.unwrap();\n\n assert_eq!(&bytes[..], b\"Hello World\");\n\n }\n\n}\n", "file_path": "src/server_resolver.rs", "rank": 39, "score": 13.477075676387788 }, { "content": " Poll::Ready(Ok(()))\n\n } else {\n\n conn.wakers.push(cx.waker().clone());\n\n Poll::Pending\n\n }\n\n }\n\n }\n\n\n\n impl AsyncWrite for TestConnection {\n\n fn poll_write(\n\n self: Pin<&mut Self>,\n\n _cx: &mut task::Context<'_>,\n\n buf: &[u8],\n\n ) -> Poll<Result<usize, std::io::Error>> {\n\n let mut conn = self.inner.lock().expect(\"mutex\");\n\n\n\n if self.direction {\n\n conn.outbound_buffer.get_mut().extend_from_slice(buf);\n\n } else {\n\n conn.inbound_buffer.get_mut().extend_from_slice(buf);\n", "file_path": "src/server_resolver.rs", "rank": 40, "score": 12.242298374018276 }, { "content": "\n\n impl AsyncRead for TestConnection {\n\n fn poll_read(\n\n self: Pin<&mut Self>,\n\n cx: &mut task::Context<'_>,\n\n buf: &mut tokio::io::ReadBuf<'_>,\n\n ) -> Poll<std::io::Result<()>> {\n\n let mut conn = self.inner.lock().expect(\"mutex\");\n\n\n\n let buffer = if self.direction {\n\n &mut conn.inbound_buffer\n\n } else {\n\n &mut conn.outbound_buffer\n\n };\n\n\n\n let mut slice = [0; 1024];\n\n\n\n let bytes_read = std::io::Read::read(buffer, &mut slice)?;\n\n if bytes_read > 0 {\n\n buf.put_slice(&slice[..bytes_read]);\n", "file_path": "src/server_resolver.rs", "rank": 41, "score": 12.144251274732875 }, { "content": " Some(\"https\" | \"matrix\") => {}\n\n Some(s) => bail!(\"Unknown scheme '{}'\", s),\n\n None => 
bail!(\"URL missing scheme\"),\n\n }\n\n\n\n let connector: AsyncTlsConnector = if dst.host().expect(\"hostname\").contains(\"localhost\") {\n\n TlsConnector::builder()\n\n .danger_accept_invalid_certs(true)\n\n .build()?\n\n .into()\n\n } else {\n\n TlsConnector::new().unwrap().into()\n\n };\n\n\n\n let tls = connector.connect(&endpoint.tls_name, tcp).await?;\n\n\n\n Ok(tls.into())\n\n}\n\n\n\n#[cfg(test)]\n", "file_path": "src/server_resolver.rs", "rank": 42, "score": 11.834002344694527 }, { "content": " ///\n\n /// See [`MatrixResolver::resolve_server_name`].\n\n pub async fn resolve_server_name_from_host_port(\n\n &self,\n\n mut host: String,\n\n mut port: Option<u16>,\n\n ) -> Result<Vec<Endpoint>, Error> {\n\n debug!(\"Resolving host={}, port={:?}\", host, port);\n\n\n\n let mut authority = if let Some(p) = port {\n\n format!(\"{}:{}\", host, p)\n\n } else {\n\n host.to_string()\n\n };\n\n\n\n // If a literal IP or includes port then we shortcircuit.\n\n if host.parse::<IpAddr>().is_ok() || port.is_some() {\n\n return Ok(vec![Endpoint {\n\n host: host.to_string(),\n\n port: port.unwrap_or(8448),\n", "file_path": "src/server_resolver.rs", "rank": 43, "score": 11.522027630476249 }, { "content": " debug!(\"SRV returned not found, using host and port 8448\");\n\n return Ok(vec![Endpoint {\n\n host: host.clone(),\n\n port: 8448,\n\n host_header: authority.to_string(),\n\n tls_name: host.clone(),\n\n }]);\n\n }\n\n _ => return Err(err.into()),\n\n },\n\n };\n\n\n\n let mut priority_map: BTreeMap<u16, Vec<_>> = BTreeMap::new();\n\n\n\n let mut count = 0;\n\n for record in records {\n\n count += 1;\n\n let priority = record.priority();\n\n priority_map.entry(priority).or_default().push(record);\n\n }\n", "file_path": "src/server_resolver.rs", "rank": 44, "score": 11.152386986670326 }, { "content": " e,\n\n ),\n\n }\n\n }\n\n\n\n Err(format_err!(\"help\"))\n\n }\n\n .boxed()\n\n }\n\n}\n\n\n\n/// Attempts to connect to a particular endpoint.\n\nasync fn 
try_connecting(\n\n dst: &Uri,\n\n endpoint: &Endpoint,\n\n) -> Result<MaybeHttpsStream<TcpStream>, Error> {\n\n let tcp = TcpStream::connect((&endpoint.host as &str, endpoint.port)).await?;\n\n\n\n match dst.scheme_str() {\n\n Some(\"http\") => return Ok(tcp.into()),\n", "file_path": "src/server_resolver.rs", "rank": 45, "score": 11.033167827822954 }, { "content": " });\n\n let fut = Http::new().serve_connection(server, service);\n\n tokio::spawn(fut);\n\n }\n\n\n\n futures::future::ok(client).boxed()\n\n }\n\n }\n\n\n\n #[derive(Default)]\n\n struct TestConnectionInner {\n\n outbound_buffer: Cursor<Vec<u8>>,\n\n inbound_buffer: Cursor<Vec<u8>>,\n\n wakers: Vec<futures::task::Waker>,\n\n }\n\n\n\n /// A in memory connection for use with tests.\n\n #[derive(Clone, Default)]\n\n pub struct TestConnection {\n\n inner: Arc<Mutex<TestConnectionInner>>,\n", "file_path": "src/server_resolver.rs", "rank": 46, "score": 9.643262545828069 }, { "content": " debug!(\"Host is IP or port is set\");\n\n\n\n return Ok(vec![Endpoint {\n\n host: host.clone(),\n\n port: port.unwrap_or(8448),\n\n\n\n host_header: authority.to_string(),\n\n tls_name: host.clone(),\n\n }]);\n\n }\n\n\n\n let result = self\n\n .resolver\n\n .srv_lookup(format!(\"_matrix._tcp.{}\", host).as_ref())\n\n .await;\n\n\n\n let records = match result {\n\n Ok(records) => records,\n\n Err(err) => match err.kind() {\n\n ResolveErrorKind::NoRecordsFound { .. 
} => {\n", "file_path": "src/server_resolver.rs", "rank": 47, "score": 9.44524907515486 }, { "content": "\n\n host_header: authority.to_string(),\n\n tls_name: host.to_string(),\n\n }]);\n\n }\n\n\n\n // Do well-known delegation lookup.\n\n if let Some(server) = get_well_known(&self.http_client, &host).await {\n\n let a = http::uri::Authority::from_str(&server.server)?;\n\n host = a.host().to_string();\n\n port = a.port_u16();\n\n authority = a.to_string();\n\n\n\n debug!(\"Found .well-known, returned {}\", &server.server);\n\n } else {\n\n debug!(\"No .well-known found\");\n\n }\n\n\n\n // If a literal IP or includes port then we short circuit.\n\n if host.parse::<IpAddr>().is_ok() || port.is_some() {\n", "file_path": "src/server_resolver.rs", "rank": 48, "score": 9.39074819176179 }, { "content": " }\n\n\n\n for waker in conn.wakers.drain(..) {\n\n waker.wake()\n\n }\n\n\n\n Poll::Ready(Ok(buf.len()))\n\n }\n\n fn poll_flush(\n\n self: Pin<&mut Self>,\n\n cx: &mut task::Context<'_>,\n\n ) -> Poll<Result<(), std::io::Error>> {\n\n let mut conn = self.inner.lock().expect(\"mutex\");\n\n\n\n if self.direction {\n\n Pin::new(&mut conn.outbound_buffer).poll_flush(cx)\n\n } else {\n\n Pin::new(&mut conn.inbound_buffer).poll_flush(cx)\n\n }\n\n }\n", "file_path": "src/server_resolver.rs", "rank": 49, "score": 9.146100821538942 }, { "content": " direction: bool,\n\n }\n\n\n\n impl TestConnection {\n\n pub fn double_ended() -> (TestConnection, TestConnection) {\n\n let inner: Arc<Mutex<TestConnectionInner>> = Arc::default();\n\n\n\n let a = TestConnection {\n\n inner: inner.clone(),\n\n direction: false,\n\n };\n\n\n\n let b = TestConnection {\n\n inner,\n\n direction: true,\n\n };\n\n\n\n (a, b)\n\n }\n\n }\n", "file_path": "src/server_resolver.rs", "rank": 50, "score": 8.269231993626274 }, { "content": " }\n\n\n\n /// Resolves a Matrix server name to a list of [`Endpoint`]s to try.\n\n ///\n\n /// This will first do a `.well-known` lookup to check if the server has\n\n 
/// delegated Matrix traffic to another host, and then will do the\n\n /// appropriate SRV lookups.\n\n ///\n\n /// *Note*: The [`Endpoint`]s returned include host names that will need to\n\n /// be resolved as normal.\n\n pub async fn resolve_server_name(&self, server_name: &str) -> Result<Vec<Endpoint>, Error> {\n\n let host;\n\n let port;\n\n\n\n // Check if we have a port on the end, being careful of the case where\n\n // `server_name` is a IPv6 literal.\n\n if let Some((maybe_host, maybe_port)) = server_name.rsplit_once(':') {\n\n // There is a colon, so now we just need to check that the right\n\n // hand part is as valid port, i.e. a positive number. (Note that in\n\n // the case of IPv6 literals there would be a `]` in the right hand portion)\n", "file_path": "src/server_resolver.rs", "rank": 51, "score": 7.773058893671346 }, { "content": " )\n\n .await?;\n\n\n\n for endpoint in endpoints {\n\n match try_connecting(&dst, &endpoint).await {\n\n Ok(r) => {\n\n trace!(\n\n \"Connected to host={} port={}\",\n\n &endpoint.host,\n\n &endpoint.port\n\n );\n\n return Ok(r);\n\n }\n\n // Errors here are not unexpected, and we just move on\n\n // with our lives.\n\n Err(e) => debug!(\n\n \"Failed to connect to {} via {}:{} because {}\",\n\n dst.host().expect(\"hostname\"),\n\n endpoint.host,\n\n endpoint.port,\n", "file_path": "src/server_resolver.rs", "rank": 52, "score": 6.9042666120981115 }, { "content": "\n\n let mut results = Vec::with_capacity(count);\n\n\n\n for (_priority, records) in priority_map {\n\n // TODO: Correctly shuffle records\n\n results.extend(records.into_iter().map(|record| Endpoint {\n\n host: record.target().to_utf8(),\n\n port: record.port(),\n\n\n\n host_header: host.to_string(),\n\n tls_name: host.to_string(),\n\n }))\n\n }\n\n\n\n debug!(\n\n \"SRV returned {} results. 
First: host={} port={}\",\n\n count, &results[0].host, &results[0].port\n\n );\n\n\n\n Ok(results)\n", "file_path": "src/server_resolver.rs", "rank": 53, "score": 6.4691250203791375 } ]
Rust
src/terminal/config/input_filter.rs
ovnz/BearLibTerminal.rs
0a963d631125e20eae6f7d9a652ba4e278dc9df9
use std::fmt; use terminal::config::{ConfigPart, escape_config_string}; #[derive(Clone, Debug, Eq, PartialEq, Hash)] pub enum InputFilter { Event{name: InputFilterEvent, both: bool}, Group{group: InputFilterGroup, both: bool}, Alnum{keys: String, both: bool}, } #[derive(Clone, Debug, Eq, PartialEq, Hash)] pub enum InputFilterGroup { Arrow, Keypad, Keyboard, Mouse, System, } #[derive(Clone, Debug, Eq, PartialEq, Hash)] pub enum InputFilterEvent { A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V, W, X, Y, Z, Row0, Row1, Row2, Row3, Row4, Row5, Row6, Row7, Row8, Row9, Space, Minus, Equals, LBracket, RBracket, Backslash, Semicolon, Apostrophe, Grave, Comma, Period, Slash, F1, F2, F3, F4, F5, F6, F7, F8, F9, F10, F11, F12, Return, Escape, Backspace, Tab, Pause, Insert, Home, Pageup, Delete, End, Pagedown, Right, Left, Down, Up, Shift, Control, Pad0, Pad1, Pad2, Pad3, Pad4, Pad5, Pad6, Pad7, Pad8, Pad9, PadDivide, PadMultiply, PadMinus, PadPlus, PadPeriod, PadEnter, MouseLeft, MouseRight, MouseMiddle, MouseX1, MouseX2, MouseMove, MouseScroll, MouseWheel, MouseX, MouseY, MousePixelX, MousePixelY, MouseClicks, Width, Height, CellWidth, CellHeight, Color, Bkcolor, Layer, Composition, Char, Wchar, Event, Fullscreen, Close, Resized, } impl ConfigPart for Vec<InputFilter> { fn to_config_str(&self) -> String { format!("input.filter = [{}];", { let mut elems = "".to_string(); for filter in self { elems = format!("{}{}, ", elems, escape_config_string(&match filter { &InputFilter::Event{ref name, both} => format!("{}{}", name, if both {"+"} else {""}), &InputFilter::Group{ref group, both} => format!("{}{}", group, if both {"+"} else {""}), &InputFilter::Alnum{ref keys, both} => format!("{}{}", keys, if both {"+"} else {""}), })); } elems.pop(); elems.pop(); elems }) } } impl fmt::Display for InputFilterGroup { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { formatter.write_str(match self { &InputFilterGroup::Arrow => "arrow", 
&InputFilterGroup::Keypad => "keypad", &InputFilterGroup::Keyboard => "keyboard", &InputFilterGroup::Mouse => "mouse", &InputFilterGroup::System => "system", }) } } impl fmt::Display for InputFilterEvent { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { formatter.write_str(match self { &InputFilterEvent::A => "A", &InputFilterEvent::B => "B", &InputFilterEvent::C => "C", &InputFilterEvent::D => "D", &InputFilterEvent::E => "E", &InputFilterEvent::F => "F", &InputFilterEvent::G => "G", &InputFilterEvent::H => "H", &InputFilterEvent::I => "I", &InputFilterEvent::J => "J", &InputFilterEvent::K => "K", &InputFilterEvent::L => "L", &InputFilterEvent::M => "M", &InputFilterEvent::N => "N", &InputFilterEvent::O => "O", &InputFilterEvent::P => "P", &InputFilterEvent::Q => "Q", &InputFilterEvent::R => "R", &InputFilterEvent::S => "S", &InputFilterEvent::T => "T", &InputFilterEvent::U => "U", &InputFilterEvent::V => "V", &InputFilterEvent::W => "W", &InputFilterEvent::X => "X", &InputFilterEvent::Y => "Y", &InputFilterEvent::Z => "Z", &InputFilterEvent::Row0 => "0", &InputFilterEvent::Row1 => "1", &InputFilterEvent::Row2 => "2", &InputFilterEvent::Row3 => "3", &InputFilterEvent::Row4 => "4", &InputFilterEvent::Row5 => "5", &InputFilterEvent::Row6 => "6", &InputFilterEvent::Row7 => "7", &InputFilterEvent::Row8 => "8", &InputFilterEvent::Row9 => "9", &InputFilterEvent::Space => "space", &InputFilterEvent::Minus => "minus", &InputFilterEvent::Equals => "equals", &InputFilterEvent::LBracket => "lbracket", &InputFilterEvent::RBracket => "rbracket", &InputFilterEvent::Backslash => "backslash", &InputFilterEvent::Semicolon => "semicolon", &InputFilterEvent::Apostrophe => "apostrophe", &InputFilterEvent::Grave => "grave", &InputFilterEvent::Comma => "comma", &InputFilterEvent::Period => "period", &InputFilterEvent::Slash => "slash", &InputFilterEvent::F1 => "F1", &InputFilterEvent::F2 => "F2", &InputFilterEvent::F3 => "F3", &InputFilterEvent::F4 => "F4", 
&InputFilterEvent::F5 => "F5", &InputFilterEvent::F6 => "F6", &InputFilterEvent::F7 => "F7", &InputFilterEvent::F8 => "F8", &InputFilterEvent::F9 => "F9", &InputFilterEvent::F10 => "F10", &InputFilterEvent::F11 => "F11", &InputFilterEvent::F12 => "F12", &InputFilterEvent::Return => "return", &InputFilterEvent::Escape => "escape", &InputFilterEvent::Backspace => "backspace", &InputFilterEvent::Tab => "tab", &InputFilterEvent::Pause => "pause", &InputFilterEvent::Insert => "insert", &InputFilterEvent::Home => "home", &InputFilterEvent::Pageup => "pageup", &InputFilterEvent::Delete => "delete", &InputFilterEvent::End => "end", &InputFilterEvent::Pagedown => "pagedown", &InputFilterEvent::Right => "right", &InputFilterEvent::Left => "left", &InputFilterEvent::Down => "down", &InputFilterEvent::Up => "up", &InputFilterEvent::Shift => "shift", &InputFilterEvent::Control => "control", &InputFilterEvent::Pad0 => "KP_0", &InputFilterEvent::Pad1 => "KP_1", &InputFilterEvent::Pad2 => "KP_2", &InputFilterEvent::Pad3 => "KP_3", &InputFilterEvent::Pad4 => "KP_4", &InputFilterEvent::Pad5 => "KP_5", &InputFilterEvent::Pad6 => "KP_6", &InputFilterEvent::Pad7 => "KP_7", &InputFilterEvent::Pad8 => "KP_8", &InputFilterEvent::Pad9 => "KP_9", &InputFilterEvent::PadDivide => "KP_divide", &InputFilterEvent::PadMultiply => "KP_multiply", &InputFilterEvent::PadMinus => "KP_minus", &InputFilterEvent::PadPlus => "KP_plus", &InputFilterEvent::PadPeriod => "KP_period", &InputFilterEvent::PadEnter => "KP_enter", &InputFilterEvent::MouseLeft => "mouse_left", &InputFilterEvent::MouseRight => "mouse_right", &InputFilterEvent::MouseMiddle => "mouse_middle", &InputFilterEvent::MouseX1 => "mouse_x1", &InputFilterEvent::MouseX2 => "mouse_x2", &InputFilterEvent::MouseMove => "mouse_move", &InputFilterEvent::MouseScroll => "mouse_scroll", &InputFilterEvent::MouseWheel => "mouse_wheel", &InputFilterEvent::MouseX => "mouse_x", &InputFilterEvent::MouseY => "mouse_y", &InputFilterEvent::MousePixelX => 
"mouse_pixelx", &InputFilterEvent::MousePixelY => "mouse_pixely", &InputFilterEvent::MouseClicks => "mouse_clicks", &InputFilterEvent::Width => "width", &InputFilterEvent::Height => "height", &InputFilterEvent::CellWidth => "cell_width", &InputFilterEvent::CellHeight => "cell_height", &InputFilterEvent::Color => "color", &InputFilterEvent::Bkcolor => "bkcolor", &InputFilterEvent::Layer => "layer", &InputFilterEvent::Composition => "composition", &InputFilterEvent::Char => "char", &InputFilterEvent::Wchar => "wchar", &InputFilterEvent::Event => "event", &InputFilterEvent::Fullscreen => "fullscreen", &InputFilterEvent::Close => "close", &InputFilterEvent::Resized => "resized", }) } }
use std::fmt; use terminal::config::{ConfigPart, escape_config_string}; #[derive(Clone, Debug, Eq, PartialEq, Hash)] pub enum InputFilter { Event{name: InputFilterEvent, both: bool}, Group{group: InputFilterGroup, both: bool}, Alnum{keys: String, both: bool}, } #[derive(Clone, Debug, Eq, PartialEq, Hash)] pub enum InputFilterGroup { Arrow, Keypad, Keyboard, Mouse, System, } #[derive(Clone, Debug, Eq, PartialEq, Hash)] pub enum InputFilterEvent { A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V, W, X, Y, Z, Row0, Row1, Row2, Row3, Row4, Row5, Row6, Row7, Row8, Row9, Space, Minus, Equals, LBracket, RBracket, Backslash, Semicolon, Apostrophe, Grave, Comma, Period, Slash, F1, F2, F3, F4, F5, F6, F7, F8, F9, F10, F11, F12, Return, Escape, Backspace, Tab, Pause, Insert, Home, Pageup, Delete, End, Pagedown, Right, Left, Down, Up, Shift, Control, Pad0, Pad1, Pad2, Pad3, Pad4, Pad5, Pad6, Pad7, Pad8, Pad9, PadDivide, PadMultiply, PadMinus, PadPlus, PadPeriod, PadEnter, MouseLeft, MouseRight, MouseMiddle, MouseX1, MouseX2, MouseMove, MouseScroll, MouseWheel, MouseX, MouseY, MousePixelX, MousePixelY, MouseClicks, Width, Height, CellWidth, CellHeight, Color, Bkcolor, Layer, Composition, Char, Wchar, Event, Fullscreen, Close, Resized, } impl ConfigPart for Vec<InputFilter> { fn to_config_str(&self) -> String { format!("input.filter = [{}];", { let mut elems = "".to_string(); for filter in self { elems = format!("{}{}, ", elems, escape_config_string(&match filter { &InputFilter::Event{ref name, both} => format!("{}{}", name, if both {"+"} else {""}), &InputFilter::Group{ref group, both} => format!("{}{}", group, if both {"+"} else {""}), &InputFilter::Alnum{ref keys, both} => format!("{}{}", keys, if both {"+"} else {""}), })); } elems.pop(); elems.pop(); elems }) } } impl fmt::Display for InputFilterGroup { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { formatter.write_str(match self { &InputFilterGroup::Arrow => "arrow", 
&InputFilterGroup::Keypad => "keypad", &InputFilterGroup::Keyboard => "keyboard", &InputFilterGroup::Mouse => "mouse", &InputFilterGroup::System => "system", }) } } impl fmt::Display for InputFilterEvent {
}
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { formatter.write_str(match self { &InputFilterEvent::A => "A", &InputFilterEvent::B => "B", &InputFilterEvent::C => "C", &InputFilterEvent::D => "D", &InputFilterEvent::E => "E", &InputFilterEvent::F => "F", &InputFilterEvent::G => "G", &InputFilterEvent::H => "H", &InputFilterEvent::I => "I", &InputFilterEvent::J => "J", &InputFilterEvent::K => "K", &InputFilterEvent::L => "L", &InputFilterEvent::M => "M", &InputFilterEvent::N => "N", &InputFilterEvent::O => "O", &InputFilterEvent::P => "P", &InputFilterEvent::Q => "Q", &InputFilterEvent::R => "R", &InputFilterEvent::S => "S", &InputFilterEvent::T => "T", &InputFilterEvent::U => "U", &InputFilterEvent::V => "V", &InputFilterEvent::W => "W", &InputFilterEvent::X => "X", &InputFilterEvent::Y => "Y", &InputFilterEvent::Z => "Z", &InputFilterEvent::Row0 => "0", &InputFilterEvent::Row1 => "1", &InputFilterEvent::Row2 => "2", &InputFilterEvent::Row3 => "3", &InputFilterEvent::Row4 => "4", &InputFilterEvent::Row5 => "5", &InputFilterEvent::Row6 => "6", &InputFilterEvent::Row7 => "7", &InputFilterEvent::Row8 => "8", &InputFilterEvent::Row9 => "9", &InputFilterEvent::Space => "space", &InputFilterEvent::Minus => "minus", &InputFilterEvent::Equals => "equals", &InputFilterEvent::LBracket => "lbracket", &InputFilterEvent::RBracket => "rbracket", &InputFilterEvent::Backslash => "backslash", &InputFilterEvent::Semicolon => "semicolon", &InputFilterEvent::Apostrophe => "apostrophe", &InputFilterEvent::Grave => "grave", &InputFilterEvent::Comma => "comma", &InputFilterEvent::Period => "period", &InputFilterEvent::Slash => "slash", &InputFilterEvent::F1 => "F1", &InputFilterEvent::F2 => "F2", &InputFilterEvent::F3 => "F3", &InputFilterEvent::F4 => "F4", &InputFilterEvent::F5 => "F5", &InputFilterEvent::F6 => "F6", &InputFilterEvent::F7 => "F7", &InputFilterEvent::F8 => "F8", &InputFilterEvent::F9 => "F9", &InputFilterEvent::F10 => "F10", &InputFilterEvent::F11 => 
"F11", &InputFilterEvent::F12 => "F12", &InputFilterEvent::Return => "return", &InputFilterEvent::Escape => "escape", &InputFilterEvent::Backspace => "backspace", &InputFilterEvent::Tab => "tab", &InputFilterEvent::Pause => "pause", &InputFilterEvent::Insert => "insert", &InputFilterEvent::Home => "home", &InputFilterEvent::Pageup => "pageup", &InputFilterEvent::Delete => "delete", &InputFilterEvent::End => "end", &InputFilterEvent::Pagedown => "pagedown", &InputFilterEvent::Right => "right", &InputFilterEvent::Left => "left", &InputFilterEvent::Down => "down", &InputFilterEvent::Up => "up", &InputFilterEvent::Shift => "shift", &InputFilterEvent::Control => "control", &InputFilterEvent::Pad0 => "KP_0", &InputFilterEvent::Pad1 => "KP_1", &InputFilterEvent::Pad2 => "KP_2", &InputFilterEvent::Pad3 => "KP_3", &InputFilterEvent::Pad4 => "KP_4", &InputFilterEvent::Pad5 => "KP_5", &InputFilterEvent::Pad6 => "KP_6", &InputFilterEvent::Pad7 => "KP_7", &InputFilterEvent::Pad8 => "KP_8", &InputFilterEvent::Pad9 => "KP_9", &InputFilterEvent::PadDivide => "KP_divide", &InputFilterEvent::PadMultiply => "KP_multiply", &InputFilterEvent::PadMinus => "KP_minus", &InputFilterEvent::PadPlus => "KP_plus", &InputFilterEvent::PadPeriod => "KP_period", &InputFilterEvent::PadEnter => "KP_enter", &InputFilterEvent::MouseLeft => "mouse_left", &InputFilterEvent::MouseRight => "mouse_right", &InputFilterEvent::MouseMiddle => "mouse_middle", &InputFilterEvent::MouseX1 => "mouse_x1", &InputFilterEvent::MouseX2 => "mouse_x2", &InputFilterEvent::MouseMove => "mouse_move", &InputFilterEvent::MouseScroll => "mouse_scroll", &InputFilterEvent::MouseWheel => "mouse_wheel", &InputFilterEvent::MouseX => "mouse_x", &InputFilterEvent::MouseY => "mouse_y", &InputFilterEvent::MousePixelX => "mouse_pixelx", &InputFilterEvent::MousePixelY => "mouse_pixely", &InputFilterEvent::MouseClicks => "mouse_clicks", &InputFilterEvent::Width => "width", &InputFilterEvent::Height => "height", &InputFilterEvent::CellWidth 
=> "cell_width", &InputFilterEvent::CellHeight => "cell_height", &InputFilterEvent::Color => "color", &InputFilterEvent::Bkcolor => "bkcolor", &InputFilterEvent::Layer => "layer", &InputFilterEvent::Composition => "composition", &InputFilterEvent::Char => "char", &InputFilterEvent::Wchar => "wchar", &InputFilterEvent::Event => "event", &InputFilterEvent::Fullscreen => "fullscreen", &InputFilterEvent::Close => "close", &InputFilterEvent::Resized => "resized", }) }
function_block-full_function
[ { "content": "fn get_key(released: bool, key: KeyCode, ctrl: bool, shift: bool) -> Event {\n\n\tif released {\n\n\t\tEvent::KeyReleased{\n\n\t\t\tkey: key,\n\n\t\t\tctrl: ctrl,\n\n\t\t\tshift: shift,\n\n\t\t}\n\n\t} else {\n\n\t\tEvent::KeyPressed{\n\n\t\t\tkey: key,\n\n\t\t\tctrl: ctrl,\n\n\t\t\tshift: shift,\n\n\t\t}\n\n\t}\n\n}\n", "file_path": "src/terminal/mod.rs", "rank": 0, "score": 195174.98379360524 }, { "content": "/// Sets the foreground color before calling the function and resets it afterwards.\n\npub fn with_foreground<F: FnOnce()>(color: Color, callback: F) {\n\n\tlet current = ffi::state_color(ffi::TK_COLOR);\n\n\tset_foreground(color);\n\n\tcallback();\n\n\tffi::color(current);\n\n}\n\n\n", "file_path": "src/terminal/mod.rs", "rank": 1, "score": 170290.7111508099 }, { "content": "/// Sets the background color before calling the function and resets it afterwards.\n\npub fn with_background<F: FnOnce()>(color: Color, callback: F) {\n\n\tlet current = ffi::state_color(ffi::TK_BKCOLOR);\n\n\tset_background(color);\n\n\tcallback();\n\n\tffi::bkcolor(current);\n\n}\n\n\n", "file_path": "src/terminal/mod.rs", "rank": 2, "score": 170290.7111508099 }, { "content": "/// Sets the foreground and background color before calling the function and resets them afterwards.\n\npub fn with_colors<F: FnOnce()>(fg: Color, bg: Color, callback: F) {\n\n\twith_foreground(fg, ||\n\n\t\twith_background(bg, ||\n\n\t\t\tcallback()\n\n\t\t)\n\n\t);\n\n}\n\n\n", "file_path": "src/terminal/mod.rs", "rank": 3, "score": 164887.0404611221 }, { "content": "/// Check, whether the terminal is currently full-screen.\n\npub fn fullscreen() -> bool {\n\n\tffi::check(ffi::TK_FULLSCREEN)\n\n}\n\n\n", "file_path": "src/terminal/state/mod.rs", "rank": 4, "score": 163694.18533474975 }, { "content": "/// Check, whether a [`KeyCode`](../enum.KeyCode.html)-specified key is currently pressed.\n\npub fn key_pressed(key: KeyCode) -> bool {\n\n\tffi::check(match key {\n\n\t\tKeyCode::A => 
ffi::TK_A,\n\n\t\tKeyCode::B => ffi::TK_B,\n\n\t\tKeyCode::C => ffi::TK_C,\n\n\t\tKeyCode::D => ffi::TK_D,\n\n\t\tKeyCode::E => ffi::TK_E,\n\n\t\tKeyCode::F => ffi::TK_F,\n\n\t\tKeyCode::G => ffi::TK_G,\n\n\t\tKeyCode::H => ffi::TK_H,\n\n\t\tKeyCode::I => ffi::TK_I,\n\n\t\tKeyCode::J => ffi::TK_J,\n\n\t\tKeyCode::K => ffi::TK_K,\n\n\t\tKeyCode::L => ffi::TK_L,\n\n\t\tKeyCode::M => ffi::TK_M,\n\n\t\tKeyCode::N => ffi::TK_N,\n\n\t\tKeyCode::O => ffi::TK_O,\n\n\t\tKeyCode::P => ffi::TK_P,\n\n\t\tKeyCode::Q => ffi::TK_Q,\n\n\t\tKeyCode::R => ffi::TK_R,\n", "file_path": "src/terminal/state/mod.rs", "rank": 5, "score": 155069.66129125818 }, { "content": "/// Escapes `'`s and wraps the strings with `'`s, as per [this](http://foo.wyrd.name/en:bearlibterminal:reference:configuration#configuration_string_format).\n\n///\n\n/// # Examples\n\n/// ```\n\n/// # use bear_lib_terminal::terminal::config::escape_config_string;\n\n/// assert_eq!(escape_config_string(&\"\".to_string()), \"''\");\n\n/// ```\n\n/// ```\n\n/// # use bear_lib_terminal::terminal::config::escape_config_string;\n\n/// assert_eq!(escape_config_string(&\"'\".to_string()), \"''''\");\n\n/// ```\n\n/// ```\n\n/// # use bear_lib_terminal::terminal::config::escape_config_string;\n\n/// assert_eq!(escape_config_string(&\"asdf'asdf\".to_string()), \"'asdf''asdf'\");\n\n/// ```\n\npub fn escape_config_string(cfg: &String) -> String {\n\n\tformat!(\"'{}'\", cfg.replace(\"'\", \"''\"))\n\n}\n", "file_path": "src/terminal/config/mod.rs", "rank": 6, "score": 143695.725330766 }, { "content": "/// Enable or disable composition, (dis)allowing for \"stacking\" tiles on top of each other in the same cell.\n\n///\n\n/// For details and other uses consult the documentation for the\n\n/// [`terminal_composition()` C API function](http://foo.wyrd.name/en:bearlibterminal:reference#composition).\n\npub fn composition(enable: bool) {\n\n\tffi::composition(enable);\n\n}\n\n\n", "file_path": "src/terminal/mod.rs", "rank": 7, "score": 
138576.67706857086 }, { "content": "/// Creates the terminal window of the specified size with the specified title, without showing it.\n\n/// To show the window use the [`refresh()`](fn.refresh.html) function.\n\n///\n\n/// Equivalent to the [`terminal_open()` C API function](http://foo.wyrd.name/en:bearlibterminal:reference#open) with a subsequent call to\n\n/// the [`terminal_set()` C API function](http://foo.wyrd.name/en:bearlibterminal:reference#set) with the title.\n\npub fn open(title: &str, width: u32, height: u32) {\n\n\tffi::open();\n\n\tset(Window::empty().size(Size::new(width as i32, height as i32)).title(title.to_string()));\n\n}\n\n\n", "file_path": "src/terminal/mod.rs", "rank": 8, "score": 133083.43185440087 }, { "content": "/// Most-recent-event-produced unicode character.\n\npub fn char() -> char {\n\n\tchar::from_u32(ffi::state(ffi::TK_WCHAR) as u32).unwrap()\n\n}\n\n\n", "file_path": "src/terminal/state/mod.rs", "rank": 9, "score": 131050.59095840182 }, { "content": "/// Closes the terminal window, causing all subsequent functions from the module (apart from [`open()`](fn.open.html)) to fail\n\n///\n\n/// Equivalent to the [`terminal_close()` C API function](http://foo.wyrd.name/en:bearlibterminal:reference#close).\n\npub fn close() {\n\n\tffi::close();\n\n}\n\n\n", "file_path": "src/terminal/mod.rs", "rank": 10, "score": 126205.2394943526 }, { "content": "/// Get last dequeued event.\n\n///\n\n/// Returns `None` iff no events have been dequeued yet.\n\npub fn event() -> Option<Event> {\n\n\tterminal::to_event(ffi::state(ffi::TK_EVENT))\n\n}\n\n\n", "file_path": "src/terminal/state/mod.rs", "rank": 11, "score": 124739.68063593173 }, { "content": "/// Sets the current background color, which will affect all the output functions called later.\n\n///\n\n/// This is equivalent to the [`terminal_bkcolor()` C API function](http://foo.wyrd.name/en:bearlibterminal:reference#bkcolor).\n\npub fn set_background(color: Color) 
{\n\n\tffi::bkcolor(to_color_t(color));\n\n}\n\n\n", "file_path": "src/terminal/mod.rs", "rank": 12, "score": 124182.07901922333 }, { "content": "/// Sets the current foreground color, which will affect all the output functions called later.\n\n///\n\n/// This is equivalent to the [`terminal_color()` C API function](http://foo.wyrd.name/en:bearlibterminal:reference#color).\n\npub fn set_foreground(color: Color) {\n\n\tffi::color(to_color_t(color));\n\n}\n\n\n", "file_path": "src/terminal/mod.rs", "rank": 13, "score": 124179.85284452664 }, { "content": "/// Sets the current foreground and background color, which will affect all the output functions called later.\n\n///\n\n/// This is equivalent to calling [`set_background()`](fn.set_background.html) and [`set_foreground()`](fn.set_foreground.html) in succession.\n\npub fn set_colors(fg: Color, bg: Color) {\n\n\tset_foreground(fg);\n\n\tset_background(bg);\n\n}\n\n\n", "file_path": "src/terminal/mod.rs", "rank": 14, "score": 119183.48946659252 }, { "content": "/// Check, whether the next [`read_event()`](fn.read_event.html) call will return `Some`.\n\n///\n\n/// Consult the [documentation for the `terminal_has_input()` C API function](http://foo.wyrd.name/en:bearlibterminal:reference#has_input).\n\npub fn has_input() -> bool {\n\n\tffi::has_input()\n\n}\n\n\n", "file_path": "src/terminal/mod.rs", "rank": 15, "score": 118813.17837795781 }, { "content": "/// Returns the next event in the queue if it's available, otherwise returns `None`.\n\n///\n\n/// If one intends on waiting for events, the [`wait_event()`](fn.wait_event.html) function is recommended.\n\n///\n\n/// This is equivalent to the behaviour mentioned in the\n\n/// [docs for the `terminal_read()` C API function](http://foo.wyrd.name/en:bearlibterminal:reference#read), but not the function's behaviour itself.\n\npub fn read_event() -> Option<Event> {\n\n\tif !has_input() {\n\n\t\tNone\n\n\t} else {\n\n\t\twait_event()\n\n\t}\n\n}\n\n\n", "file_path": 
"src/terminal/mod.rs", "rank": 16, "score": 118063.7082369013 }, { "content": "/// Returns the next event in the queue if it's available without popping it therefrom, otherwise returns `None`.\n\n///\n\n/// If one intends on waiting for events, the [`wait_event()`](fn.wait_event.html) function is recommended.\n\n///\n\n/// If one intends on popping the events, the [`read_event()`](fn.read_event.html) function is recommended.\n\n///\n\n/// If one intends on just checking if an event is ready, the [`has_input()`](fn.has_input.html) function is recommended.\n\n///\n\n/// This is equivalent to the [`terminal_peek()` C API function](http://foo.wyrd.name/en:bearlibterminal:reference#peek).\n\npub fn peek_event() -> Option<Event> {\n\n\tmatch ffi::peek() {\n\n\t\t0 => None,\n\n\t\tevent => to_event(event),\n\n\t}\n\n}\n\n\n", "file_path": "src/terminal/mod.rs", "rank": 17, "score": 118063.63903341786 }, { "content": "/// Returns the next event, blocks until one's available.\n\n///\n\n/// This is equivalent to the [`terminal_read()` C API function](http://foo.wyrd.name/en:bearlibterminal:reference#read).\n\npub fn wait_event() -> Option<Event> {\n\n\tto_event(ffi::read())\n\n}\n\n\n", "file_path": "src/terminal/mod.rs", "rank": 18, "score": 118061.413794616 }, { "content": "/// Get the currently selected layer.\n\n///\n\n/// Layer is selected by using the [`terminal::layer()`](../fn.layer.html) function.\n\npub fn layer() -> i32 {\n\n\tffi::state(ffi::TK_LAYER)\n\n}\n\n\n", "file_path": "src/terminal/state/mod.rs", "rank": 19, "score": 116474.14744741884 }, { "content": "/// Get the currently selected background colour.\n\n///\n\n/// Background colours are changed by using the [`terminal::*_background()`](../index.html) function family.\n\npub fn background() -> Color {\n\n\tterminal::from_color_t(ffi::state(ffi::TK_BKCOLOR) as ffi::ColorT)\n\n}\n\n\n", "file_path": "src/terminal/state/mod.rs", "rank": 20, "score": 114965.83700401997 }, { "content": "/// Get the currently 
selected foreground colour.\n\n///\n\n/// Foreground colours are changed by using the [`terminal::*_foreground()`](../index.html) function family.\n\npub fn foreground() -> Color {\n\n\tterminal::from_color_t(ffi::state(ffi::TK_COLOR) as ffi::ColorT)\n\n}\n\n\n", "file_path": "src/terminal/state/mod.rs", "rank": 21, "score": 114965.83700401997 }, { "content": "/// Prints the specified character to the specified pixel-offsetted location, gradient-colouring it from the corners.\n\n///\n\n/// For details see the docs for the [`terminal_put_ext()` C API function](http://foo.wyrd.name/en:bearlibterminal:reference#put_ext).\n\npub fn put_ext(pos: Point, offset: Point, cell: char, corners: &Vec<Color>) {\n\n\tffi::put_ext(pos.x, pos.y, offset.x, offset.y, cell as i32, &corners.iter().cloned().map(to_color_t).collect::<Vec<_>>()[..]);\n\n}\n\n\n", "file_path": "src/terminal/mod.rs", "rank": 22, "score": 114955.42547047965 }, { "content": "/// Sleep for the specified amount of milliseconds.\n\n///\n\n/// See the [`terminal_delay()` C API function's documentation](http://foo.wyrd.name/en:bearlibterminal:reference#delay).\n\npub fn delay(period: i32) {\n\n\tffi::delay(period)\n\n}\n\n\n\n\n\n/// Infinite iterator over Terminal events, instantiated by [`events()`](fn.events.html).\n\n///\n\n/// Yields `None` iff BLT is closed.\n\n///\n\n/// # Examples\n\n///\n\n/// The \"standard\" way:\n\n///\n\n/// ```ignore\n\n/// while let Some(event) = terminal::wait_event() {\n\n/// \t// ...\n\n/// }\n\n/// ```\n\n///\n\n/// Is equivalent to, but not as good as:\n", "file_path": "src/terminal/mod.rs", "rank": 23, "score": 113453.39404728237 }, { "content": "/// Selects the current layer.\n\n///\n\n/// The layer `index` must be between 0 and 255.\n\n/// For more information consult the documentation for the [`terminal_layer()` C API function](http://foo.wyrd.name/en:bearlibterminal:reference#layer).\n\npub fn layer(index: i32) {\n\n\tffi::layer(index);\n\n}\n\n\n", "file_path": 
"src/terminal/mod.rs", "rank": 24, "score": 113331.27524242879 }, { "content": "/// Invoke the [`terminal_set()` C API function](http://foo.wyrd.name/en:bearlibterminal:reference#set) with the argument's `config_str`.\n\n///\n\n/// Returns `false` iff the config string is malformed.\n\n///\n\n/// For build-in [`ConfigPart`](config/trait.ConfigPart.html)s see the [`config`](config/index.html) module.\n\npub fn set<T: ConfigPart>(cfg: T) -> bool {\n\n\tffi::set(&*&cfg.to_config_str())\n\n}\n\n\n", "file_path": "src/terminal/mod.rs", "rank": 25, "score": 112954.48806121721 }, { "content": "/// Equivalent to [`put()`](fn.put.html) with a `Point` constructed from the first two arguments.\n\npub fn put_xy(x: i32, y: i32, cell: char) {\n\n\tffi::put(x, y, cell as i32);\n\n}\n\n\n", "file_path": "src/terminal/mod.rs", "rank": 26, "score": 112332.63558739361 }, { "content": "/// Get the background color in the specified coordinates.\n\n///\n\n/// Consult the documentation for the [`terminal_pick_bkcolor()` C API function](http://foo.wyrd.name/en:bearlibterminal:reference#pick_bkcolor)\n\n/// for the same amount of information.\n\npub fn pick_background_color(point: Point) -> Color {\n\n\tfrom_color_t(ffi::pick_bkcolor(point.x, point.y))\n\n}\n\n\n", "file_path": "src/terminal/mod.rs", "rank": 27, "score": 110362.08469608118 }, { "content": "fn get_window_resize() -> Event {\n\n\tEvent::Resize{\n\n\t\twidth: ffi::state(ffi::TK_WIDTH),\n\n\t\theight: ffi::state(ffi::TK_HEIGHT),\n\n\t}\n\n}\n\n\n", "file_path": "src/terminal/mod.rs", "rank": 28, "score": 105279.84484592627 }, { "content": "fn get_mouse_scroll() -> Event {\n\n\tEvent::MouseScroll{\n\n\t\tdelta: ffi::state(ffi::TK_MOUSE_WHEEL),\n\n\t}\n\n}\n\n\n", "file_path": "src/terminal/mod.rs", "rank": 29, "score": 105108.61817435992 }, { "content": "fn get_mouse_move() -> Event {\n\n\tEvent::MouseMove{\n\n\t\tx: ffi::state(ffi::TK_MOUSE_X),\n\n\t\ty: ffi::state(ffi::TK_MOUSE_Y),\n\n\t}\n\n}\n\n\n", "file_path": 
"src/terminal/mod.rs", "rank": 30, "score": 105108.61817435992 }, { "content": "/// Prints the specified character to the specified location.\n\n///\n\n/// Equivalent to the [`terminal_put()` C API function](http://foo.wyrd.name/en:bearlibterminal:reference#put).\n\npub fn put(point: Point, cell: char) {\n\n\tffi::put(point.x, point.y, cell as i32);\n\n}\n\n\n", "file_path": "src/terminal/mod.rs", "rank": 31, "score": 102823.86150403626 }, { "content": "/// Returns an instance of [`EventIterator`](struct.EventIterator.html), an infinite iterator over Terminal events.\n\npub fn events() -> EventIterator {\n\n\tEventIterator\n\n}\n\n\n", "file_path": "src/terminal/mod.rs", "rank": 32, "score": 102498.14201137537 }, { "content": "fn to_key_event(code: i32) -> Option<Event> {\n\n\tlet key = code & !ffi::TK_KEY_RELEASED;\n\n\tlet released = (code & ffi::TK_KEY_RELEASED) == ffi::TK_KEY_RELEASED;\n\n\n\n\tmatch key {\n\n\t\tffi::TK_SHIFT => Some(if released {Event::ShiftReleased} else {Event::ShiftPressed}),\n\n\t\tffi::TK_CONTROL => Some(if released {Event::ControlReleased} else {Event::ControlPressed}),\n\n\t\tffi::TK_ALT => Some(if released {Event::AltReleased} else {Event::AltPressed}),\n\n\t\tkey => {\n\n\t\t\tlet ctrl = ffi::check(ffi::TK_CONTROL);\n\n\t\t\tlet shift = ffi::check(ffi::TK_SHIFT);\n\n\n\n\t\t\tmatch to_keycode(key) {\n\n\t\t\t\tSome(converted) => Some(get_key(released, converted, ctrl, shift)),\n\n\t\t\t\tNone => None,\n\n\t\t\t}\n\n\t\t}\n\n\t}\n\n}\n\n\n", "file_path": "src/terminal/mod.rs", "rank": 33, "score": 102220.37269417245 }, { "content": "/// Get the color of the character in the specified coordinates on the specified layer.\n\n///\n\n/// Consult the documentation for the [`terminal_pick_color()` C API function](http://foo.wyrd.name/en:bearlibterminal:reference#pick_color),\n\n/// despite its laconicity.\n\npub fn pick_foreground_color(point: Point, index: i32) -> Color {\n\n\tfrom_color_t(ffi::pick_color(point.x, point.y, 
index))\n\n}\n\n\n", "file_path": "src/terminal/mod.rs", "rank": 34, "score": 101938.34580206218 }, { "content": "/// Get the character in the specified coordinates on the specified layer.\n\n///\n\n/// Returns 0 if the cell is empty on the specified layer.\n\n///\n\n/// Consult the documentation for the [`terminal_pick()` C API function](http://foo.wyrd.name/en:bearlibterminal:reference#pick) for more data.\n\npub fn pick(point: Point, index: i32) -> char {\n\n\tchar::from_u32(ffi::pick(point.x, point.y, index) as u32).unwrap()\n\n}\n\n\n", "file_path": "src/terminal/mod.rs", "rank": 35, "score": 98462.2677875415 }, { "content": "fn to_color_t(color: Color) -> ColorT {\n\n\t(\n\n\t\t((color.alpha as ColorT) << 24) |\n\n\t\t((color.red as ColorT) << 16) |\n\n\t\t((color.green as ColorT) << 8) |\n\n\t\t(color.blue as ColorT)\n\n\t)\n\n}\n\n\n", "file_path": "src/terminal/mod.rs", "rank": 36, "score": 94582.29416497305 }, { "content": "fn from_color_t(color: ColorT) -> Color {\n\n\tlet alpha = ((color >> 24) & 0xFF) as u8;\n\n\tlet red = ((color >> 16) & 0xFF) as u8;\n\n\tlet green = ((color >> 8) & 0xFF) as u8;\n\n\tlet blue = (color & 0xFF) as u8;\n\n\n\n\tColor::from_rgba(red, green, blue, alpha)\n\n}\n\n\n", "file_path": "src/terminal/mod.rs", "rank": 37, "score": 94582.29416497305 }, { "content": "/// Amount of fast consecutive clicks for the [`Event::KeyPressed`](../../enum.Event.html#variant.KeyPressed)\n\n/// with [`key: Mouse*`](../../enum.Event.html#variant.KeyPressed).\n\npub fn clicks() -> i32 {\n\n\tffi::state(ffi::TK_MOUSE_CLICKS)\n\n}\n", "file_path": "src/terminal/state/mouse.rs", "rank": 38, "score": 94389.43691417396 }, { "content": "/// Amount of steps the mouse wheel scrolled in the last [`Event::MouseScroll`](../../enum.Event.html#variant.MouseScroll).\n\n///\n\n/// Negative values indicate an \"up\" scroll.\n\n///\n\n/// Positive values indicate a \"down\" scroll.\n\npub fn scroll() -> i32 {\n\n\tffi::state(ffi::TK_MOUSE_WHEEL)\n\n}\n\n\n", 
"file_path": "src/terminal/state/mouse.rs", "rank": 39, "score": 94385.29578610694 }, { "content": "/// Get the mouse cursor's position in cells.\n\npub fn position() -> Point {\n\n\tPoint::new(ffi::state(ffi::TK_MOUSE_X), ffi::state(ffi::TK_MOUSE_Y))\n\n}\n\n\n", "file_path": "src/terminal/state/mouse.rs", "rank": 40, "score": 94379.01577726947 }, { "content": "/// Reads up to `max` characters without parsing, starting at the specified coordinates.\n\n///\n\n/// Returns `None` if the user closed the window or pressed `Escape`,\n\n/// `Some` containing the read string otherwise.\n\n///\n\n/// The read string will contain up to `max` characters.\n\n///\n\n/// The string being read will be kept on-screen only *during* the reading process, the scene will be restored before returning (needs a refresh, though).\n\n///\n\n/// Refer to the [documentation for the for the `terminal_read_str()` C API function](http://foo.wyrd.name/en:bearlibterminal:reference#read_str)\n\n/// for specific information.\n\npub fn read_str(point: Point, max: i32) -> Option<String> {\n\n\tffi::read_str(point.x, point.y, max)\n\n}\n\n\n", "file_path": "src/terminal/mod.rs", "rank": 41, "score": 92762.83800779647 }, { "content": "/// Get the mouse cursor's position in pixels.\n\npub fn pixel_position() -> Point {\n\n\tPoint::new(ffi::state(ffi::TK_MOUSE_PIXEL_X), ffi::state(ffi::TK_MOUSE_PIXEL_Y))\n\n}\n\n\n", "file_path": "src/terminal/state/mouse.rs", "rank": 42, "score": 91748.36817463745 }, { "content": "/// Equivalent to [`print_ext()`](fn.print_ext.html) with a `Rect` constructed from the first four arguments.\n\npub fn print_ext_xy(x: i32, y: i32, w: i32, h: i32, align: Alignment, value: &str) {\n\n\tprint_ext(Rect::from_values(x, y, w, h), align, value);\n\n}\n\n\n", "file_path": "src/terminal/mod.rs", "rank": 43, "score": 83506.72842262537 }, { "content": "fn to_event(code: i32) -> Option<Event> {\n\n\tmatch code {\n\n\t\tffi::TK_CLOSE => Some(Event::Close),\n\n\t\tffi::TK_RESIZED => 
Some(get_window_resize()),\n\n\t\tffi::TK_MOUSE_MOVE => Some(get_mouse_move()),\n\n\t\tffi::TK_MOUSE_SCROLL => Some(get_mouse_scroll()),\n\n\t\t_ => to_key_event(code),\n\n\t}\n\n}\n\n\n", "file_path": "src/terminal/mod.rs", "rank": 44, "score": 79961.1653106348 }, { "content": "/// Flushes all changes made to the screen; also shows the window after the [`open()`](fn.open.html) call\n\n///\n\n/// Equivalent to the [`terminal_refresh()` C API function](http://foo.wyrd.name/en:bearlibterminal:reference#refresh).\n\npub fn refresh() {\n\n\tffi::refresh();\n\n}\n\n\n", "file_path": "src/terminal/mod.rs", "rank": 45, "score": 74529.96104824309 }, { "content": "/// Equivalent to [`print()`](fn.print.html) with a `Point` constructed from the first two arguments.\n\npub fn print_xy(x: i32, y: i32, value: &str) {\n\n\tprint(Point::new(x, y), value);\n\n}\n\n\n", "file_path": "src/terminal/mod.rs", "rank": 46, "score": 73748.2383374559 }, { "content": "/// Get the terminal size in cells.\n\npub fn size() -> Size {\n\n\tSize::new(ffi::state(ffi::TK_WIDTH), ffi::state(ffi::TK_HEIGHT))\n\n}\n\n\n", "file_path": "src/terminal/state/mod.rs", "rank": 47, "score": 68865.56235412777 }, { "content": "/// Sets the current layer's crop area.\n\n///\n\n/// <sub>I don't get it either, refer the [`terminal_crop()` C API function's documentation](http://foo.wyrd.name/en:bearlibterminal:reference#crop).</sub>\n\npub fn crop(rect: Rect) {\n\n\tffi::crop(rect.top_left.x, rect.top_left.y, rect.size.width, rect.size.height);\n\n}\n\n\n", "file_path": "src/terminal/mod.rs", "rank": 48, "score": 67308.41181307548 }, { "content": "/// Get cell size in pixels.\n\npub fn cell_size() -> Size {\n\n\tSize::new(ffi::state(ffi::TK_CELL_WIDTH), ffi::state(ffi::TK_CELL_HEIGHT))\n\n}\n\n\n", "file_path": "src/terminal/state/mod.rs", "rank": 49, "score": 67158.82081594493 }, { "content": "/// Calculate the argument's width/height without printing it.\n\n///\n\n/// Whether the function returns the width or 
the height depends on the presence of the `bbox` tag in the string.\n\n///\n\n/// Refer to the [docs for the `terminal_measure()` C API function](http://foo.wyrd.name/en:bearlibterminal:reference#measure), note,\n\n/// that the return type therein is incorrect.\n\npub fn measure(value: &str) -> i32 {\n\n\tffi::measure(value)\n\n}\n\n\n", "file_path": "src/terminal/mod.rs", "rank": 50, "score": 64255.33559298748 }, { "content": "/// Clears the screen (either partailly or fully)\n\n///\n\n/// If `area` is `None`, clears the entire screen, all layers\n\n/// (identically to the [`terminal_clear()` C API function](http://foo.wyrd.name/en:bearlibterminal:reference#clear).\n\n///\n\n/// Otherwise clears specified area on the current layer, as would the\n\n/// [`terminal_clear_area()` C API function](http://foo.wyrd.name/en:bearlibterminal:reference#clear_area).\n\npub fn clear(area: Option<Rect>) {\n\n\tmatch area {\n\n\t\tSome(rect) => ffi::clear_area(rect.top_left.x, rect.top_left.y, rect.size.width, rect.size.height),\n\n\t\tNone => ffi::clear(),\n\n\t}\n\n}\n\n\n", "file_path": "src/terminal/mod.rs", "rank": 51, "score": 64248.0612867052 }, { "content": "/// Trait for generating BLT configuration strings.\n\n///\n\n/// Those will get fed directly to the [`terminal_set()` C API function](http://foo.wyrd.name/en:bearlibterminal:reference#set).\n\npub trait ConfigPart {\n\n\tfn to_config_str(&self) -> String;\n\n}\n\n\n\n\n", "file_path": "src/terminal/config/mod.rs", "rank": 52, "score": 63184.692718068676 }, { "content": "/// Prints the specified string to the specified location, formatting it along the way.\n\n///\n\n/// For formatting spec see the docs for the [`terminal_print()` C API function](http://foo.wyrd.name/en:bearlibterminal:reference#print).\n\npub fn print(point: Point, value: &str) {\n\n\tlet _ = ffi::print(point.x, point.y, value);\n\n}\n\n\n", "file_path": "src/terminal/mod.rs", "rank": 53, "score": 61478.999304456986 }, { "content": "/// Prints the 
specified string to the specified location with text wrapping and alignment.\n\n/// \n\n/// See the docs for the [`terminal_print_ext()` C API function](http://foo.wyrd.name/en:bearlibterminal:reference#print).\n\npub fn print_ext(rect: Rect, align: Alignment, value: &str) {\n\n\tlet _ = ffi::print_ext(\n\n\t\trect.top_left.x, \n\n\t\trect.top_left.y, \n\n\t\trect.size.width, \n\n\t\trect.size.height, \n\n\t\talign.into(), \n\n\t\tvalue\n\n\t);\n\n}\n\n\n", "file_path": "src/terminal/mod.rs", "rank": 54, "score": 55344.89554208306 }, { "content": "fn resize_frame(term_size: Size) -> Rect {\n\n Rect::from_values(\n\n PADDING_X,\n\n PADDING_Y + PADDING_TOP,\n\n term_size.width - PADDING_X * 2,\n\n term_size.height - PADDING_Y * 2 - PADDING_TOP,\n\n )\n\n}", "file_path": "examples/text_alignment.rs", "rank": 55, "score": 52384.14497735742 }, { "content": "fn to_keycode(code: i32) -> Option<KeyCode> {\n\n\tmatch code {\n\n\t\tffi::TK_A => Some(KeyCode::A),\n\n\t\tffi::TK_B => Some(KeyCode::B),\n\n\t\tffi::TK_C => Some(KeyCode::C),\n\n\t\tffi::TK_D => Some(KeyCode::D),\n\n\t\tffi::TK_E => Some(KeyCode::E),\n\n\t\tffi::TK_F => Some(KeyCode::F),\n\n\t\tffi::TK_G => Some(KeyCode::G),\n\n\t\tffi::TK_H => Some(KeyCode::H),\n\n\t\tffi::TK_I => Some(KeyCode::I),\n\n\t\tffi::TK_J => Some(KeyCode::J),\n\n\t\tffi::TK_K => Some(KeyCode::K),\n\n\t\tffi::TK_L => Some(KeyCode::L),\n\n\t\tffi::TK_M => Some(KeyCode::M),\n\n\t\tffi::TK_N => Some(KeyCode::N),\n\n\t\tffi::TK_O => Some(KeyCode::O),\n\n\t\tffi::TK_P => Some(KeyCode::P),\n\n\t\tffi::TK_Q => Some(KeyCode::Q),\n\n\t\tffi::TK_R => Some(KeyCode::R),\n", "file_path": "src/terminal/mod.rs", "rank": 56, "score": 51861.00347964936 }, { "content": "/// Construct a bitmap font override segment repr.\n\npub fn bitmap<T: AsRef<Path>>(origin: Origin, path: T) -> Bitmap {\n\n\tBitmap{\n\n\t\torigin: origin,\n\n\t\tpath: path.as_ref().to_str().unwrap().to_string(),\n\n\t\tsize: None,\n\n\t\tresize: None,\n\n\t\tresize_filter: 
None,\n\n\t\tresize_mode: None,\n\n\t\traw_size: None,\n\n\t\tcodepage: None,\n\n\t\talign: None,\n\n\t\tspacing: None,\n\n\t}\n\n}\n\n\n", "file_path": "src/terminal/config/font.rs", "rank": 57, "score": 51402.11071399472 }, { "content": "/// Construct a TrueType font override segment repr.\n\n///\n\n/// If `title_size.width` is `0`, the resulting `size` prop will be `size=<title_size.width>` as opposed to `size=<title_size>`.\n\npub fn true_type<T: AsRef<Path>>(origin: Origin, path: T, tile_size: Size) -> TrueType {\n\n\tTrueType{\n\n\t\torigin: origin,\n\n\t\tpath: path.as_ref().to_str().unwrap().to_string(),\n\n\t\tsize: tile_size,\n\n\t\tsize_reference: None,\n\n\t\tmode: None,\n\n\t\tcodepage: None,\n\n\t\talign: None,\n\n\t\tspacing: None,\n\n\t}\n\n}\n\n\n\n\n\n/// The origin for the font (the part before `:` in the config string).\n\n#[derive(Clone, Debug, Eq, PartialEq, Hash)]\n\npub enum Origin {\n\n\t/// `font`\n\n\tRoot,\n\n\t/// `[name] font`\n", "file_path": "src/terminal/config/font.rs", "rank": 58, "score": 45286.81610492265 }, { "content": "fn main() {\n\n\tterminal::open(\"Simple example\", 80, 30);\n\n\tterminal::set(config::Window::empty().resizeable(true));\n\n\n\n\tterminal::print_xy(0, 0, \"Your mom\");\n\n\tterminal::with_colors(Color::from_rgb(0xFA, 0xAF, 0x29), Color::from_rgb(0x05, 0x50, 0xD6), || terminal::print_xy(0, 1, \"Colerd\"));\n\n\tfor (i, c) in \"Coloured letters with pixel-offset!\".chars().enumerate() {\n\n\t\tterminal::put_ext(Point::new(i as i32, 2), Point::new(i as i32, i as i32), c, &vec![Color::from_rgb(0xFF, 0x00, 0x00),\n\n\t\t Color::from_rgb(0x00, 0xFF, 0x00),\n\n\t\t Color::from_rgb(0x00, 0x00, 0xFF),\n\n\t\t Color::from_rgb(0xFF, 0xFF, 0xFF)]);\n\n\t}\n\n\tterminal::refresh();\n\n\n\n\tterminal::set_foreground(Color::from_rgb(0xFF, 0xFF, 0xFF));\n\n\tif let Some(string) = terminal::read_str(Point::new(0, 5), 30) {\n\n\t\tterminal::print_xy(0, 5, &*&string);\n\n\t}\n\n\tterminal::refresh();\n\n\tfor event in 
terminal::events() {\n", "file_path": "examples/simple.rs", "rank": 59, "score": 36128.37955269644 }, { "content": "fn main() {\n\n let lorem_ipsum =\n\n \"[c=orange]Lorem[/c] ipsum dolor sit amet, consectetur adipisicing elit, \\\n\n sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. \\\n\n [c=orange]Ut[/c] enim ad minim veniam, quis nostrud exercitation ullamco \\\n\n laboris nisi ut aliquip ex ea commodo consequat. [c=orange]Duis[/c] aute \\\n\n irure dolor in reprehenderit in voluptate velit esse cillum dolore eu \\\n\n fugiat nulla pariatur. [c=orange]Excepteur[/c] sint occaecat cupidatat \\\n\n non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.\";\n\n \n\n terminal::open(\"Text Alignment Demo\", INIT_TERM_W as u32, INIT_TERM_H as u32);\n\n terminal::set(config::Window::empty().resizeable(true));\n\n terminal::composition(true);\n\n \n\n let mut frame = resize_frame(Size::new(INIT_TERM_W, INIT_TERM_H));\n\n \n\n let mut h_align = HAlign::Left;\n\n let mut v_align = VAlign::Top;\n\n \n\n loop {\n", "file_path": "examples/text_alignment.rs", "rank": 60, "score": 34967.52762978513 }, { "content": "/// An RGBA colour repr.\n\n#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)]\n\npub struct Color {\n\n\tpub red: u8,\n\n\tpub green: u8,\n\n\tpub blue: u8,\n\n\tpub alpha: u8,\n\n}\n\n\n\nimpl Color {\n\n\t/// Equivalent to [`from_rgba()`](fn.from_rgba.html) with full opacity\n\n\tpub fn from_rgb(red: u8, green: u8, blue: u8) -> Self {\n\n\t\tColor{\n\n\t\t\tred: red,\n\n\t\t\tgreen: green,\n\n\t\t\tblue: blue,\n\n\t\t\talpha: 0xFF,\n\n\t\t}\n\n\t}\n\n\n", "file_path": "src/colors.rs", "rank": 61, "score": 31084.221783879057 }, { "content": "\tpub fn from_rgba(red: u8, green: u8, blue: u8, alpha: u8) -> Self {\n\n\t\tColor{\n\n\t\t\tred: red,\n\n\t\t\tgreen: green,\n\n\t\t\tblue: blue,\n\n\t\t\talpha: alpha,\n\n\t\t}\n\n\t}\n\n}\n", "file_path": "src/colors.rs", "rank": 62, "score": 31075.189267676975 }, { "content": "//! 
Checking the state of mouse-related properties, namely the mouse cursor's position, `n`-clicks and scrolling.\n\n\n\n\n\nuse geometry::Point;\n\nuse bear_lib_terminal_sys as ffi;\n\n\n\n\n\n/// Amount of steps the mouse wheel scrolled in the last [`Event::MouseScroll`](../../enum.Event.html#variant.MouseScroll).\n\n///\n\n/// Negative values indicate an \"up\" scroll.\n\n///\n\n/// Positive values indicate a \"down\" scroll.\n", "file_path": "src/terminal/state/mouse.rs", "rank": 63, "score": 28762.66829228058 }, { "content": "# BearLibTerminal.rs [![TravisCI build status](https://travis-ci.org/nabijaczleweli/BearLibTerminal.rs.svg?branch=master)](https://travis-ci.org/nabijaczleweli/BearLibTerminal.rs) [![AppVeyorCI build status](https://ci.appveyor.com/api/projects/status/33799jdins9rctlo/branch/master?svg=true)](https://ci.appveyor.com/project/nabijaczleweli/bearlibterminal-rs/branch/master) [![Licence](https://img.shields.io/badge/license-MIT-blue.svg?style=flat)](LICENSE) [![Crates.io version](https://meritbadge.herokuapp.com/bear-lib-terminal)](https://crates.io/crates/bear-lib-terminal)\n\n[BearLibTerminal](https://bitbucket.org/cfyzium/bearlibterminal) FFI for Rust.\n\n\n\n# Requirements\n\nYou need to compile/get a precompiled version of [BearLibTerminal](https://bitbucket.org/cfyzium/bearlibterminal) yourself and put it somewhere, where it'll be linkable with `-lBearLibTerminal`.\n\n\n\nThey can also be found in the [releases](https://github.com/nabijaczleweli/BearLibTerminal.rs/releases).\n\n\n\n# Docs\n\nAutoupdated docs can be found [here](https://rawcdn.githack.com/nabijaczleweli/BearLibTerminal.rs/doc/bear_lib_terminal/index.html).\n", "file_path": "README.md", "rank": 79, "score": 19773.811360534382 }, { "content": "\tF10,\n\n\tF11,\n\n\tF12,\n\n\tEnter,\n\n\tEscape,\n\n\tBackspace,\n\n\tTab,\n\n\tSpace,\n\n\tPause,\n\n\tInsert,\n\n\tHome,\n\n\tPageUp,\n\n\tDelete,\n\n\tEnd,\n\n\tPageDown,\n\n\t/// Right arrow key.\n\n\tRight,\n\n\t/// Left arrow 
key.\n\n\tLeft,\n\n\t/// Down arrow key.\n", "file_path": "src/terminal/input.rs", "rank": 80, "score": 44.57280307626925 }, { "content": "/// All pressable keys.\n\n#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)]\n\npub enum KeyCode {\n\n\tA,\n\n\tB,\n\n\tC,\n\n\tD,\n\n\tE,\n\n\tF,\n\n\tG,\n\n\tH,\n\n\tI,\n\n\tJ,\n\n\tK,\n\n\tL,\n\n\tM,\n\n\tN,\n\n\tO,\n\n\tP,\n\n\tQ,\n", "file_path": "src/terminal/input.rs", "rank": 81, "score": 43.033601880463465 }, { "content": "\tBackslash,\n\n\t/// Third-row `;/:` key.\n\n\tSemicolon,\n\n\t/// Third-row `'/\"` key.\n\n\tApostrophe,\n\n\t/// Fourth-row `,/<` key.\n\n\tComma,\n\n\t/// Fourth-row `./>` key.\n\n\tPeriod,\n\n\t/// Fourth-row `//?` key.\n\n\tSlash,\n\n\tF1,\n\n\tF2,\n\n\tF3,\n\n\tF4,\n\n\tF5,\n\n\tF6,\n\n\tF7,\n\n\tF8,\n\n\tF9,\n", "file_path": "src/terminal/input.rs", "rank": 82, "score": 39.972029069299396 }, { "content": "\t\tKeyCode::Backspace => ffi::TK_BACKSPACE,\n\n\t\tKeyCode::Tab => ffi::TK_TAB,\n\n\t\tKeyCode::Space => ffi::TK_SPACE,\n\n\t\tKeyCode::Minus => ffi::TK_MINUS,\n\n\t\tKeyCode::Equals => ffi::TK_EQUALS,\n\n\t\tKeyCode::LeftBracket => ffi::TK_LBRACKET,\n\n\t\tKeyCode::RightBracket => ffi::TK_RBRACKET,\n\n\t\tKeyCode::Backslash => ffi::TK_BACKSLASH,\n\n\t\tKeyCode::Semicolon => ffi::TK_SEMICOLON,\n\n\t\tKeyCode::Apostrophe => ffi::TK_APOSTROPHE,\n\n\t\tKeyCode::Grave => ffi::TK_GRAVE,\n\n\t\tKeyCode::Comma => ffi::TK_COMMA,\n\n\t\tKeyCode::Period => ffi::TK_PERIOD,\n\n\t\tKeyCode::Slash => ffi::TK_SLASH,\n\n\t\tKeyCode::F1 => ffi::TK_F1,\n\n\t\tKeyCode::F2 => ffi::TK_F2,\n\n\t\tKeyCode::F3 => ffi::TK_F3,\n\n\t\tKeyCode::F4 => ffi::TK_F4,\n\n\t\tKeyCode::F5 => ffi::TK_F5,\n\n\t\tKeyCode::F6 => ffi::TK_F6,\n", "file_path": "src/terminal/state/mod.rs", "rank": 83, "score": 39.75317358604201 }, { "content": "\t\tffi::TK_BACKSPACE => Some(KeyCode::Backspace),\n\n\t\tffi::TK_TAB => Some(KeyCode::Tab),\n\n\t\tffi::TK_SPACE => Some(KeyCode::Space),\n\n\t\tffi::TK_MINUS => 
Some(KeyCode::Minus),\n\n\t\tffi::TK_EQUALS => Some(KeyCode::Equals),\n\n\t\tffi::TK_LBRACKET => Some(KeyCode::LeftBracket),\n\n\t\tffi::TK_RBRACKET => Some(KeyCode::RightBracket),\n\n\t\tffi::TK_BACKSLASH => Some(KeyCode::Backslash),\n\n\t\tffi::TK_SEMICOLON => Some(KeyCode::Semicolon),\n\n\t\tffi::TK_APOSTROPHE => Some(KeyCode::Apostrophe),\n\n\t\tffi::TK_GRAVE => Some(KeyCode::Grave),\n\n\t\tffi::TK_COMMA => Some(KeyCode::Comma),\n\n\t\tffi::TK_PERIOD => Some(KeyCode::Period),\n\n\t\tffi::TK_SLASH => Some(KeyCode::Slash),\n\n\t\tffi::TK_F1 => Some(KeyCode::F1),\n\n\t\tffi::TK_F2 => Some(KeyCode::F2),\n\n\t\tffi::TK_F3 => Some(KeyCode::F3),\n\n\t\tffi::TK_F4 => Some(KeyCode::F4),\n\n\t\tffi::TK_F5 => Some(KeyCode::F5),\n\n\t\tffi::TK_F6 => Some(KeyCode::F6),\n", "file_path": "src/terminal/mod.rs", "rank": 84, "score": 39.75317358604201 }, { "content": " terminal::clear(None);\n\n \n\n terminal::with_background(Color::from_rgb(32, 32, 32), || {\n\n terminal::clear(Some(frame.clone()));\n\n });\n\n \n\n terminal::print_xy(PADDING_X, 1, \"Use the arrow keys to change alignment.\");\n\n terminal::print_xy(PADDING_X, 2, &format!(\"[color=dark turquoise]Current alignment:[/color] [color=turquoise]H:{:?}, V:{:?}[/color]\", h_align, v_align));\n\n terminal::print_ext(frame, Alignment::new(h_align.clone(), v_align.clone()), lorem_ipsum);\n\n \n\n terminal::refresh();\n\n \n\n if let Some(e) = terminal::wait_event() {\n\n use {HAlign::*, VAlign::*,};\n\n match e {\n\n Event::KeyPressed{ key: KeyCode::Escape, ..} | Event::Close => break,\n\n Event::Resize{width, height} => frame = resize_frame(Size::new(width, height)),\n\n Event::KeyPressed{ key: KeyCode::Left, ..} => h_align = match h_align {\n\n Left => Left, Center => Left, Right => Center,\n\n },\n", "file_path": "examples/text_alignment.rs", "rank": 85, "score": 31.568628690365596 }, { "content": "\t\tKeyCode::F7 => ffi::TK_F7,\n\n\t\tKeyCode::F8 => ffi::TK_F8,\n\n\t\tKeyCode::F9 => ffi::TK_F9,\n\n\t\tKeyCode::F10 => 
ffi::TK_F10,\n\n\t\tKeyCode::F11 => ffi::TK_F11,\n\n\t\tKeyCode::F12 => ffi::TK_F12,\n\n\t\tKeyCode::Pause => ffi::TK_PAUSE,\n\n\t\tKeyCode::Insert => ffi::TK_INSERT,\n\n\t\tKeyCode::Home => ffi::TK_HOME,\n\n\t\tKeyCode::PageUp => ffi::TK_PAGEUP,\n\n\t\tKeyCode::Delete => ffi::TK_DELETE,\n\n\t\tKeyCode::End => ffi::TK_END,\n\n\t\tKeyCode::PageDown => ffi::TK_PAGEDOWN,\n\n\t\tKeyCode::Right => ffi::TK_RIGHT,\n\n\t\tKeyCode::Left => ffi::TK_LEFT,\n\n\t\tKeyCode::Down => ffi::TK_DOWN,\n\n\t\tKeyCode::Up => ffi::TK_UP,\n\n\t\tKeyCode::NumDivide => ffi::TK_KP_DIVIDE,\n\n\t\tKeyCode::NumMultiply => ffi::TK_KP_MULTIPLY,\n\n\t\tKeyCode::NumMinus => ffi::TK_KP_MINUS,\n", "file_path": "src/terminal/state/mod.rs", "rank": 86, "score": 30.12455882180157 }, { "content": "\t\tffi::TK_F7 => Some(KeyCode::F7),\n\n\t\tffi::TK_F8 => Some(KeyCode::F8),\n\n\t\tffi::TK_F9 => Some(KeyCode::F9),\n\n\t\tffi::TK_F10 => Some(KeyCode::F10),\n\n\t\tffi::TK_F11 => Some(KeyCode::F11),\n\n\t\tffi::TK_F12 => Some(KeyCode::F12),\n\n\t\tffi::TK_PAUSE => Some(KeyCode::Pause),\n\n\t\tffi::TK_INSERT => Some(KeyCode::Insert),\n\n\t\tffi::TK_HOME => Some(KeyCode::Home),\n\n\t\tffi::TK_PAGEUP => Some(KeyCode::PageUp),\n\n\t\tffi::TK_DELETE => Some(KeyCode::Delete),\n\n\t\tffi::TK_END => Some(KeyCode::End),\n\n\t\tffi::TK_PAGEDOWN => Some(KeyCode::PageDown),\n\n\t\tffi::TK_RIGHT => Some(KeyCode::Right),\n\n\t\tffi::TK_LEFT => Some(KeyCode::Left),\n\n\t\tffi::TK_DOWN => Some(KeyCode::Down),\n\n\t\tffi::TK_UP => Some(KeyCode::Up),\n\n\t\tffi::TK_KP_DIVIDE => Some(KeyCode::NumDivide),\n\n\t\tffi::TK_KP_MULTIPLY => Some(KeyCode::NumMultiply),\n\n\t\tffi::TK_KP_MINUS => Some(KeyCode::NumMinus),\n", "file_path": "src/terminal/mod.rs", "rank": 87, "score": 30.12455882180157 }, { "content": "\tR,\n\n\tS,\n\n\tT,\n\n\tU,\n\n\tV,\n\n\tW,\n\n\tX,\n\n\tY,\n\n\tZ,\n\n\t/// Top-row `1/!` key.\n\n\tRow1,\n\n\t/// Top-row `2/@` key.\n\n\tRow2,\n\n\t/// Top-row `3/#` key.\n\n\tRow3,\n\n\t/// Top-row `4/$` 
key.\n\n\tRow4,\n\n\t/// Top-row `5/%` key.\n\n\tRow5,\n\n\t/// Top-row `6/^` key.\n", "file_path": "src/terminal/input.rs", "rank": 88, "score": 29.778334241687645 }, { "content": "\t\tffi::TK_S => Some(KeyCode::S),\n\n\t\tffi::TK_T => Some(KeyCode::T),\n\n\t\tffi::TK_U => Some(KeyCode::U),\n\n\t\tffi::TK_V => Some(KeyCode::V),\n\n\t\tffi::TK_W => Some(KeyCode::W),\n\n\t\tffi::TK_X => Some(KeyCode::X),\n\n\t\tffi::TK_Y => Some(KeyCode::Y),\n\n\t\tffi::TK_Z => Some(KeyCode::Z),\n\n\t\tffi::TK_1 => Some(KeyCode::Row1),\n\n\t\tffi::TK_2 => Some(KeyCode::Row2),\n\n\t\tffi::TK_3 => Some(KeyCode::Row3),\n\n\t\tffi::TK_4 => Some(KeyCode::Row4),\n\n\t\tffi::TK_5 => Some(KeyCode::Row5),\n\n\t\tffi::TK_6 => Some(KeyCode::Row6),\n\n\t\tffi::TK_7 => Some(KeyCode::Row7),\n\n\t\tffi::TK_8 => Some(KeyCode::Row8),\n\n\t\tffi::TK_9 => Some(KeyCode::Row9),\n\n\t\tffi::TK_0 => Some(KeyCode::Row0),\n\n\t\tffi::TK_ENTER => Some(KeyCode::Enter),\n\n\t\tffi::TK_ESCAPE => Some(KeyCode::Escape),\n", "file_path": "src/terminal/mod.rs", "rank": 89, "score": 27.873529304815566 }, { "content": "\t\tKeyCode::S => ffi::TK_S,\n\n\t\tKeyCode::T => ffi::TK_T,\n\n\t\tKeyCode::U => ffi::TK_U,\n\n\t\tKeyCode::V => ffi::TK_V,\n\n\t\tKeyCode::W => ffi::TK_W,\n\n\t\tKeyCode::X => ffi::TK_X,\n\n\t\tKeyCode::Y => ffi::TK_Y,\n\n\t\tKeyCode::Z => ffi::TK_Z,\n\n\t\tKeyCode::Row1 => ffi::TK_1,\n\n\t\tKeyCode::Row2 => ffi::TK_2,\n\n\t\tKeyCode::Row3 => ffi::TK_3,\n\n\t\tKeyCode::Row4 => ffi::TK_4,\n\n\t\tKeyCode::Row5 => ffi::TK_5,\n\n\t\tKeyCode::Row6 => ffi::TK_6,\n\n\t\tKeyCode::Row7 => ffi::TK_7,\n\n\t\tKeyCode::Row8 => ffi::TK_8,\n\n\t\tKeyCode::Row9 => ffi::TK_9,\n\n\t\tKeyCode::Row0 => ffi::TK_0,\n\n\t\tKeyCode::Enter => ffi::TK_ENTER,\n\n\t\tKeyCode::Escape => ffi::TK_ESCAPE,\n", "file_path": "src/terminal/state/mod.rs", "rank": 90, "score": 27.873529304815563 }, { "content": "\t\tSize{\n\n\t\t\twidth: width,\n\n\t\t\theight: height,\n\n\t\t}\n\n\t}\n\n}\n\n\n\nimpl fmt::Display for Size {\n\n\tfn 
fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {\n\n\t\twrite!(formatter, \"{}x{}\", self.width, self.height)\n\n\t}\n\n}\n\n\n\n\n\n/// A rectangle, described by its four corners and a size.\n\n#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)]\n\npub struct Rect {\n\n\t/// The top-left corner.\n\n\tpub top_left: Point,\n\n\t/// The top-right corner.\n", "file_path": "src/geometry.rs", "rank": 91, "score": 26.824750402910556 }, { "content": "\tpub fn icon<T: AsRef<Path>>(mut self, icon: T) -> Self {self.icon = Some(icon.as_ref().to_str().unwrap().to_string()); self}\n\n\n\n\t/// Whether the terminal window should be resizeable.\n\n\t///\n\n\t/// Default: `false`.\n\n\tpub fn resizeable (mut self, resizeable: bool) -> Self {self.resizeable = Some(resizeable) ; self}\n\n\n\n\t/// Whether to enforce fullscreen mode.\n\n\t///\n\n\t/// Default: `false`.\n\n\tpub fn fullscreen (mut self, fullscreen: bool) -> Self {self.fullscreen = Some(fullscreen) ; self}\n\n}\n\n\n\nimpl Input {\n\n\t/// Construct an `input` [configuration](http://foo.wyrd.name/en:bearlibterminal:reference:configuration#library_configuration) section override segment\n\n\t/// with all elements equal to `None`.\n\n\tpub fn empty() -> Input {\n\n\t\tInput{\n\n\t\t\tprecise_mouse: None,\n\n\t\t\tmouse_cursor: None,\n", "file_path": "src/terminal/config/section.rs", "rank": 92, "score": 26.141555584386115 }, { "content": "\t\tmatch event {\n\n\t\t\tEvent::Resize{width, height} => {\n\n\t\t\t\tterminal::print_xy(0, 0, &*&format!(\"Width: {}\\nHeight: {}\", width, height));\n\n\t\t\t\tterminal::refresh();\n\n\t\t\t},\n\n\t\t\tEvent::Close | Event::KeyPressed{key: KeyCode::Escape, ctrl: _, shift: _} => break,\n\n\t\t\t_ => (),\n\n\t\t}\n\n\t}\n\n\tterminal::close();\n\n}\n", "file_path": "examples/simple.rs", "rank": 93, "score": 24.82717249138051 }, { "content": "\tMouseMiddle,\n\n\tMouseFourth,\n\n\tMouseFifth,\n\n}\n\n\n\n/// A single input event.\n\n#[derive(Clone, Copy, Debug, Eq, Hash, 
PartialEq)]\n\npub enum Event {\n\n\t/// Terminal window closed.\n\n\tClose,\n\n\t/// Terminal window resized. Needs to have `window.resizeable = true` to occur.\n\n\t///\n\n\t/// Note, that, as of [`40e6253`](https://bitbucket.org/cfyzium/bearlibterminal/commits/40e625311f0cccc43b94633add4dec0d6b77c2b7),\n\n\t/// the terminal window is cleared when resized.\n\n\tResize{\n\n\t\t/// Width the terminal was resized to.\n\n\t\twidth: i32,\n\n\t\t/// Heigth the terminal was resized to.\n\n\t\theight: i32,\n\n\t},\n", "file_path": "src/terminal/input.rs", "rank": 94, "score": 24.518052479962613 }, { "content": "\tRow6,\n\n\t/// Top-row `7/&` key.\n\n\tRow7,\n\n\t/// Top-row `8/*` key.\n\n\tRow8,\n\n\t/// Top-row `9/(` key.\n\n\tRow9,\n\n\t/// Top-row `0/)` key.\n\n\tRow0,\n\n\t/// Top-row &#96;/~ key.\n\n\tGrave,\n\n\t/// Top-row `-/_` key.\n\n\tMinus,\n\n\t/// Top-row `=/+` key.\n\n\tEquals,\n\n\t/// Second-row `[/{` key.\n\n\tLeftBracket,\n\n\t/// Second-row `]/}` key.\n\n\tRightBracket,\n\n\t/// Second-row `\\/|` key.\n", "file_path": "src/terminal/input.rs", "rank": 95, "score": 22.913086044964817 }, { "content": "use bear_lib_terminal_sys as ffi;\n\n\n\n#[derive(Clone, PartialEq, Debug)]\n\npub enum HAlign {\n\n Left,\n\n Center,\n\n Right,\n\n}\n\n\n\n#[derive(Clone, PartialEq, Debug)]\n\npub enum VAlign {\n\n Top,\n\n Middle,\n\n Bottom,\n\n}\n\n\n\npub struct Alignment{\n\n h: HAlign,\n\n v: VAlign,\n\n}\n", "file_path": "src/terminal/alignment.rs", "rank": 96, "score": 22.329673109947606 }, { "content": "\n\n\t/// Tile alignment area \\[cells\\].\n\n\t///\n\n\t/// Default: `1x1`.\n\n\tpub fn spacing (mut self, spacing: Size) -> Self {self.spacing = Some(spacing) ; self}\n\n}\n\n\n\n\n\nimpl ConfigPart for Bitmap {\n\n\tfn to_config_str(&self) -> String {\n\n\t\tformat!(\"{}: {}{}{}{}{}{}{}{}{};\", self.origin, escape_config_string(&self.path),\n\n\t\t\tmatch self.size {\n\n\t\t\t\tNone => \"\".to_string(),\n\n\t\t\t\tSome(ref size) => format!(\", size={}\", 
size),\n\n\t\t\t},\n\n\t\t\tmatch self.resize {\n\n\t\t\t\tNone => \"\".to_string(),\n\n\t\t\t\tSome(ref resize) => format!(\", resize={}\", resize),\n\n\t\t\t},\n\n\t\t\tmatch self.resize_filter {\n", "file_path": "src/terminal/config/font.rs", "rank": 97, "score": 22.145645984112182 }, { "content": "pub struct Window {\n\n\tsize: Option<Size>,\n\n\tcellsize: Option<Cellsize>,\n\n\ttitle: Option<String>,\n\n\ticon: Option<String>,\n\n\tresizeable: Option<bool>,\n\n\tfullscreen: Option<bool>,\n\n}\n\n\n\n/// The `input` [configuration](http://foo.wyrd.name/en:bearlibterminal:reference:configuration#library_configuration) section repr.\n\n///\n\n/// `None` values will not override current ones.\n\n///\n\n/// See [`terminal::set()`](../fn.set.html).\n\n#[derive(Clone, Debug, Eq, PartialEq, Hash)]\n\npub struct Input {\n\n\tprecise_mouse: Option<bool>,\n\n\tmouse_cursor: Option<bool>,\n\n\tcursor_symbol: Option<char>,\n\n\tcursor_blink_rate: Option<i32>,\n", "file_path": "src/terminal/config/section.rs", "rank": 98, "score": 21.880214575260016 }, { "content": "}\n\n\n\n/// How to aspect-change when resizing a bitmap.\n\n#[derive(Clone, Debug, Eq, PartialEq, Hash)]\n\npub enum ResizeMode {\n\n\tStretch,\n\n\tFit,\n\n\tCrop,\n\n}\n\n\n\n/// Per-tileset tile alignment.\n\n#[derive(Clone, Debug, Eq, PartialEq, Hash)]\n\npub enum Align {\n\n\tCenter,\n\n\tTopLeft,\n\n\tBottomLeft,\n\n\tTopRight,\n\n\tBottomRight,\n\n}\n\n\n", "file_path": "src/terminal/config/font.rs", "rank": 99, "score": 21.156080938588328 } ]
Rust
src/main.rs
mkb2091/msolve
7ccdda6626ada1cc3abdf0e1abb7cd32f3618e4c
/// Command-line front-end for the `msolve` sudoku library.
///
/// Reads one puzzle per line from stdin and, depending on the subcommand,
/// writes solutions, filtered puzzles, difficulty scores, solution counts,
/// generated puzzles, or technique listings to stdout. Only compiled in
/// when the `cli` feature is enabled.
#[cfg(feature = "cli")]
mod cli {
    // The CLI performs real I/O via std; building it without `std` can never work.
    #[cfg(not(feature = "std"))]
    compile_error!("`std` feature is required for cli");
    use std::io::BufRead;
    use std::io::Write;

    use clap::Clap;

    /// Top-level argument set: global flags plus the selected subcommand.
    #[derive(Clap, Copy, Clone)]
    #[clap(version = "1.0")]
    struct Opts {
        /// When set, only puzzles with exactly one solution are solved/selected.
        #[clap(short, long)]
        verify_uniqueness: bool,
        /// Passed through to difficulty scoring / generation
        /// (presumably toggles step-counted grading in msolve — TODO confirm).
        #[clap(short, long)]
        count_steps: bool,
        #[clap(subcommand)]
        mode: Mode,
    }

    /// The available subcommands; `Generate` only exists with the `generate` feature.
    #[derive(Clap, Copy, Clone)]
    enum Mode {
        Solve(Solve),
        Select(Select),
        Difficulty,
        CountSolutions(CountSolutions),
        #[cfg(feature = "generate")]
        Generate(Generate),
        ListTechniques,
        Info,
    }

    /// `solve` options: how many solutions to emit per puzzle.
    #[derive(Clap, Copy, Clone)]
    struct Solve {
        /// Maximum number of solutions printed for each input puzzle.
        #[clap(short, long, default_value = "1")]
        count: usize,
    }
    /// `select` options: filter puzzles by solvability (optionally inverted).
    #[derive(Clap, Copy, Clone)]
    struct Select {
        /// Emit puzzles that do NOT match instead of those that do.
        #[clap(short, long)]
        invert: bool,
    }
    /// `count-solutions` options: upper bound on solutions to count per puzzle.
    #[derive(Clap, Copy, Clone)]
    struct CountSolutions {
        n: usize,
    }
    /// `generate` options: one-shot vs. continuous mode, plus score display.
    #[cfg(feature = "generate")]
    #[derive(Clap, Copy, Clone)]
    struct Generate {
        #[clap(subcommand)]
        mode: GenerateMode,
        /// Prefix each generated puzzle with `"<score>;"`.
        #[clap(short, long)]
        display_score: bool,
    }
    #[cfg(feature = "generate")]
    #[derive(Clap, Copy, Clone)]
    enum GenerateMode {
        /// Derive a single puzzle from each stdin seed puzzle.
        Once(GenerateOnce),
        /// Stream freshly generated puzzles, ignoring stdin.
        Continuous(GenerateContinuous),
    }
    #[cfg(feature = "generate")]
    #[derive(Clap, Copy, Clone)]
    struct GenerateOnce {
        /// How many clues to strip from the seed when generating.
        cells_to_remove: usize,
    }
    #[cfg(feature = "generate")]
    #[derive(Clap, Copy, Clone)]
    struct GenerateContinuous {
        /// Stop after this many puzzles; `None` means run forever.
        #[clap(short, long)]
        n: Option<std::num::NonZeroUsize>,
    }

    /// Score a puzzle's difficulty; `None` when msolve cannot grade it
    /// (presumably unsolvable/non-unique puzzles — TODO confirm against msolve docs).
    fn score_sudoku(sudoku: &msolve::Sudoku, opts: &Opts) -> Option<i32> {
        sudoku.difficulty(opts.count_steps)
    }

    /// CLI entry point: parse arguments, then stream puzzles line-by-line
    /// from stdin and dispatch each to the selected subcommand.
    pub fn main() {
        let opts: Opts = Opts::parse();
        let stdin = std::io::stdin();
        // Lock stdin/stdout once up front so the per-line loop avoids
        // re-locking on every read/write.
        let mut input = stdin.lock();
        // 82 bytes: presumably one 81-cell grid line plus its newline — TODO confirm.
        let mut buffer = String::with_capacity(82);
        let stdout = std::io::stdout();
        let mut output_handle = stdout.lock();
        // Histogram for `info` mode, indexed by count_solutions(2): 0, 1, or 2+.
        let mut info = [0; 3];
        #[cfg(feature = "rand")]
        let mut rng = rand::thread_rng();
        // Continuous generation never consumes stdin, so it is handled
        // before the read loop and returns directly once `n` puzzles are out.
        #[cfg(feature = "generate")]
        if let Mode::Generate(generate) = opts.mode {
            if let GenerateMode::Continuous(continuous) = generate.mode {
                // n == 0 encodes "unbounded".
                let n = continuous.n.map(|n| n.get()).unwrap_or(0);
                let mut counter = 0;
                for (sudoku, score) in
                    msolve::Sudoku::generate(rand::thread_rng(), opts.count_steps)
                {
                    if generate.display_score {
                        // Output format: "<score>;<grid>\n".
                        let _ = output_handle.write_all(&score.to_string().as_bytes());
                        let _ = output_handle.write_all(b";");
                    }
                    let _ = output_handle.write_all(&sudoku.to_bytes());
                    let _ = output_handle.write_all(b"\n");
                    if n != 0 {
                        counter += 1;
                        if counter >= n {
                            return;
                        }
                    }
                }
            }
        }
        // Main loop: read_line returns Ok(0) at EOF, which ends the loop.
        while let Ok(result) = input.read_line(&mut buffer) {
            if result == 0 {
                break;
            }
            let sudoku = buffer.parse::<msolve::Sudoku>().unwrap();
            match opts.mode {
                Mode::Solve(solve) => {
                    if opts.verify_uniqueness {
                        // Only print when the solution is provably unique.
                        if let Some(solution) = sudoku.solve_unique() {
                            let _ = output_handle.write_all(&solution.to_bytes());
                            let _ = output_handle.write_all(b"\n");
                        }
                    } else {
                        // Emit up to `count` solutions, one per line.
                        for solution in sudoku.iter().take(solve.count) {
                            let _ = output_handle.write_all(&solution.to_bytes());
                            let _ = output_handle.write_all(b"\n");
                        }
                    }
                }
                Mode::Select(select) => {
                    // Solvability test, optionally requiring uniqueness,
                    // optionally inverted; matching puzzles are echoed as-is.
                    let mut does_match = if opts.verify_uniqueness {
                        sudoku.has_single_solution()
                    } else {
                        sudoku.has_solution()
                    };
                    if select.invert {
                        does_match = !does_match;
                    }
                    if does_match {
                        let _ = output_handle.write_all(&sudoku.to_bytes());
                        let _ = output_handle.write_all(b"\n");
                    }
                }
                Mode::Difficulty => {
                    // Output format: "<difficulty>;<grid>\n"; ungradeable puzzles are skipped.
                    if let Some(difficulty) = score_sudoku(&sudoku, &opts) {
                        let _ = output_handle.write_all(&difficulty.to_string().as_bytes());
                        let _ = output_handle.write_all(b";");
                        let _ = output_handle.write_all(&sudoku.to_bytes());
                        let _ = output_handle.write_all(b"\n");
                    }
                }
                Mode::CountSolutions(n) => {
                    // Count solutions up to the cap n.n; format: "<count>;<grid>\n".
                    let count = sudoku.count_solutions(n.n);
                    let _ = output_handle.write_all(&count.to_string().as_bytes());
                    let _ = output_handle.write_all(b";");
                    let _ = output_handle.write_all(&sudoku.to_bytes());
                    let _ = output_handle.write_all(b"\n");
                }
                #[cfg(feature = "generate")]
                Mode::Generate(generate) => {
                    // One-shot generation seeded by the stdin puzzle; the
                    // continuous variant was already handled before the loop.
                    if let GenerateMode::Once(once) = generate.mode {
                        let (sudoku, score) = sudoku.generate_from_seed(
                            &mut rng,
                            once.cells_to_remove,
                            opts.count_steps,
                        );
                        if generate.display_score {
                            let _ = output_handle.write_all(&score.to_string().as_bytes());
                            let _ = output_handle.write_all(b";");
                        }
                        let _ = output_handle.write_all(&sudoku.to_bytes());
                        let _ = output_handle.write_all(b"\n");
                    } else {
                        unimplemented!()
                    }
                }
                Mode::ListTechniques => {
                    // Show each solving step with its pencil-mark state;
                    // the 1 s sleep paces the output for interactive viewing.
                    for (explanation, state) in sudoku.list_techniques().iter() {
                        let _ = output_handle.write_all(&explanation.as_bytes());
                        let _ = output_handle.write_all(b"\n");
                        let _ = output_handle.write_all(&state.to_pencilmark_bytes());
                        let _ = output_handle.write_all(b"\n");
                        std::thread::sleep(std::time::Duration::from_secs(1));
                    }
                }
                Mode::Info => {
                    // count_solutions(2) caps at 2, so the index is always 0..=2.
                    info[sudoku.count_solutions(2)] += 1;
                }
            }
            // Reuse the line buffer; read_line appends, so it must be cleared.
            buffer.clear();
        }
        // Info mode prints its summary only after all input is consumed.
        if let Mode::Info = opts.mode {
            println!(
                "0 Solutions: {}, 1 Solution: {}, 2+ Solutions: {}",
                info[0], info[1], info[2]
            );
        }
    }
}

/// Binary entry point; delegates to the CLI when the `cli` feature is on,
/// otherwise compiles to a no-op.
fn main() {
    #[cfg(feature = "cli")]
    cli::main()
}
#[cfg(feature = "cli")] mod cli { #[cfg(not(feature = "std"))] compile_error!("`std` feature is required for cli"); use std::io::BufRead; use std::io::Write; use clap::Clap; #[derive(Clap, Copy, Clone)] #[clap(version = "1.0")] struct Opts { #[clap(short, long)] verify_uniqueness: bool, #[clap(short, long)] count_steps: bool, #[clap(subcommand)] mode: Mode, } #[derive(Clap, Copy, Clone)] enum Mode { Solve(Solve), Select(Select), Difficulty, CountSolutions(CountSolutions), #[cfg(feature = "generate")] Generate(Generate), ListTechniques, Info, } #[derive(Clap, Copy, Clone)] struct Solve { #[clap(short, long, default_value = "1")] count: usize, } #[derive(Clap, Copy, Clone)] struct Select { #[clap(short, long)] invert: bool, } #[derive(Clap, Copy, Clone)] struct CountSolutions { n: usize, } #[cfg(feature = "generate")] #[derive(Clap, Copy, Clone)] struct Generate { #[clap(subcommand)] mode: GenerateMode, #[clap(short, long)] display_score: bool, } #[cfg(feature = "generate")] #[derive(Clap, Copy, Clone)] enum GenerateMode { Once(GenerateOnce), Continuous(GenerateContinuous), } #[cfg(feature = "generate")] #[derive(Clap, Copy, Clone)] struct GenerateOnce { cells_to_remove: usize, } #[cfg(feature = "generate")] #[derive(Clap, Copy, Clone)] struct GenerateContinuous { #[clap(short, long)] n: Option<std::num::NonZeroUsize>, } fn score_sudoku(sudoku: &msolve::Sudoku, opts: &Opts) -> Option<i32> { sudoku.difficulty(opts.count_steps) } pub fn main() { let opts: Opts = Opts::parse(); let stdin = std::io::stdin(); let mut input = stdin.lock(); let mut buffer = String::with_capacity(82); let stdout = std::io::stdout(); let mut output_handle = stdout.
} fn main() { #[cfg(feature = "cli")] cli::main() }
lock(); let mut info = [0; 3]; #[cfg(feature = "rand")] let mut rng = rand::thread_rng(); #[cfg(feature = "generate")] if let Mode::Generate(generate) = opts.mode { if let GenerateMode::Continuous(continuous) = generate.mode { let n = continuous.n.map(|n| n.get()).unwrap_or(0); let mut counter = 0; for (sudoku, score) in msolve::Sudoku::generate(rand::thread_rng(), opts.count_steps) { if generate.display_score { let _ = output_handle.write_all(&score.to_string().as_bytes()); let _ = output_handle.write_all(b";"); } let _ = output_handle.write_all(&sudoku.to_bytes()); let _ = output_handle.write_all(b"\n"); if n != 0 { counter += 1; if counter >= n { return; } } } } } while let Ok(result) = input.read_line(&mut buffer) { if result == 0 { break; } let sudoku = buffer.parse::<msolve::Sudoku>().unwrap(); match opts.mode { Mode::Solve(solve) => { if opts.verify_uniqueness { if let Some(solution) = sudoku.solve_unique() { let _ = output_handle.write_all(&solution.to_bytes()); let _ = output_handle.write_all(b"\n"); } } else { for solution in sudoku.iter().take(solve.count) { let _ = output_handle.write_all(&solution.to_bytes()); let _ = output_handle.write_all(b"\n"); } } } Mode::Select(select) => { let mut does_match = if opts.verify_uniqueness { sudoku.has_single_solution() } else { sudoku.has_solution() }; if select.invert { does_match = !does_match; } if does_match { let _ = output_handle.write_all(&sudoku.to_bytes()); let _ = output_handle.write_all(b"\n"); } } Mode::Difficulty => { if let Some(difficulty) = score_sudoku(&sudoku, &opts) { let _ = output_handle.write_all(&difficulty.to_string().as_bytes()); let _ = output_handle.write_all(b";"); let _ = output_handle.write_all(&sudoku.to_bytes()); let _ = output_handle.write_all(b"\n"); } } Mode::CountSolutions(n) => { let count = sudoku.count_solutions(n.n); let _ = output_handle.write_all(&count.to_string().as_bytes()); let _ = output_handle.write_all(b";"); let _ = output_handle.write_all(&sudoku.to_bytes()); let 
_ = output_handle.write_all(b"\n"); } #[cfg(feature = "generate")] Mode::Generate(generate) => { if let GenerateMode::Once(once) = generate.mode { let (sudoku, score) = sudoku.generate_from_seed( &mut rng, once.cells_to_remove, opts.count_steps, ); if generate.display_score { let _ = output_handle.write_all(&score.to_string().as_bytes()); let _ = output_handle.write_all(b";"); } let _ = output_handle.write_all(&sudoku.to_bytes()); let _ = output_handle.write_all(b"\n"); } else { unimplemented!() } } Mode::ListTechniques => { for (explanation, state) in sudoku.list_techniques().iter() { let _ = output_handle.write_all(&explanation.as_bytes()); let _ = output_handle.write_all(b"\n"); let _ = output_handle.write_all(&state.to_pencilmark_bytes()); let _ = output_handle.write_all(b"\n"); std::thread::sleep(std::time::Duration::from_secs(1)); } } Mode::Info => { info[sudoku.count_solutions(2)] += 1; } } buffer.clear(); } if let Mode::Info = opts.mode { println!( "0 Solutions: {}, 1 Solution: {}, 2+ Solutions: {}", info[0], info[1], info[2] ); } }
function_block-function_prefixed
[ { "content": "fn bench_solving(sudoku: Option<&String>, solver: Solver, mode: Mode) -> usize {\n\n let solution_count = match mode {\n\n Mode::SolveOne => 1,\n\n Mode::SolveUnique => 2,\n\n };\n\n match solver {\n\n Solver::MSolve => {\n\n if let Ok(sudoku) = msolve::Sudoku::from_str(sudoku.unwrap()) {\n\n sudoku.count_solutions(solution_count)\n\n } else {\n\n 0\n\n }\n\n }\n\n Solver::RustSudoku => {\n\n if let Ok(sudoku) = sudoku::Sudoku::from_str_line(sudoku.unwrap()) {\n\n sudoku.count_at_most(solution_count)\n\n } else {\n\n 0\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "benches/benchmarks.rs", "rank": 1, "score": 78227.98467985669 }, { "content": "#[derive(Debug, Copy, Clone)]\n\nenum Mode {\n\n SolveOne,\n\n SolveUnique,\n\n}\n\n\n", "file_path": "benches/benchmarks.rs", "rank": 2, "score": 74758.97771958691 }, { "content": "fn generate<T>(rng: &mut T, count_steps: bool) -> (Sudoku, SudokuScore)\n\nwhere\n\n T: rand::Rng + rand_core::RngCore,\n\n{\n\n let mut sudoku = Sudoku::empty();\n\n let cell_distribution = rand::distributions::Uniform::new(0, 81);\n\n while (sudoku.solved_squares & consts::SOLVED_SUDOKU) != consts::SOLVED_SUDOKU {\n\n let index = cell_distribution.sample(rng);\n\n if sudoku.solved_squares & (1 << index) != 0 {\n\n continue;\n\n }\n\n let mut temp = sudoku;\n\n let mut value = temp.cells[index];\n\n debug_assert_ne!(value, 0);\n\n let chosen_value_index = rng.gen_range(0, value.count_ones());\n\n let mut i = get_last_digit!(value, usize);\n\n for _ in 0..chosen_value_index {\n\n i = get_last_digit!(value, usize);\n\n }\n\n temp.cells[index] = 1 << i;\n", "file_path": "src/gen.rs", "rank": 3, "score": 72406.95751629698 }, { "content": "pub fn minimise(\n\n sudoku: Sudoku,\n\n old_score: Option<SudokuScore>,\n\n count_steps: bool,\n\n) -> Option<(Sudoku, SudokuScore)> {\n\n let mut old: Option<(Sudoku, SudokuScore)> = old_score\n\n .or_else(|| sudoku.difficulty(count_steps))\n\n .map(|old_score| (sudoku, old_score));\n\n let mut 
changed = false;\n\n let mut removable: u128 = u128::MAX;\n\n while let Some((old_sudoku, old_score)) = old {\n\n let mut best_score = old_score;\n\n let mut best_sudoku: Option<Sudoku> = None;\n\n let mut temp = old_sudoku.solved_squares & consts::SOLVED_SUDOKU & removable;\n\n let mut array = old_sudoku.to_array();\n\n while temp != 0 {\n\n let square = get_last_digit!(temp, usize);\n\n let old_value = array[square];\n\n debug_assert_ne!(old_value, 0);\n\n array[square] = 0;\n", "file_path": "src/gen.rs", "rank": 4, "score": 65457.12705687442 }, { "content": "pub fn generate_from_seed<T>(\n\n sudoku: Sudoku,\n\n rng: &mut T,\n\n cells_to_remove: usize,\n\n count_steps: bool,\n\n) -> (Sudoku, SudokuScore)\n\nwhere\n\n T: rand::Rng + rand_core::RngCore,\n\n{\n\n let mut array = sudoku.to_array();\n\n let mut solved_squares = sudoku.solved_squares & consts::SOLVED_SUDOKU;\n\n let desired_solved_count = solved_squares\n\n .count_ones()\n\n .saturating_sub(cells_to_remove as u32);\n\n while solved_squares.count_ones() > desired_solved_count || !Sudoku::from(array).has_solution()\n\n {\n\n let solved_index = rng.gen_range(0, solved_squares.count_ones() as usize);\n\n let mut temp = solved_squares;\n\n let mut i = get_last_digit!(temp, usize);\n\n for _ in 0..solved_index {\n", "file_path": "src/gen.rs", "rank": 5, "score": 58056.85189321442 }, { "content": "fn criterion_benchmark(c: &mut Criterion) {\n\n let paths = [\n\n (\"top2365\", true), // http://magictour.free.fr/top2365\n\n (\"sudoku17\", true), // https://staffhome.ecm.uwa.edu.au/~00013890/sudoku17\n\n (\"kaggle.txt\", true), // https://www.kaggle.com/bryanpark/sudoku\n\n (\"gen_puzzles\", true), // http://www.enjoysudoku.com/gen_puzzles.zip\n\n (\"forum_hardest_1905\", true), // http://forum.enjoysudoku.com/the-hardest-sudokus-new-thread-t6539-600.html#p277835\n\n (\"hardest_to_solve\", false), // Top 1000 hardest to solve for msolve from forum_hardest_1905\n\n (\"hardest_to_verify\", false), // Top 1000 
hardest to solve unique for msolve from forum_hardest_1905\n\n (\"most_difficult\", false), // Top 100 hardest sudokus to verify across all lists and generated ones\n\n (\"serg_benchmark\", true), // http://sites.google.com/site/sergsudoku/benchmark.zip\n\n ];\n\n\n\n for (path, shuffle) in paths.iter() {\n\n let file_in =\n\n std::fs::File::open(format!(\"bench_sudokus/{}\", path)).expect(\"Failed to open file\");\n\n let mut buf = std::io::BufReader::new(file_in);\n\n\n\n let mut sudokus = Vec::<String>::new();\n\n let mut line = String::with_capacity(81);\n", "file_path": "benches/benchmarks.rs", "rank": 6, "score": 56311.80218935014 }, { "content": "#[derive(Default)]\n\nstruct DifficultyRecording {\n\n step_count: usize,\n\n apply_number_count: usize,\n\n scan_count: usize,\n\n hidden_single_count: usize,\n\n}\n\n\n\nimpl solution_iterator::TechniqueRecording for DifficultyRecording {\n\n type Output = usize;\n\n fn record_step(&mut self, _: &Sudoku) {\n\n self.step_count += 1;\n\n }\n\n fn record_apply_number(&mut self, _: usize, _: &Sudoku) {\n\n self.apply_number_count += 1;\n\n }\n\n fn record_scan(&mut self, _: &Sudoku) {\n\n self.scan_count += 1;\n\n }\n\n fn record_hidden_single(&mut self, _: usize, _: &Sudoku) {\n\n self.hidden_single_count += 1;\n\n }\n\n fn get_recording(&self) -> usize {\n\n self.step_count + self.apply_number_count + self.scan_count + self.hidden_single_count\n\n }\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 7, "score": 56291.88980730713 }, { "content": "#[derive(Debug, Copy, Clone)]\n\nenum Solver {\n\n MSolve,\n\n RustSudoku,\n\n}\n\n\n", "file_path": "benches/benchmarks.rs", "rank": 8, "score": 37382.466561491994 }, { "content": "#[cfg(feature = \"alloc\")]\n\n#[derive(Default)]\n\nstruct FullRecording {\n\n techniques: Vec<(String, Sudoku)>,\n\n}\n\n\n\n#[cfg(feature = \"alloc\")]\n\nimpl solution_iterator::TechniqueRecording for FullRecording {\n\n type Output = Vec<(String, Sudoku)>;\n\n fn record_step(&mut self, _: 
&Sudoku) {}\n\n fn record_apply_number(&mut self, square: usize, state: &Sudoku) {\n\n let mut explanation = \"Found naked single: R\".to_string();\n\n explanation.push(b\"123456789\"[square / 9] as char);\n\n explanation.push('C');\n\n explanation.push(b\"123456789\"[square % 9] as char);\n\n self.techniques.push((explanation, *state))\n\n }\n\n fn record_scan(&mut self, state: &Sudoku) {\n\n self.techniques.push((\"Scanned\".to_string(), *state))\n\n }\n\n fn record_hidden_single(&mut self, square: usize, state: &Sudoku) {\n\n let mut explanation = \"Found hidden single: R\".to_string();\n", "file_path": "src/lib.rs", "rank": 9, "score": 35894.49373196173 }, { "content": "fn mutate(\n\n sudoku: Sudoku,\n\n old_score: Option<SudokuScore>,\n\n count_steps: bool,\n\n) -> Option<(Sudoku, SudokuScore)> {\n\n let old_score = old_score.or_else(|| sudoku.difficulty(count_steps));\n\n if !(count_steps || sudoku.has_single_solution()) {\n\n return None;\n\n }\n\n let mut best: Option<(Sudoku, SudokuScore)> = None;\n\n let mut temp = sudoku.solved_squares & consts::SOLVED_SUDOKU;\n\n let mut array = sudoku.to_array();\n\n while temp != 0 {\n\n let s1 = get_last_digit!(temp, usize);\n\n let old = array[s1];\n\n array[s1] = 0;\n\n let mut temp2 = !sudoku.solved_squares & consts::SOLVED_SUDOKU;\n\n while temp2 != 0 {\n\n let s2 = get_last_digit!(temp2, usize);\n\n let mut temp3 = sudoku.cells[s2];\n", "file_path": "src/gen.rs", "rank": 10, "score": 34725.47736806887 }, { "content": "pub trait TechniqueRecording: Default {\n\n fn record_step(&mut self, _: &Sudoku) {}\n\n fn record_apply_number(&mut self, _: usize, _: &Sudoku) {}\n\n fn record_scan(&mut self, _: &Sudoku) {}\n\n fn record_hidden_single(&mut self, _: usize, _: &Sudoku) {}\n\n type Output;\n\n fn get_recording(&self) -> Self::Output;\n\n}\n\n\n\npub struct SolutionIterator<T: TechniqueRecording> {\n\n #[cfg(feature = \"alloc\")]\n\n routes: SudokuBackTrackingVec,\n\n recording: T,\n\n}\n\n\n\nimpl<T> 
SolutionIterator<T>\n\nwhere\n\n T: TechniqueRecording,\n\n{\n\n pub fn new(mut sudoku: Sudoku) -> Self {\n", "file_path": "src/solution_iterator.rs", "rank": 11, "score": 28889.370732948333 }, { "content": "#[cfg(all(feature = \"smallvec\", feature = \"alloc\"))]\n\ntype SudokuBackTrackingVec = smallvec::SmallVec<[Sudoku; 10]>;\n", "file_path": "src/solution_iterator.rs", "rank": 22, "score": 10543.747738590058 }, { "content": " }\n\n\n\n pub fn solved_cell_count(&self) -> usize {\n\n (self.solved_squares & consts::SOLVED_SUDOKU).count_ones() as usize\n\n }\n\n #[cfg(feature = \"generate\")]\n\n pub fn generate<T>(rng: T, count_steps: bool) -> gen::SudokuGenerator<T>\n\n where\n\n T: rand::Rng + rand_core::RngCore,\n\n {\n\n gen::SudokuGenerator::new(rng, count_steps)\n\n }\n\n #[cfg(feature = \"generate\")]\n\n pub fn generate_from_seed<T>(\n\n self,\n\n rng: &mut T,\n\n cells_to_remove: usize,\n\n count_steps: bool,\n\n ) -> (Self, i32)\n\n where\n", "file_path": "src/lib.rs", "rank": 23, "score": 14.753225621390957 }, { "content": "compile_error!(\"`std` feature is required for rand\");\n\n\n\n#[cfg(all(not(feature = \"alloc\"), feature = \"smallvec\"))]\n\ncompile_error!(\"`std` feature is required for smallvec\");\n\n\n\n#[cfg(not(feature = \"alloc\"))]\n\ncompile_error!(\"`alloc` feature is currently required\");\n\n\n\npub mod solution_iterator;\n\n\n\nuse core::convert::From;\n\nuse core::convert::TryInto;\n\nuse core::str::FromStr;\n\n\n\n#[derive(Default)]\n", "file_path": "src/lib.rs", "rank": 24, "score": 14.149773569128591 }, { "content": "#![cfg_attr(not(feature = \"std\"), no_std)]\n\n\n\n#[cfg(all(feature = \"alloc\", not(feature = \"std\")))]\n\nextern crate alloc;\n\n\n\n#[cfg(all(feature = \"alloc\", not(feature = \"std\")))]\n\nuse alloc::vec::Vec;\n\n\n\n#[cfg(all(feature = \"alloc\", not(feature = \"std\")))]\n\nuse alloc::string::*;\n\n\n\n#[cfg(not(feature = \"std\"))]\n\nuse core::prelude::v1::*;\n\n\n\nmod consts;\n\n\n\n#[cfg(feature = 
\"generate\")]\n\npub mod gen;\n\n\n\n#[cfg(all(not(feature = \"std\"), feature = \"rand\"))]\n", "file_path": "src/lib.rs", "rank": 25, "score": 13.119027374177175 }, { "content": "\n\n true\n\n }\n\n #[quickcheck]\n\n fn to_array_returns_inputs_below_10(input: Sudoku) -> bool {\n\n input.data[..] == msolve::Sudoku::from(&input.data).to_array()[..]\n\n }\n\n\n\n #[cfg(feature = \"generate\")]\n\n #[quickcheck]\n\n fn generate_from_seed_has_single_solution(input: Sudoku, n: u8, count_steps: bool) -> bool {\n\n let sudoku = msolve::Sudoku::from(input.data);\n\n sudoku\n\n .generate_from_seed(&mut rand::thread_rng(), n as usize, count_steps)\n\n .0\n\n .has_single_solution()\n\n }\n\n\n\n #[cfg(feature = \"generate\")]\n\n #[quickcheck]\n\n fn generated_has_single_solution(count: u8, count_steps: bool) -> bool {\n\n msolve::Sudoku::generate(rand::thread_rng(), count_steps)\n\n .take(count as usize)\n\n .all(|(sudoku, _)| sudoku.has_single_solution())\n\n }\n\n}\n", "file_path": "tests/tests.rs", "rank": 26, "score": 12.82082741255514 }, { "content": " pub fn difficulty(self, count_steps: bool) -> Option<i32> {\n\n let mut difficulty = -(self.solved_cell_count() as i32);\n\n if count_steps {\n\n let mut iter = solution_iterator::SolutionIterator::<DifficultyRecording>::new(\n\n Self::from(self.to_array()),\n\n );\n\n if iter.next().is_none() || iter.next().is_some() {\n\n return None;\n\n }\n\n difficulty += iter.get_recording() as i32;\n\n }\n\n Some(difficulty)\n\n }\n\n\n\n #[cfg(feature = \"alloc\")]\n\n pub fn list_techniques(self) -> Vec<(String, Sudoku)> {\n\n let mut iter = solution_iterator::SolutionIterator::<FullRecording>::new(self);\n\n iter.next();\n\n iter.next();\n\n iter.get_recording()\n", "file_path": "src/lib.rs", "rank": 27, "score": 12.745875758952094 }, { "content": " pub fn solve_unique(self) -> Option<Self> {\n\n let mut iterator = self.iter();\n\n iterator.next().xor(iterator.next())\n\n }\n\n /**\n\n Counts the number of solutions, up to 
maximum of n\n\n */\n\n #[inline]\n\n pub fn count_solutions(self, n: usize) -> usize {\n\n self.iter().take(n).count()\n\n }\n\n\n\n /**\n\n Check whether the sudoku has exactly one solution without returning the solution\n\n */\n\n #[inline]\n\n pub fn has_single_solution(self) -> bool {\n\n self.count_solutions(2) == 1\n\n }\n\n\n", "file_path": "src/lib.rs", "rank": 28, "score": 12.075602135343988 }, { "content": "# msolve\n\n[![Travis CI](https://api.travis-ci.org/mkb2091/msolve.svg?branch=master)](https://travis-ci.org/github/mkb2091/msolve/builds)\n\n\n\nA WIP sudoku solving library\n\n\n\n# Goals\n\n\n\nImprove performance, potentially via using SIMD, and or GPU\n\n\n\nAdd sudoku difficulty grading, potentially by counting the number of iterations needed to solve\n\n\n\nAdd sudoku generation from a seed sudoku \n\n\n\n# Usage \n\n\n\nTo get the first solution to each sudoku in input: msolve solve < sudokufile.txt > output.txt\n\n\n\nTo get the solution for each uniquely solvable sudoku in input: msolve -u < sudokufile.txt > output.txt\n\n\n\nTo get the first N solutions to each sudoku in input: msolve solve -c N < sudokufile.txt > output.txt\n\n\n\nTo get information about the sudoku file: msolve info < sudokufile.txt\n\n\n\nExample output for info:\n\n 0 Solutions: 486451, 1 Solution: 763, 2+ Solutions: 12786\n\n\n\nTo list all puzzles with a single unique solution: msolve select -v < sudokufile.txt > output.txt\n\n\n\nTo list all puzzles with at least one unique solution: msolve select < sudokufile.txt > output.txt\n\n\n", "file_path": "README.md", "rank": 29, "score": 10.724176384388558 }, { "content": " #[inline]\n\n pub fn has_solution(self) -> bool {\n\n self.count_solutions(1) == 1\n\n }\n\n\n\n /**\n\n Returns an empty sudoku grid, alternative to Sudoku::from([0; 81]) or Sudoku::from(vec![])\n\n */\n\n #[inline]\n\n pub const fn empty() -> Self {\n\n Self {\n\n cells: [consts::SUDOKU_MAX; 81],\n\n solved_squares: 0,\n\n }\n\n }\n\n\n\n /**\n\n 
Estimates the difficulty\n\n */\n\n #[inline]\n", "file_path": "src/lib.rs", "rank": 30, "score": 10.024359892058795 }, { "content": "\n\n/**\n\nStructure that represents a sudoku\n\n*/\n\n#[derive(Copy, Clone)]\n\npub struct Sudoku {\n\n cells: [u16; 81],\n\n solved_squares: u128,\n\n}\n\n\n\nimpl Sudoku {\n\n /**\n\n Remove the value at the chosen square from the set of options of each cell in the sudoku\n\n */\n\n #[inline(always)]\n\n fn apply_number(&mut self, square: usize) {\n\n debug_assert!(square < 81);\n\n if square >= 81 {\n\n unsafe { core::hint::unreachable_unchecked() }\n\n }\n", "file_path": "src/lib.rs", "rank": 31, "score": 9.52053858017267 }, { "content": " }\n\n #[derive(Clone, Debug)]\n\n struct Sudoku {\n\n data: Vec<u8>,\n\n }\n\n impl quickcheck::Arbitrary for Sudoku {\n\n fn arbitrary<G: quickcheck::Gen>(g: &mut G) -> Self {\n\n let mut data = Vec::<u8>::with_capacity(81);\n\n for _ in 0..81 {\n\n data.push(u8::arbitrary(g) % 10);\n\n }\n\n Self { data }\n\n }\n\n }\n\n #[quickcheck]\n\n fn random_sudoku_solve(input: Sudoku) -> bool {\n\n let sudoku = msolve::Sudoku::from(input.data);\n\n sudoku.solve_one();\n\n sudoku.to_array();\n\n sudoku.to_bytes();\n", "file_path": "tests/tests.rs", "rank": 32, "score": 9.157203572824933 }, { "content": " assert_eq!(msolve::Sudoku::empty().count_solutions(1000), 1000);\n\n }\n\n\n\n #[quickcheck]\n\n fn random_array_solve(input: Vec<u32>) -> bool {\n\n let sudoku = msolve::Sudoku::from(input);\n\n sudoku.solve_one();\n\n sudoku.to_array();\n\n sudoku.to_bytes();\n\n\n\n true\n\n }\n\n #[quickcheck]\n\n fn random_string_solve(input: String) -> bool {\n\n if let Ok(sudoku) = input.parse::<msolve::Sudoku>() {\n\n sudoku.solve_one();\n\n sudoku.to_array();\n\n sudoku.to_bytes();\n\n }\n\n true\n", "file_path": "tests/tests.rs", "rank": 33, "score": 9.08416782564585 }, { "content": " array[square] = self.cells[square].trailing_zeros() as u8 + 1;\n\n }\n\n array\n\n }\n\n\n\n pub fn to_bytes(&self) -> [u8; 
81] {\n\n let mut chars = [b'.'; 81];\n\n let mut temp = self.solved_squares;\n\n while temp != 0 {\n\n let square = get_last_digit!(temp, usize);\n\n if square >= 81 {\n\n break;\n\n }\n\n chars[square] = (b\"123456789\")[self.cells[square].trailing_zeros() as usize];\n\n }\n\n chars\n\n }\n\n\n\n pub fn to_pencilmark_bytes(&self) -> [u8; 1605] {\n\n const INNER_ROW_LENGTH: usize = ((3 * 3 + 2) + 1) * 3 + 6;\n", "file_path": "src/lib.rs", "rank": 34, "score": 8.425245934839474 }, { "content": "use crate::{consts, get_last_digit, Sudoku};\n\n\n\n#[cfg(all(feature = \"alloc\", not(feature = \"std\")))]\n\nuse alloc::vec::Vec;\n\n\n\n#[cfg(all(feature = \"smallvec\", feature = \"alloc\"))]\n", "file_path": "src/solution_iterator.rs", "rank": 35, "score": 8.380758288708265 }, { "content": "#[cfg(test)]\n\nextern crate quickcheck;\n\n#[cfg(test)]\n\n#[macro_use(quickcheck)]\n\nextern crate quickcheck_macros;\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n fn test_file(path: &str) {\n\n use std::io::BufRead;\n\n let file_in = std::fs::File::open(path).expect(\"Failed to open file\");\n\n let mut buf = std::io::BufReader::new(file_in);\n\n let mut line = String::with_capacity(81);\n\n while buf.read_line(&mut line).unwrap() > 0 {\n\n if let Ok(sudoku) = sudoku::Sudoku::from_str_line(&line) {\n\n if let Some(solution) = sudoku.solve_unique() {\n\n assert_eq!(\n\n &solution.to_bytes()[..],\n\n &line\n\n .parse::<msolve::Sudoku>()\n", "file_path": "tests/tests.rs", "rank": 36, "score": 8.258646335977263 }, { "content": "/** Max 9 bit number */\n\npub const SUDOKU_MAX: u16 = (1 << 9) - 1;\n\n\n\npub const SOLVED_SUDOKU: u128 = (1 << 81) - 1;\n\n\n\n/*\n\nAfter solving this many squares, do not use pointing pairs\n\n*/\n\npub const SCANNING_CUTOFF: u32 = 40;\n", "file_path": "src/consts.rs", "rank": 37, "score": 7.821668204069505 }, { "content": " recording: T::default(),\n\n }\n\n }\n\n pub fn get_recording(&self) -> T::Output {\n\n self.recording.get_recording()\n\n 
}\n\n}\n\n\n\nimpl<T> Iterator for SolutionIterator<T>\n\nwhere\n\n T: TechniqueRecording,\n\n{\n\n type Item = Sudoku;\n\n fn next(&mut self) -> Option<Self::Item> {\n\n #[cfg(feature = \"alloc\")]\n\n 'outer: while let Some(mut state) = self.routes.pop() {\n\n self.recording.record_step(&state);\n\n if state.solved_squares.count_ones() == 81 {\n\n return Some(state);\n\n }\n", "file_path": "src/solution_iterator.rs", "rank": 38, "score": 7.803954104783953 }, { "content": " };\n\n }\n\n }\n\n }\n\n sudokus.shuffle(&mut rand::thread_rng());\n\n let mut sudoku_iter = sudokus.iter().cycle();\n\n for mode in &[Mode::SolveOne, Mode::SolveUnique] {\n\n for solver in &[Solver::MSolve, Solver::RustSudoku] {\n\n c.bench_function(&format!(\"{}_{:?}_{:?}\", path, solver, mode), |b| {\n\n b.iter(|| {\n\n criterion::black_box(bench_solving(sudoku_iter.next(), *solver, *mode));\n\n })\n\n });\n\n }\n\n }\n\n }\n\n let worlds_hardest_sudoku: [u8; 81] = criterion::black_box([\n\n 8, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 6, 0, 0, 0, 0, 0, 0, 7, 0, 0, 9, 0, 2, 0, 0, 0, 5, 0,\n\n 0, 0, 7, 0, 0, 0, 0, 0, 0, 0, 4, 5, 7, 0, 0, 0, 0, 0, 1, 0, 0, 0, 3, 0, 0, 0, 1, 0, 0, 0,\n\n 0, 6, 8, 0, 0, 8, 5, 0, 0, 0, 1, 0, 0, 9, 0, 0, 0, 0, 4, 0, 0,\n", "file_path": "benches/benchmarks.rs", "rank": 39, "score": 7.603288653286823 }, { "content": " // If more than 3 digits can only be in intersection, then there is no solution\n\n }\n\n }\n\n sudoku_check &= temp_total;\n\n }\n\n self.cells = sudoku;\n\n sudoku_check == consts::SUDOKU_MAX\n\n }\n\n\n\n /**\n\n Convert the sudoku into a [u8; 81] containing the numerical form of each solved square\n\n */\n\n pub fn to_array(&self) -> [u8; 81] {\n\n let mut array = [0; 81];\n\n let mut temp = self.solved_squares;\n\n while temp != 0 {\n\n let square = get_last_digit!(temp, usize);\n\n if square >= 81 {\n\n break;\n\n }\n", "file_path": "src/lib.rs", "rank": 40, "score": 7.504326702907149 }, { "content": " let mut temp = sudoku.solved_squares;\n\n let mut 
valid = true;\n\n while temp != 0 {\n\n let square = get_last_digit!(temp, usize);\n\n if sudoku.cells[square].is_power_of_two() {\n\n sudoku.apply_number(square);\n\n } else {\n\n valid = false;\n\n break;\n\n }\n\n }\n\n #[cfg(feature = \"alloc\")]\n\n let mut routes = SudokuBackTrackingVec::with_capacity(10);\n\n #[cfg(feature = \"alloc\")]\n\n if valid && sudoku.scan() {\n\n routes.push(sudoku);\n\n }\n\n Self {\n\n #[cfg(feature = \"alloc\")]\n\n routes,\n", "file_path": "src/solution_iterator.rs", "rank": 41, "score": 7.491075339731481 }, { "content": " i = get_last_digit!(temp, usize);\n\n }\n\n debug_assert_ne!(array[i], 0);\n\n array[i] = 0;\n\n\n\n solved_squares -= 1 << i;\n\n }\n\n\n\n let mut sudoku = Sudoku::from(array);\n\n sudoku.scan();\n\n let cell_distribution = rand::distributions::Uniform::new(0, 81);\n\n while (sudoku.solved_squares & consts::SOLVED_SUDOKU) != consts::SOLVED_SUDOKU {\n\n let index = cell_distribution.sample(rng);\n\n if sudoku.solved_squares & (1 << index) != 0 {\n\n continue;\n\n }\n\n let mut temp = sudoku;\n\n let mut value = temp.cells[index];\n\n debug_assert_ne!(value, 0);\n\n let chosen_value_index = rng.gen_range(0, value.count_ones());\n", "file_path": "src/gen.rs", "rank": 42, "score": 7.181820830481771 }, { "content": " (\n\n sudoku,\n\n sudoku.difficulty(count_steps).unwrap_or_else(|| {\n\n debug_assert!(false);\n\n i32::MIN\n\n }),\n\n )\n\n })\n\n}\n\n\n\npub struct SudokuGenerator<T>\n\nwhere\n\n T: rand::Rng + rand_core::RngCore,\n\n{\n\n rng: T,\n\n current: Option<(Sudoku, SudokuScore)>,\n\n count_steps: bool,\n\n}\n\n\n\nimpl<T> SudokuGenerator<T>\n", "file_path": "src/gen.rs", "rank": 43, "score": 7.156406180746016 }, { "content": " });\n\n c.bench_function(\"solved_sudoku\", |b| {\n\n b.iter(|| {\n\n criterion::black_box(&msolve::Sudoku::from(&solved_sudoku).solve_one());\n\n })\n\n });\n\n c.bench_function(\"empty_sudoku\", |b| {\n\n b.iter(|| {\n\n 
criterion::black_box(&msolve::Sudoku::empty().solve_one());\n\n })\n\n });\n\n c.bench_function(\"first 1000 solutions to empty_sudoku\", |b| {\n\n b.iter(|| {\n\n criterion::black_box(&msolve::Sudoku::empty().count_solutions(1000));\n\n })\n\n });\n\n #[cfg(feature = \"generate\")]\n\n for count_steps in [true, false].iter() {\n\n let string = if *count_steps {\n\n \"Counting Steps\"\n", "file_path": "benches/benchmarks.rs", "rank": 44, "score": 6.3882029108104055 }, { "content": " let not_value = !self.cells[square];\n\n for i in &consts::CELLS_TO_CHANGE[square] {\n\n self.cells[*i as usize] &= not_value;\n\n }\n\n\n\n debug_assert_eq!(self.cells[square], !not_value);\n\n self.solved_squares |= 1 << square;\n\n }\n\n\n\n /**\n\n Check what values the row, column and square it is in and compare them\n\n */\n\n fn hidden_singles(&mut self, square: usize) -> Result<bool, ()> {\n\n debug_assert!(square < 81);\n\n if square >= 81 {\n\n unsafe { core::hint::unreachable_unchecked() }\n\n }\n\n let value = self.cells[square];\n\n self.cells[square] = 0;\n\n let row_start = square / 9 * 9;\n", "file_path": "src/lib.rs", "rank": 45, "score": 6.279856563436298 }, { "content": "#[macro_use]\n\nextern crate criterion;\n\nextern crate rand;\n\nextern crate sudoku;\n\n\n\nuse rand::prelude::*;\n\n\n\nuse criterion::Criterion;\n\nuse std::io::BufRead;\n\nuse std::str::FromStr;\n\n\n", "file_path": "benches/benchmarks.rs", "rank": 46, "score": 6.15348720939026 }, { "content": "use crate::*;\n\n\n\npub const fn cells_in_house(square: usize) -> [u8; 20] {\n\n let column_start = square % 9;\n\n let row_start = square - column_start;\n\n let box_start = square / 3 % 3 * 3 + square / 27 * 27;\n\n let mut squares_to_change: u128 = 0;\n\n squares_to_change |= ((1 << 9) - 1) << row_start;\n\n squares_to_change |= (1\n\n + (1 << 9)\n\n + (1 << 18)\n\n + (1 << 27)\n\n + (1 << 36)\n\n + (1 << 45)\n\n + (1 << 54)\n\n + (1 << 63)\n\n + (1 << 72))\n\n << column_start;\n\n squares_to_change |= 
(0b111 + (0b111 << 9) + (0b111 << 18)) << box_start;\n\n squares_to_change &= !(1 << square);\n", "file_path": "src/consts.rs", "rank": 47, "score": 6.123743318685014 }, { "content": " let mut i = get_last_digit!(value, usize);\n\n for _ in 0..chosen_value_index {\n\n i = get_last_digit!(value, usize);\n\n }\n\n temp.cells[index] = 1 << i;\n\n temp.apply_number(index);\n\n temp.scan();\n\n match temp.count_solutions(2) {\n\n 2 => sudoku = temp,\n\n 1 => {\n\n sudoku = temp;\n\n break;\n\n }\n\n 0 => sudoku.cells[index] -= 1 << i,\n\n _ => {\n\n debug_assert!(false, \"More than 2 returned from count_solutions(2)\");\n\n }\n\n }\n\n }\n\n minimise(sudoku, None, count_steps).unwrap_or_else(|| {\n", "file_path": "src/gen.rs", "rank": 48, "score": 5.849186668227585 }, { "content": " }\n\n}\n\n\n\nimpl PartialEq for Sudoku {\n\n fn eq(&self, other: &Self) -> bool {\n\n self.solved_squares == other.solved_squares && self.cells[..] == other.cells[..]\n\n }\n\n}\n\n\n\nimpl<T: TryInto<u32> + Copy> From<&[T]> for Sudoku {\n\n fn from(sudoku_array: &[T]) -> Self {\n\n Self::import(sudoku_array.iter().map(|x| (*x).try_into().ok()))\n\n }\n\n}\n\nimpl<T: TryInto<u32> + Copy> From<&[T; 81]> for Sudoku {\n\n #[inline]\n\n fn from(sudoku_array: &[T; 81]) -> Self {\n\n Self::from(&sudoku_array[..])\n\n }\n\n}\n", "file_path": "src/lib.rs", "rank": 49, "score": 5.77558964415625 }, { "content": " {\n\n self.recording.record_apply_number(square, &state);\n\n if state.solved_squares.count_ones() == 80 {\n\n state.solved_squares |= 1 << square;\n\n return Some(state);\n\n }\n\n state.apply_number(square);\n\n } else {\n\n let possible_values = state.cells[square].count_ones();\n\n if possible_values < min.1 {\n\n min = (square, possible_values);\n\n }\n\n }\n\n }\n\n debug_assert!(min.1 <= 9);\n\n if state.solved_squares.count_ones() >= consts::SCANNING_CUTOFF || {\n\n self.recording.record_scan(&state);\n\n state.scan()\n\n } {\n\n let mut value = state.cells[min.0];\n", "file_path": 
"src/solution_iterator.rs", "rank": 50, "score": 5.772264819719631 }, { "content": " let mut min: (usize, u32) = (0, u32::MAX);\n\n let mut temp = !state.solved_squares;\n\n loop {\n\n let square = get_last_digit!(temp, usize);\n\n if square >= 81 {\n\n break;\n\n }\n\n if state.cells[square] == 0 {\n\n continue 'outer;\n\n }\n\n if state.cells[square].is_power_of_two()\n\n || match state.hidden_singles(square).ok() {\n\n Some(result) => {\n\n if result {\n\n self.recording.record_hidden_single(square, &state);\n\n };\n\n result\n\n }\n\n None => continue 'outer,\n\n }\n", "file_path": "src/solution_iterator.rs", "rank": 51, "score": 5.4527501872505315 }, { "content": " let mut sudoku = self.cells;\n\n let mut sudoku_check = consts::SUDOKU_MAX;\n\n for floor_number in (0..3).map(|x| x * 27) {\n\n let mut only = [0; 9];\n\n let mut intersections = [0_u16; 9]; // Intersection\n\n for i in 0..9 {\n\n intersections[i] = sudoku[floor_number + i * 3]\n\n | sudoku[floor_number + i * 3 + 1]\n\n | sudoku[floor_number + i * 3 + 2];\n\n only[i] = intersections[i] * (intersections[i].count_ones() <= 3) as u16;\n\n }\n\n let (resultant_mask, only) = generate_masks_from_intersections(intersections, only);\n\n\n\n let mut temp_total = 0;\n\n for (i, (row, only_row)) in resultant_mask.iter().zip(only.iter()).enumerate() {\n\n temp_total |= row;\n\n let row =\n\n row & [consts::SUDOKU_MAX, *only_row][(only_row.count_ones() == 3) as usize];\n\n sudoku[floor_number + i * 3] &= row;\n\n sudoku[floor_number + i * 3 + 1] &= row;\n", "file_path": "src/lib.rs", "rank": 52, "score": 5.424340673565299 }, { "content": "#![no_main]\n\nuse libfuzzer_sys::fuzz_target;\n\n\n\nfuzz_target!(|data: [[u8; 27]; 3]| {\n\n let data = unsafe { std::mem::transmute::<[[u8; 27]; 3], [u8; 81]>(data) };\n\n let sudoku = msolve::Sudoku::from(data);\n\n for solution in sudoku.iter().take(2) {\n\n assert!(solution.to_array().iter().all(|x| *x <= 9 && *x != 0));\n\n assert!(solution.to_bytes().iter().all(|x| *x 
!= b'.'));\n\n }\n\n});\n", "file_path": "fuzz/fuzz_targets/fuzz_target_1.rs", "rank": 53, "score": 5.346059831078178 }, { "content": " fn next(&mut self) -> Option<Self::Item> {\n\n if let Some(current) = self.current {\n\n self.current = mutate(current.0, Some(current.1), self.count_steps);\n\n }\n\n if self.current.is_none() {\n\n self.current = Some(generate(&mut self.rng, self.count_steps));\n\n }\n\n self.current\n\n }\n\n}\n", "file_path": "src/gen.rs", "rank": 54, "score": 5.085217932176352 }, { "content": "where\n\n T: rand::Rng + rand_core::RngCore,\n\n{\n\n pub fn new(rng: T, count_steps: bool) -> Self\n\n where\n\n T: rand::Rng + rand_core::RngCore,\n\n {\n\n SudokuGenerator {\n\n rng,\n\n current: None,\n\n count_steps,\n\n }\n\n }\n\n}\n\n\n\nimpl<T> Iterator for SudokuGenerator<T>\n\nwhere\n\n T: rand::Rng + rand_core::RngCore,\n\n{\n\n type Item = (Sudoku, SudokuScore);\n", "file_path": "src/gen.rs", "rank": 55, "score": 5.016408140766467 }, { "content": " } else {\n\n \"Without Counting Steps\"\n\n };\n\n c.bench_function(&format!(\"Generate first {}\", string), |b| {\n\n b.iter(|| {\n\n criterion::black_box(\n\n &msolve::Sudoku::generate(rand::thread_rng(), *count_steps)\n\n .next()\n\n .unwrap(),\n\n );\n\n })\n\n });\n\n let mut generator = msolve::Sudoku::generate(rand::thread_rng(), *count_steps);\n\n c.bench_function(&format!(\"Generate puzzle {}\", string), |b| {\n\n b.iter(|| {\n\n criterion::black_box(&generator.next().unwrap());\n\n })\n\n });\n\n }\n\n}\n\ncriterion_group!(benches, criterion_benchmark);\n\ncriterion_main!(benches);\n", "file_path": "benches/benchmarks.rs", "rank": 56, "score": 4.9646979039816035 }, { "content": " let mut squares_to_change_array = [0; 20];\n\n\n\n let mut i = 0;\n\n while i < 20 {\n\n squares_to_change_array[i] = get_last_digit!(squares_to_change, u8);\n\n i += 1;\n\n }\n\n squares_to_change_array\n\n}\n\n\n\npub const CELLS_TO_CHANGE: [[u8; 20]; 81] = {\n\n let mut data = [[0; 20]; 81];\n\n let mut i = 
0;\n\n while i < 81 {\n\n data[i] = cells_in_house(i);\n\n i += 1;\n\n }\n\n data\n\n};\n\n\n", "file_path": "src/consts.rs", "rank": 57, "score": 4.796510174293275 }, { "content": "impl<T: TryInto<u32> + Copy> From<[T; 81]> for Sudoku {\n\n #[inline]\n\n fn from(sudoku_array: [T; 81]) -> Self {\n\n Self::from(&sudoku_array[..])\n\n }\n\n}\n\n\n\n#[cfg(feature = \"alloc\")]\n\nimpl<T: TryInto<u32> + Copy> From<Vec<T>> for Sudoku {\n\n #[inline]\n\n fn from(sudoku_array: Vec<T>) -> Self {\n\n Self::from(&sudoku_array[..])\n\n }\n\n}\n\n\n\n#[cfg(feature = \"alloc\")]\n\nimpl<T: TryInto<u32> + Copy> From<&Vec<T>> for Sudoku {\n\n #[inline]\n\n fn from(sudoku_array: &Vec<T>) -> Self {\n\n Self::from(&sudoku_array[..])\n", "file_path": "src/lib.rs", "rank": 58, "score": 4.533982092298384 }, { "content": " .unwrap()\n\n .solve_unique()\n\n .unwrap()\n\n .to_array()[..]\n\n );\n\n } else if let Ok(msolve_sudoku) = line.parse::<msolve::Sudoku>() {\n\n assert!(msolve_sudoku.solve_unique().is_none());\n\n assert_eq!(\n\n sudoku.count_at_most(100),\n\n msolve_sudoku.count_solutions(100)\n\n );\n\n }\n\n }\n\n line.clear();\n\n }\n\n }\n\n #[test]\n\n fn top2365() {\n\n test_file(\"bench_sudokus/top2365\");\n\n }\n", "file_path": "tests/tests.rs", "rank": 59, "score": 4.474971433637727 }, { "content": " }\n\n }\n\n let (resultant_mask, only) = generate_masks_from_intersections(intersections, only);\n\n\n\n let mut temp_total = 0;\n\n\n\n for column_number in 0..3 {\n\n for layer in 0..3 {\n\n let i = column_number * 3 + layer;\n\n let column = resultant_mask[i];\n\n let only_column = only[i];\n\n temp_total |= column;\n\n let column = column\n\n & [consts::SUDOKU_MAX, only_column]\n\n [(only_column.count_ones() == 3) as usize];\n\n sudoku[tower_number + layer * 27 + column_number] &= column;\n\n sudoku[tower_number + layer * 27 + column_number + 9] &= column;\n\n sudoku[tower_number + layer * 27 + column_number + 18] &= column;\n\n\n\n sudoku_check *= 
(only_column.count_ones() <= 3) as u16;\n", "file_path": "src/lib.rs", "rank": 60, "score": 4.337053981376052 }, { "content": " temp.apply_number(index);\n\n temp.scan();\n\n match temp.count_solutions(2) {\n\n 2 => sudoku = temp,\n\n 1 => {\n\n sudoku = temp;\n\n break;\n\n }\n\n 0 => sudoku.cells[index] -= 1 << i,\n\n _ => {\n\n debug_assert!(false, \"More than 2 returned from count_solutions(2)\");\n\n }\n\n }\n\n }\n\n minimise(sudoku, None, count_steps).unwrap_or_else(|| {\n\n (\n\n sudoku,\n\n sudoku.difficulty(count_steps).unwrap_or_else(|| {\n\n debug_assert!(false);\n\n i32::MIN\n\n }),\n\n )\n\n })\n\n}\n\n\n", "file_path": "src/gen.rs", "rank": 61, "score": 4.2907369702276625 }, { "content": " self.cells[square] = value;\n\n Ok(false) // Don't yet know enough information to determine which value it must be\n\n } else if (value & needed).is_power_of_two() {\n\n self.cells[square] = value & needed;\n\n Ok(needed != value) // It can be the value it is needed to be\n\n } else {\n\n Err(()) // It has to be multiple different values, sudoku cannot be solved\n\n }\n\n }\n\n\n\n fn scan(&mut self) -> bool {\n\n fn generate_masks_from_intersections(\n\n isec: [u16; 9],\n\n mut only: [u16; 9],\n\n ) -> ([u16; 9], [u16; 9]) {\n\n only[0] |= isec[0] & !((isec[1] | isec[2]) & (isec[3] | isec[6]));\n\n only[1] |= isec[1] & !((isec[0] | isec[2]) & (isec[4] | isec[7]));\n\n only[2] |= isec[2] & !((isec[0] | isec[1]) & (isec[5] | isec[8]));\n\n\n\n only[3] |= isec[3] & !((isec[4] | isec[5]) & (isec[0] | isec[6]));\n", "file_path": "src/lib.rs", "rank": 62, "score": 4.08619519210554 }, { "content": " T: rand::Rng + rand_core::RngCore,\n\n {\n\n gen::generate_from_seed(self, rng, cells_to_remove, count_steps)\n\n }\n\n\n\n fn import<T: Iterator<Item = Option<u32>>>(square_iterator: T) -> Self {\n\n let mut sudoku = Self::empty();\n\n for (i, int) in square_iterator\n\n .enumerate()\n\n .take(81)\n\n .filter_map(|(i, item)| {\n\n item.filter(|x| *x <= 9)\n\n .and_then(|x| 
x.checked_sub(1))\n\n .map(|x| (i, x))\n\n })\n\n {\n\n sudoku.cells[i] = 1 << int;\n\n sudoku.solved_squares |= 1 << i;\n\n }\n\n sudoku\n", "file_path": "src/lib.rs", "rank": 63, "score": 3.9813761364245197 }, { "content": " }\n\n /**\n\n Returns an iterator over all solutions\n\n */\n\n #[inline]\n\n pub fn iter(self) -> solution_iterator::QuickSolutionIterator {\n\n solution_iterator::QuickSolutionIterator::new(self)\n\n }\n\n /**\n\n Get the first solution.\n\n */\n\n #[inline]\n\n pub fn solve_one(self) -> Option<Self> {\n\n self.iter().next()\n\n }\n\n\n\n /**\n\n Returns the first solution if it is uniquely solvable, otherwise returns None\n\n */\n\n #[inline]\n", "file_path": "src/lib.rs", "rank": 64, "score": 3.9364066632065304 }, { "content": " sudoku[floor_number + i * 3 + 2] &= row;\n\n\n\n sudoku_check *= (only_row.count_ones() <= 3) as u16;\n\n // If more than 3 digits can only be in intersection, then there is no solution\n\n }\n\n sudoku_check &= temp_total;\n\n }\n\n if sudoku_check != consts::SUDOKU_MAX {\n\n return false;\n\n }\n\n for tower_number in (0..3).map(|x| x * 3) {\n\n let mut only = [0; 9];\n\n let mut intersections = [0_u16; 9]; // Intersection\n\n for column in 0..3 {\n\n for layer in 0..3 {\n\n let i = column * 3 + layer;\n\n intersections[i] = sudoku[tower_number + layer * 27 + column]\n\n | sudoku[tower_number + layer * 27 + column + 9]\n\n | sudoku[tower_number + layer * 27 + column + 18];\n\n only[i] = intersections[i] * (intersections[i].count_ones() <= 3) as u16;\n", "file_path": "src/lib.rs", "rank": 65, "score": 3.7249207285420587 }, { "content": " while buf.read_line(&mut line).unwrap() > 0 {\n\n if let Ok(sudoku) = sudoku::Sudoku::from_str_line(&line) {\n\n sudokus.push((&sudoku.to_str_line()).to_string());\n\n }\n\n line.clear();\n\n }\n\n if *shuffle {\n\n for sudoku_string in sudokus.iter_mut() {\n\n let mut sudoku = sudoku::Sudoku::from_str_line(&*sudoku_string).unwrap();\n\n sudoku.shuffle();\n\n *sudoku_string = 
sudoku.to_string()\n\n }\n\n while sudokus.len() < 50000 {\n\n let len = sudokus.len();\n\n for i in 0..len {\n\n sudokus.push(sudokus[i].clone());\n\n sudokus[i] = {\n\n let mut sudoku = sudoku::Sudoku::from_str_line(&sudokus[i]).unwrap();\n\n sudoku.shuffle();\n\n sudoku.to_string()\n", "file_path": "benches/benchmarks.rs", "rank": 66, "score": 3.6272008634049673 }, { "content": " while value != 0 {\n\n let i = get_last_digit!(value, u16);\n\n let mut new = state;\n\n new.cells[min.0] = 1 << i;\n\n self.recording.record_apply_number(min.0, &state);\n\n new.apply_number(min.0);\n\n self.routes.push(new);\n\n }\n\n }\n\n }\n\n None\n\n }\n\n}\n\n\n\n#[derive(Default)]\n\npub struct NoRecording {}\n\n\n\nimpl TechniqueRecording for NoRecording {\n\n type Output = ();\n\n fn get_recording(&self) {}\n\n}\n\n\n\npub type QuickSolutionIterator = SolutionIterator<NoRecording>;\n", "file_path": "src/solution_iterator.rs", "rank": 67, "score": 3.6150910007968187 }, { "content": " let new = Sudoku::from(&array);\n\n if let Some(new_score) = new.difficulty(count_steps) {\n\n if new_score > best_score && (count_steps || new.has_single_solution()) {\n\n best_score = new_score;\n\n best_sudoku = Some(new);\n\n changed = true;\n\n if !count_steps {\n\n array[square] = old_value;\n\n break;\n\n }\n\n }\n\n } else {\n\n removable -= 1 << square;\n\n }\n\n array[square] = old_value;\n\n }\n\n if best_sudoku.is_none() && changed {\n\n debug_assert!(old.is_some());\n\n return old;\n\n }\n\n old = best_sudoku.map(|best_sudoku| (best_sudoku, best_score));\n\n }\n\n None\n\n}\n\n\n", "file_path": "src/gen.rs", "rank": 68, "score": 3.54657987074564 }, { "content": "#[cfg(all(not(feature = \"smallvec\"), feature = \"alloc\"))]\n\ntype SudokuBackTrackingVec = Vec<Sudoku>;\n\n\n", "file_path": "src/solution_iterator.rs", "rank": 69, "score": 3.3401557881083925 }, { "content": " let index = inner_row_start + column * 4 + column / 3 * 3;\n\n output_grid[index] = (output_digits.0)[(digits & 
masks.0 != 0) as usize];\n\n output_grid[index + 1] = (output_digits.1)[(digits & masks.1 != 0) as usize];\n\n output_grid[index + 2] = (output_digits.2)[(digits & masks.2 != 0) as usize];\n\n output_grid[index + 3] = b'|';\n\n output_grid[index + 4] = b' ';\n\n output_grid[index + 5] = b' ';\n\n output_grid[index + 6] = b'|';\n\n }\n\n\n\n output_grid[inner_row_start + INNER_ROW_LENGTH - 1] = b'\\n';\n\n }\n\n for (ptr, value) in output_grid[row_start + INNER_ROW_LENGTH * 3..]\n\n .iter_mut()\n\n .zip(FORMAT_ROW.iter())\n\n {\n\n *ptr = *value;\n\n }\n\n }\n\n output_grid\n", "file_path": "src/lib.rs", "rank": 70, "score": 3.3074163385597863 }, { "content": "use crate::*;\n\n\n", "file_path": "src/gen.rs", "rank": 71, "score": 3.1030639632216603 }, { "content": " const OUTER_ROW_LENGTH: usize = INNER_ROW_LENGTH * 4 + 1;\n\n const TOTAL_LENGTH: usize = OUTER_ROW_LENGTH * 9 + INNER_ROW_LENGTH * 2;\n\n const FORMAT_ROW: [u8; 85] = *b\"---+---+---+ +---+---+---+ +---+---+---\\n\\n---+---+---+ +---+---+---+ +---+---+---\\n\";\n\n let mut output_grid = [b'!'; TOTAL_LENGTH]; // '!' 
makes it easier to spot mistakes\n\n for row in 0..9 {\n\n let row_start = row * OUTER_ROW_LENGTH + row / 3 * (INNER_ROW_LENGTH);\n\n for inner_row in 0..3 {\n\n let inner_row_start = row_start + inner_row * INNER_ROW_LENGTH;\n\n let masks = (\n\n 1 << (inner_row * 3),\n\n 1 << (inner_row * 3 + 1),\n\n 1 << (inner_row * 3 + 2),\n\n );\n\n let output_digits = [\n\n (b\" 1\", b\" 2\", b\" 3\"),\n\n (b\" 4\", b\" 5\", b\" 6\"),\n\n (b\" 7\", b\" 8\", b\" 9\"),\n\n ][inner_row];\n\n for column in 0..9 {\n\n let digits = self.cells[row * 9 + column];\n", "file_path": "src/lib.rs", "rank": 72, "score": 2.962355750163212 }, { "content": " explanation.push(b\"123456789\"[square / 9] as char);\n\n explanation.push('C');\n\n explanation.push(b\"123456789\"[square % 9] as char);\n\n self.techniques.push((explanation, *state))\n\n }\n\n fn get_recording(&self) -> Self::Output {\n\n let mut result = self.techniques.clone();\n\n result.dedup_by_key(|(_, sudoku)| *sudoku);\n\n result\n\n }\n\n}\n\n\n\n#[macro_export]\n\nmacro_rules! 
get_last_digit {\n\n ($x:ident, $value_type:ty) => {{\n\n let value = $x.trailing_zeros();\n\n $x -= 1 << value;\n\n value as $value_type\n\n }};\n\n}\n", "file_path": "src/lib.rs", "rank": 73, "score": 2.8564500115443945 }, { "content": " ]);\n\n c.bench_function(\"easy_8802\", |b| {\n\n b.iter(|| {\n\n criterion::black_box(&msolve::Sudoku::from(&easy_8802).solve_one());\n\n })\n\n });\n\n c.bench_function(\"World's Hardest Sudoku\", |b| {\n\n b.iter(|| {\n\n criterion::black_box(&msolve::Sudoku::from(&worlds_hardest_sudoku).solve_one());\n\n })\n\n });\n\n c.bench_function(\"hardbrute_sudoku\", |b| {\n\n b.iter(|| {\n\n criterion::black_box(&msolve::Sudoku::from(&hardbrute_sudoku).solve_one());\n\n })\n\n });\n\n c.bench_function(\"random17_sudoku\", |b| {\n\n b.iter(|| {\n\n criterion::black_box(&msolve::Sudoku::from(&random17_sudoku).solve_one());\n\n })\n", "file_path": "benches/benchmarks.rs", "rank": 74, "score": 2.506080899870919 }, { "content": " .to_array()[..]\n\n );\n\n assert_eq!(\n\n &solutions_str[..],\n\n &msolve::Sudoku::from(&sudoku)\n\n .solve_unique()\n\n .unwrap()\n\n .to_bytes()[..]\n\n );\n\n }\n\n\n\n #[test]\n\n fn hardbrute_test() {\n\n let sudoku: [u8; 81] = [\n\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 0, 8, 5, 0, 0, 1, 0, 2, 0, 0, 0, 0, 0, 0,\n\n 0, 5, 0, 7, 0, 0, 0, 0, 0, 4, 0, 0, 0, 1, 0, 0, 0, 9, 0, 0, 0, 0, 0, 0, 0, 5, 0, 0, 0,\n\n 0, 0, 0, 7, 3, 0, 0, 2, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 9,\n\n ];\n\n let solution: [u8; 81] = [\n\n 9, 8, 7, 6, 5, 4, 3, 2, 1, 2, 4, 6, 1, 7, 3, 9, 8, 5, 3, 5, 1, 9, 2, 8, 7, 4, 6, 1, 2,\n", "file_path": "tests/tests.rs", "rank": 75, "score": 1.9998683986387646 }, { "content": " 2, 6, 4, 7, 1, 5, 8, 3, 9, 1, 3, 7, 8, 9, 2, 6, 4, 5, 5, 9, 8, 4, 3, 6, 2, 7, 1, 4, 2,\n\n 3, 1, 7, 8, 5, 9, 6, 8, 1, 6, 5, 4, 9, 7, 2, 3, 7, 5, 9, 6, 2, 3, 4, 1, 8, 3, 7, 5, 2,\n\n 8, 1, 9, 6, 4, 9, 8, 2, 3, 6, 4, 1, 5, 7, 6, 4, 1, 9, 5, 7, 3, 8, 2,\n\n ];\n\n 
assert!(msolve::Sudoku::from(&sudoku).has_single_solution());\n\n assert_eq!(\n\n &solution[..],\n\n &msolve::Sudoku::from(&sudoku)\n\n .solve_unique()\n\n .unwrap()\n\n .to_array()[..]\n\n );\n\n }\n\n #[test]\n\n fn easy_8802_test() {\n\n let sudoku: [u8; 81] = [\n\n 0, 5, 0, 4, 0, 0, 9, 0, 0, 0, 2, 0, 0, 0, 3, 0, 0, 5, 9, 0, 0, 0, 0, 7, 6, 3, 0, 0, 7,\n\n 5, 0, 0, 0, 0, 0, 4, 4, 1, 0, 0, 0, 0, 7, 9, 0, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 0, 9,\n\n 0, 0, 2, 7, 1, 7, 0, 0, 0, 0, 5, 4, 0, 6, 0, 0, 2, 0, 0, 0, 0, 0, 0,\n\n ];\n", "file_path": "tests/tests.rs", "rank": 76, "score": 1.9345009038022094 }, { "content": " ]);\n\n let hardbrute_sudoku: [u8; 81] = criterion::black_box([\n\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 0, 8, 5, 0, 0, 1, 0, 2, 0, 0, 0, 0, 0, 0, 0,\n\n 5, 0, 7, 0, 0, 0, 0, 0, 4, 0, 0, 0, 1, 0, 0, 0, 9, 0, 0, 0, 0, 0, 0, 0, 5, 0, 0, 0, 0, 0,\n\n 0, 7, 3, 0, 0, 2, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 9,\n\n ]);\n\n let easy_8802: [u8; 81] = criterion::black_box([\n\n 0, 5, 0, 4, 0, 0, 9, 0, 0, 0, 2, 0, 0, 0, 3, 0, 0, 5, 9, 0, 0, 0, 0, 7, 6, 3, 0, 0, 7, 5,\n\n 0, 0, 0, 0, 0, 4, 4, 1, 0, 0, 0, 0, 7, 9, 0, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 0, 9, 0, 0,\n\n 2, 7, 1, 7, 0, 0, 0, 0, 5, 4, 0, 6, 0, 0, 2, 0, 0, 0, 0, 0, 0,\n\n ]);\n\n let random17_sudoku: [u8; 81] = criterion::black_box([\n\n 0, 0, 0, 7, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 3, 0, 2, 0, 0, 0, 0, 0,\n\n 0, 0, 0, 0, 0, 6, 0, 0, 0, 5, 0, 9, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 1, 8, 0, 0, 0, 0, 8, 1,\n\n 0, 0, 0, 0, 0, 2, 0, 0, 0, 0, 5, 0, 0, 4, 0, 0, 0, 0, 3, 0, 0,\n\n ]);\n\n let solved_sudoku: [u8; 81] = criterion::black_box([\n\n 8, 1, 2, 7, 5, 3, 6, 4, 9, 9, 4, 3, 6, 8, 2, 1, 7, 5, 6, 7, 5, 4, 9, 1, 2, 8, 3, 1, 5, 4,\n\n 2, 3, 7, 8, 9, 6, 3, 6, 9, 8, 4, 5, 7, 2, 1, 2, 8, 7, 1, 6, 9, 5, 3, 4, 5, 2, 1, 9, 7, 4,\n\n 3, 6, 8, 4, 3, 8, 5, 2, 6, 9, 1, 7, 7, 9, 6, 3, 1, 8, 4, 5, 2,\n", "file_path": "benches/benchmarks.rs", "rank": 77, "score": 1.8732713472948366 }, { "content": " 
8, 5, 3, 7, 6, 9, 4, 6, 3, 4, 8, 9, 2, 1, 5, 7, 7, 9, 5, 4, 6, 1, 8, 3, 2, 5, 1, 9, 2,\n\n 8, 6, 4, 7, 3, 4, 7, 2, 3, 1, 9, 5, 6, 8, 8, 6, 3, 7, 4, 5, 2, 1, 9,\n\n ];\n\n assert!(msolve::Sudoku::from(&sudoku).has_single_solution());\n\n assert_eq!(\n\n &solution[..],\n\n &msolve::Sudoku::from(&sudoku)\n\n .solve_unique()\n\n .unwrap()\n\n .to_array()[..]\n\n );\n\n }\n\n #[test]\n\n fn random17_test() {\n\n let sudoku: [u8; 81] = [\n\n 0, 0, 0, 7, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 3, 0, 2, 0, 0, 0, 0,\n\n 0, 0, 0, 0, 0, 0, 6, 0, 0, 0, 5, 0, 9, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 1, 8, 0, 0, 0, 0,\n\n 8, 1, 0, 0, 0, 0, 0, 2, 0, 0, 0, 0, 5, 0, 0, 4, 0, 0, 0, 0, 3, 0, 0,\n\n ];\n\n let solution: [u8; 81] = [\n", "file_path": "tests/tests.rs", "rank": 78, "score": 1.8732713472948366 }, { "content": " #[test]\n\n fn worlds_hardest_test() {\n\n let sudoku: [u8; 81] = [\n\n 8, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 6, 0, 0, 0, 0, 0, 0, 7, 0, 0, 9, 0, 2, 0, 0, 0, 5,\n\n 0, 0, 0, 7, 0, 0, 0, 0, 0, 0, 0, 4, 5, 7, 0, 0, 0, 0, 0, 1, 0, 0, 0, 3, 0, 0, 0, 1, 0,\n\n 0, 0, 0, 6, 8, 0, 0, 8, 5, 0, 0, 0, 1, 0, 0, 9, 0, 0, 0, 0, 4, 0, 0,\n\n ];\n\n let solution: [u8; 81] = [\n\n 8, 1, 2, 7, 5, 3, 6, 4, 9, 9, 4, 3, 6, 8, 2, 1, 7, 5, 6, 7, 5, 4, 9, 1, 2, 8, 3, 1, 5,\n\n 4, 2, 3, 7, 8, 9, 6, 3, 6, 9, 8, 4, 5, 7, 2, 1, 2, 8, 7, 1, 6, 9, 5, 3, 4, 5, 2, 1, 9,\n\n 7, 4, 3, 6, 8, 4, 3, 8, 5, 2, 6, 9, 1, 7, 7, 9, 6, 3, 1, 8, 4, 5, 2,\n\n ];\n\n let solutions_str =\n\n b\"812753649943682175675491283154237896369845721287169534521974368438526917796318452\";\n\n assert!(msolve::Sudoku::from(&sudoku).has_single_solution());\n\n assert_eq!(\n\n &solution[..],\n\n &msolve::Sudoku::from(&sudoku)\n\n .solve_unique()\n\n .unwrap()\n", "file_path": "tests/tests.rs", "rank": 79, "score": 1.8157988698766991 }, { "content": " while temp3 != 0 {\n\n let value = get_last_digit!(temp3, u8);\n\n array[s2] = value;\n\n let sudoku = Sudoku::from(&array);\n\n if sudoku.has_single_solution() {\n\n best = 
minimise(\n\n sudoku,\n\n best.map(|(_, score)| score).or(old_score),\n\n count_steps,\n\n )\n\n .or(best)\n\n };\n\n }\n\n array[s2] = 0;\n\n }\n\n array[s1] = old;\n\n }\n\n best\n\n}\n\n\n", "file_path": "src/gen.rs", "rank": 80, "score": 1.6044821257902306 }, { "content": " let solution: [u8; 81] = [\n\n 6, 5, 3, 4, 8, 2, 9, 1, 7, 1, 2, 7, 6, 9, 3, 8, 4, 5, 9, 8, 4, 5, 1, 7, 6, 3, 2, 2, 7,\n\n 5, 8, 3, 9, 1, 6, 4, 4, 1, 8, 2, 5, 6, 7, 9, 3, 3, 6, 9, 1, 7, 4, 5, 2, 8, 5, 3, 6, 9,\n\n 4, 8, 2, 7, 1, 7, 9, 1, 3, 2, 5, 4, 8, 6, 8, 4, 2, 7, 6, 1, 3, 5, 9,\n\n ];\n\n assert!(msolve::Sudoku::from(&sudoku).has_single_solution());\n\n assert_eq!(\n\n &solution[..],\n\n &msolve::Sudoku::from(&sudoku)\n\n .solve_unique()\n\n .unwrap()\n\n .to_array()[..]\n\n );\n\n }\n\n #[test]\n\n fn empty_has_multiple_solutions() {\n\n assert_eq!(false, msolve::Sudoku::empty().has_single_solution());\n\n }\n\n #[test]\n\n fn can_find_first_1000_solutions_to_empty() {\n", "file_path": "tests/tests.rs", "rank": 81, "score": 1.5536359944603122 } ]
Rust
src/lib.rs
ldm0/wasm_nn
da6149a8e1d4c22a1c1496058401ea44aeadfc4d
mod data; mod nn; use std::mem; use std::slice; use once_cell::sync::Lazy; use std::sync::Mutex; use ndarray::prelude::*; use ndarray::{array, Array, Array1, Array3, Axis, Zip}; use data::Data; use nn::Network; #[derive(Default)] struct MetaData { fc_size: u32, num_classes: u32, descent_rate: f32, regular_rate: f32, } #[derive(Default)] struct CriticalSection(MetaData, Data, Network); extern "C" { fn log_u64(num: u32); fn draw_point(x: u32, y: u32, label_ratio: f32); } static DATA: Lazy<Mutex<CriticalSection>> = Lazy::new(|| Mutex::default()); #[no_mangle] pub fn alloc(size: u32) -> *mut u8 { let mut buffer: Vec<u8> = Vec::with_capacity(size as usize); let buffer_ptr = buffer.as_mut_ptr(); mem::forget(buffer); buffer_ptr } #[no_mangle] pub fn free(buffer_ptr: *mut u8, size: u32) { let _ = unsafe { Vec::from_raw_parts(buffer_ptr, 0, size as usize) }; } #[no_mangle] pub fn init( data_radius: f32, data_spin_span: f32, data_num: u32, num_classes: u32, data_gen_rand_max: f32, network_gen_rand_max: f32, fc_size: u32, descent_rate: f32, regular_rate: f32, ) { let ref mut tmp = *DATA.lock().unwrap(); let CriticalSection(metadata, data, network) = tmp; metadata.fc_size = fc_size; metadata.num_classes = num_classes; metadata.descent_rate = descent_rate; metadata.regular_rate = regular_rate; data.init( num_classes, data_num / num_classes, data_radius, data_spin_span, data_gen_rand_max, ); const PLANE_DIMENSION: u32 = 2; network.init(PLANE_DIMENSION, fc_size, num_classes, network_gen_rand_max); } #[no_mangle] pub fn train() -> f32 { let ref mut tmp = *DATA.lock().unwrap(); let CriticalSection(ref metadata, ref data, ref mut network) = *tmp; let regular_rate = metadata.regular_rate; let descent_rate = metadata.descent_rate; let (fc_layer, softmax) = network.forward_propagation(&data.points); let (dw1, db1, dw2, db2) = network.back_propagation( &data.points, &fc_layer, &softmax, &data.labels, regular_rate, ); let loss = network.loss(&softmax, &data.labels, regular_rate); 
network.descent(&dw1, &db1, &dw2, &db2, descent_rate); let (data_loss, regular_loss) = loss; data_loss + regular_loss } #[no_mangle] pub fn draw_prediction(canvas: *mut u8, width: u32, height: u32, span_least: f32) { let width = width as usize; let height = height as usize; let ref tmp = *DATA.lock().unwrap(); let CriticalSection(metadata, _, network) = tmp; let num_classes = metadata.num_classes as usize; let r: Array1<f32> = Array::linspace(0f32, 200f32, num_classes); let g: Array1<f32> = Array::linspace(0f32, 240f32, num_classes); let b: Array1<f32> = Array::linspace(0f32, 255f32, num_classes); let span_per_pixel = span_least / width.min(height) as f32; let span_height = height as f32 * span_per_pixel; let span_width = width as f32 * span_per_pixel; let width_max = span_width / 2f32; let width_min = -span_width / 2f32; let height_max = span_height / 2f32; let height_min = -span_height / 2f32; let x_axis: Array1<f32> = Array::linspace(width_min, width_max, width); let y_axis: Array1<f32> = Array::linspace(height_min, height_max, height); let mut grid: Array3<f32> = Array::zeros((height, width, 2)); for y in 0..height { for x in 0..width { let coord = array![x_axis[[x]], y_axis[[y]]]; let mut slice = grid.slice_mut(s![y, x, ..]); slice.assign(&coord); } } let xys = grid.into_shape((height * width, 2)).unwrap(); let (_, softmax) = network.forward_propagation(&xys); let mut labels: Array1<usize> = Array::zeros(height * width); for (y, row) in softmax.axis_iter(Axis(0)).enumerate() { let mut maxx = 0 as usize; let mut max = row[[0]]; for (x, col) in row.iter().enumerate() { if *col > max { maxx = x; max = *col; } } labels[[y]] = maxx; } let grid_label = labels.into_shape((height, width)).unwrap(); let canvas_size = width * height * 4; let canvas: &mut [u8] = unsafe { slice::from_raw_parts_mut(canvas, canvas_size) }; for y in 0..height { for x in 0..width { canvas[4 * (y * width + x) + 0] = r[[grid_label[[y, x]]]] as u8; canvas[4 * (y * width + x) + 1] = 
g[[grid_label[[y, x]]]] as u8; canvas[4 * (y * width + x) + 2] = b[[grid_label[[y, x]]]] as u8; canvas[4 * (y * width + x) + 3] = 0xFF as u8; } } } #[no_mangle] pub fn draw_points(width: u32, height: u32, span_least: f32) { let ref tmp = *DATA.lock().unwrap(); let CriticalSection(metadata, data, _) = tmp; let num_classes = metadata.num_classes as f32; let pixel_per_span = width.min(height) as f32 / span_least; let labels = &data.labels; let points = &data.points; let points_x = points.index_axis(Axis(1), 0); let points_y = points.index_axis(Axis(1), 1); Zip::from(labels) .and(points_x) .and(points_y) .apply(|&label, &x, &y| { let x = (x * pixel_per_span) as i64 + width as i64 / 2; let y = (y * pixel_per_span) as i64 + height as i64 / 2; if !(x >= width as i64 || x < 0 || y >= height as i64 || y < 0) { let x = x as u32; let y = y as u32; let label_ratio = label as f32 / num_classes; unsafe { draw_point(x, y, label_ratio); } } }); } #[cfg(test)] mod kernel_test { use super::*; static POINT_DRAW_TIMES: Lazy<Mutex<u32>> = Lazy::new(|| Mutex::new(0)); #[no_mangle] extern "C" fn draw_point(_: u32, _: u32, _: f32) { *POINT_DRAW_TIMES.lock().unwrap() += 1; } use std::f32::consts::PI; const DATA_GEN_RADIUS: f32 = 1f32; const SPIN_SPAN: f32 = PI; const NUM_CLASSES: u32 = 3; const DATA_NUM: u32 = 300; const FC_SIZE: u32 = 100; const REGULAR_RATE: f32 = 0.001f32; const DESCENT_RATE: f32 = 1f32; const DATA_GEN_RAND_MAX: f32 = 0.25f32; const NETWORK_GEN_RAND_MAX: f32 = 0.1f32; #[test] fn test_all() { init( DATA_GEN_RADIUS, SPIN_SPAN, DATA_NUM, NUM_CLASSES, DATA_GEN_RAND_MAX, NETWORK_GEN_RAND_MAX, FC_SIZE, DESCENT_RATE, REGULAR_RATE, ); let loss_before: f32 = train(); for _ in 0..50 { let loss = train(); assert!(loss < loss_before * 1.1f32); } } #[test] fn test_buffer_allocation() { let buffer = alloc(114514); free(buffer, 114514); } #[test] fn test_draw_prediction() { init( DATA_GEN_RADIUS, SPIN_SPAN, DATA_NUM, NUM_CLASSES, DATA_GEN_RAND_MAX, NETWORK_GEN_RAND_MAX, FC_SIZE, 
DESCENT_RATE, REGULAR_RATE, ); let width = 100; let height = 100; let buffer = alloc(width * height * 4); draw_prediction(buffer, width, height, 2f32); free(buffer, width * height * 4); } #[test] fn test_draw_points() { init( DATA_GEN_RADIUS, SPIN_SPAN, DATA_NUM, NUM_CLASSES, DATA_GEN_RAND_MAX, NETWORK_GEN_RAND_MAX, FC_SIZE, DESCENT_RATE, REGULAR_RATE, ); *POINT_DRAW_TIMES.lock().unwrap() = 0; draw_points(1, 1, DATA_GEN_RADIUS * 2f32 * 1.1f32); assert_eq!(DATA_NUM, *POINT_DRAW_TIMES.lock().unwrap()); *POINT_DRAW_TIMES.lock().unwrap() = 0; draw_points(1, 100, DATA_GEN_RADIUS * 2f32 * 1.1f32); assert_eq!(DATA_NUM, *POINT_DRAW_TIMES.lock().unwrap()); *POINT_DRAW_TIMES.lock().unwrap() = 0; draw_points(1, 100, DATA_GEN_RADIUS * 2f32 * 1.1f32); assert_eq!(DATA_NUM, *POINT_DRAW_TIMES.lock().unwrap()); *POINT_DRAW_TIMES.lock().unwrap() = 0; draw_points(100, 100, DATA_GEN_RADIUS * 2f32 * 1.1f32); assert_eq!(DATA_NUM, *POINT_DRAW_TIMES.lock().unwrap()); *POINT_DRAW_TIMES.lock().unwrap() = 0; draw_points(10000000, 1000000, DATA_GEN_RADIUS * 2f32 * 1.1f32); assert_eq!(DATA_NUM, *POINT_DRAW_TIMES.lock().unwrap()); } }
mod data; mod nn; use std::mem; use std::slice; use once_cell::sync::Lazy; use std::sync::Mutex; use ndarray::prelude::*; use ndarray::{array, Array, Array1, Array3, Axis, Zip}; use data::Data; use nn::Network; #[derive(Default)] struct MetaData { fc_size: u32, num_classes: u32, descent_rate: f32, regular_rate: f32, } #[derive(Default)] struct CriticalSection(MetaData, Data, Network); extern "C" { fn log_u64(num: u32); fn draw_point(x: u32, y: u32, label_ratio: f32); } static DATA: Lazy<Mutex<CriticalSection>> = Lazy::new(|| Mutex::default()); #[no_mangle] pub fn alloc(size: u32) -> *mut u8 { let mut buffer: Vec<u8> = Vec::with_capacity(size as usize); let buffer_ptr = buffer.as_mut_ptr(); mem::forget(buffer); buffer_ptr } #[no_mangle] pub fn free(buffer_ptr: *mut u8, size: u32) { let _ = unsafe { Vec::from_raw_parts(buffer_ptr, 0, size as usize) }; } #[no_mangle] pub fn init( data_radius: f32, data_spin_span: f32, data_num: u32, num_classes: u32, data_gen_rand_max: f32, network_gen_rand_max: f32, fc_size: u32, descent_rate: f32, regular_rate: f32, ) { let ref mut tmp = *DATA.lock().unwrap(); let CriticalSection(metadata, data, network) = tmp; metadata.fc_size = fc_size; metadata.num_classes = num_classes; metadata.descent_rate = descent_rate; metadata.regular_rate = regular_rate; data.init( num_classes, data_num / num_classes, data_radius, data_spin_span, data_gen_rand_max, ); const PLANE_DIMENSION: u32 = 2; network.init(PLANE_DIMENSION, fc_size, num_classes, network_gen_rand_max); } #[no_mangle] pub fn train() -> f32 { let ref mut tmp = *DATA.lock().unwrap(); let CriticalSection(ref metadata, ref data, ref mut network) = *tmp; let regular_rate = metadata.regular_rate; let descent_rate = metadata.descent_rate; let (fc_layer, softmax) = network.forward_propagation(&data.points); let (dw1, db1, dw2, db2) = network.back_propagation( &data.points, &fc_layer, &softmax, &data.labels, regular_rate, ); let loss = network.loss(&softmax, &data.labels, regular_rate); 
network.descent(&dw1, &db1, &dw2, &db2, descent_rate); let (data_loss, regular_loss) = loss; data_loss + regular_loss } #[no_mangle] pub fn draw_prediction(canvas: *mut u8, width: u32, height: u32, span_least: f32) { let width = width as usize; let height = height as usize; let ref tmp = *DATA.lock().unwrap(); let CriticalSection(metadata, _, network) = tmp; let num_classes = metadata.num_classes as usize; let r: Array1<f32> = Array::linspace(0f32, 200f32, num_classes); let g: Array1<f32> = Array::linspace(0f32, 240f32, num_classes); let b: Array1<f32> = Array::linspace(0f32, 255f32, num_classes); let span_per_pixel = span_least / width.min(height) as f32; let span_height = height as f32 * span_per_pixel; let span_width = width as f32 * span_per_pixel; let width_max = span_width / 2f32; let width_min = -span_width / 2f32; let height_max = span_height / 2f32; let height_min = -span_height / 2f32; let x_axis: Array1<f32> = Array::linspace(width_min, width_max, width); let y_axis: Array1<f32> = Array::linspace(height_min, height_max, height); let mut grid: Array3<f32> = Array::zeros((height, width, 2)); for y in 0..height { for x in 0..width { let coord = array![x_axis[[x]], y_axis[[y]]]; let mut slice = grid.slice_mut(s![y, x, ..]); slice.assign(&coord); } } let xys = grid.into_shape((height * width, 2)).unwrap(); let (_, softmax) = network.forward_propagation(&xys); let mut labels: Array1<usize> = Array::zeros(height * width); for (y, row) in softmax.axis_iter(Axis(0)).enumerate() { let mut maxx = 0 as usize; let mut max = row[[0]]; for (x, col) in row.iter().enumerate() { if *col > max { maxx = x; max = *col; } } labels[[y]] = maxx; } let grid_label = labels.into_shape((height, width)).unwrap(); let canvas_size = width * height * 4; let canvas: &mut [u8] = unsafe { slice::from_raw_parts_mut(canvas, canvas_size) }; for y in 0..height { for x in 0..width { canvas[4 * (y * width + x) + 0] = r[[grid_label[[y, x]]]] as u8; canvas[4 * (y * width + x) + 1] = 
g[[grid_label[[y, x]]]] as u8; canvas[4 * (y * width + x) + 2] = b[[grid_label[[y, x]]]] as u8; canvas[4 * (y * width + x) + 3] = 0xFF as u8; } } } #[no_mangle] pub fn draw_points(width: u32, height: u32, span_least: f32) { let ref tmp = *DATA.lock().unwrap(); let CriticalSection(metadata, data, _) = tmp; let num_classes = metadata.num_classes as f32; let pixel_per_span = width.min(height) as f32 / span_least; let labels = &data.labels; let points = &data.points; let points_x = points.index_axis(Axis(1), 0); let points_y = points.index_axis(Axis(1), 1); Zip::from(labels) .and(points_x) .and(points_y) .apply(|&label, &x, &y| { let x = (x * pixel_per_span) as i64 + width as i64 / 2; let y = (y * pixel_per_span) as i64 + height as i64 / 2; if !(x >= width as i64 || x < 0 || y >= height as i64 || y < 0) { let x = x as u32; let y = y as u32; let label_ratio = label as f32 / num_classes; unsafe { draw_point(x, y, label_ratio); } } }); } #[cfg(test)] mod kernel_test { use super::*; static POINT_DRAW_TIMES: Lazy<Mutex<u32>> = Lazy::new(|| Mutex::new(0)); #[no_mangle] extern "C" fn draw_point(_: u32, _: u32, _: f32) { *POINT_DRAW_TIMES.lock().unwrap() += 1; } use std::f32::consts::PI; const DATA_GEN_RADIUS: f32 = 1f32; const SPIN_SPAN: f32 = PI; const NUM_CLASSES: u32 = 3; const DATA_NUM: u32 = 300; const FC_SIZE: u32 = 100; const REGULAR_RATE: f32 = 0.001f32; const DESCENT_RATE: f32 = 1f32; const DATA_GEN_RAND_MAX: f32 = 0.25f32; const NETWORK_GEN_RAND_MAX: f32 = 0.1f32; #[test] fn test_all() { init( DATA_GEN_RADIUS, SPIN_SPAN, DATA_NUM, NUM_CLASSES, DATA_GEN_RAND_MAX, NETWORK_GEN_RAND_MAX, FC_SIZE, DESCENT_RATE, REGULAR_RATE, ); let loss_before: f32 = train(); for _ in 0..50 { let loss = train(); assert!(loss < loss_before * 1.1f32); }
ck().unwrap() = 0; draw_points(1, 1, DATA_GEN_RADIUS * 2f32 * 1.1f32); assert_eq!(DATA_NUM, *POINT_DRAW_TIMES.lock().unwrap()); *POINT_DRAW_TIMES.lock().unwrap() = 0; draw_points(1, 100, DATA_GEN_RADIUS * 2f32 * 1.1f32); assert_eq!(DATA_NUM, *POINT_DRAW_TIMES.lock().unwrap()); *POINT_DRAW_TIMES.lock().unwrap() = 0; draw_points(1, 100, DATA_GEN_RADIUS * 2f32 * 1.1f32); assert_eq!(DATA_NUM, *POINT_DRAW_TIMES.lock().unwrap()); *POINT_DRAW_TIMES.lock().unwrap() = 0; draw_points(100, 100, DATA_GEN_RADIUS * 2f32 * 1.1f32); assert_eq!(DATA_NUM, *POINT_DRAW_TIMES.lock().unwrap()); *POINT_DRAW_TIMES.lock().unwrap() = 0; draw_points(10000000, 1000000, DATA_GEN_RADIUS * 2f32 * 1.1f32); assert_eq!(DATA_NUM, *POINT_DRAW_TIMES.lock().unwrap()); } }
} #[test] fn test_buffer_allocation() { let buffer = alloc(114514); free(buffer, 114514); } #[test] fn test_draw_prediction() { init( DATA_GEN_RADIUS, SPIN_SPAN, DATA_NUM, NUM_CLASSES, DATA_GEN_RAND_MAX, NETWORK_GEN_RAND_MAX, FC_SIZE, DESCENT_RATE, REGULAR_RATE, ); let width = 100; let height = 100; let buffer = alloc(width * height * 4); draw_prediction(buffer, width, height, 2f32); free(buffer, width * height * 4); } #[test] fn test_draw_points() { init( DATA_GEN_RADIUS, SPIN_SPAN, DATA_NUM, NUM_CLASSES, DATA_GEN_RAND_MAX, NETWORK_GEN_RAND_MAX, FC_SIZE, DESCENT_RATE, REGULAR_RATE, ); *POINT_DRAW_TIMES.lo
random
[ { "content": "use ndarray::prelude::*;\n\nuse ndarray::{stack, Array, Array1, Array2, Axis}; // for matrices\n\n\n\nuse ndarray_rand::rand_distr::StandardNormal; // for randomness\n\nuse ndarray_rand::RandomExt; // for randomness\n\nuse rand::rngs::SmallRng;\n\nuse rand::SeedableRng; // for from_seed // for randomness\n\n\n\nuse std::f32::consts::PI; // for math functions\n\n\n\n/// point data with labels\n\n#[derive(Default)]\n\npub struct Data {\n\n pub points: Array2<f32>, // points position\n\n pub labels: Array1<u32>, // points labels\n\n}\n\n\n\nimpl Data {\n\n // num_sample: num of data for each label class\n\n // radius: radius of the circle of data points position\n", "file_path": "src/data.rs", "rank": 8, "score": 18581.64263603975 }, { "content": " // span: each data arm ratate span\n\n pub fn init(\n\n &mut self,\n\n num_classes: u32,\n\n num_samples: u32,\n\n radius: f32,\n\n span: f32,\n\n rand_max: f32,\n\n ) {\n\n // For array creating convenience\n\n let num_classes = num_classes as usize;\n\n let num_samples = num_samples as usize;\n\n\n\n let num_data = num_classes * num_samples;\n\n self.points = Array::zeros((num_data, 2));\n\n self.labels = Array::zeros(num_data);\n\n for i in 0..num_classes {\n\n let rho = Array::linspace(0f32, radius, num_samples);\n\n let begin = i as f32 * (2f32 * PI / num_classes as f32);\n\n\n", "file_path": "src/data.rs", "rank": 9, "score": 18581.050570904998 }, { "content": " let seed = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16];\n\n let mut rng = SmallRng::from_seed(seed);\n\n let theta = Array::linspace(begin, begin - span, num_samples)\n\n // will be changed later to use span to generate randomness to avoid points flickering\n\n + Array::<f32, _>::random_using(num_samples, StandardNormal, &mut rng) * rand_max;\n\n\n\n let xs = (theta.mapv(f32::sin) * &rho)\n\n .into_shape((num_samples, 1))\n\n .unwrap();\n\n let ys = (theta.mapv(f32::cos) * &rho)\n\n .into_shape((num_samples, 1))\n\n .unwrap();\n\n 
let mut class_points = self\n\n .points\n\n .slice_mut(s![i * num_samples..(i + 1) * num_samples, ..]);\n\n class_points.assign(&stack![Axis(1), xs, ys]);\n\n let mut class_labels = self\n\n .labels\n\n .slice_mut(s![i * num_samples..(i + 1) * num_samples]);\n\n class_labels.fill(i as u32);\n\n // or:\n\n //class_labels.assign(&(Array::ones(num_samples) * i));\n\n }\n\n }\n\n}\n", "file_path": "src/data.rs", "rank": 10, "score": 18578.79840419713 }, { "content": " let exp_scores = scores.mapv(f32::exp);\n\n let softmax = &exp_scores / &exp_scores.sum_axis(Axis(1)).insert_axis(Axis(1));\n\n // println!(\"{:#?}\", softmax);\n\n (fc_layer, softmax)\n\n }\n\n\n\n pub fn loss(\n\n &self,\n\n softmax: &Array2<f32>,\n\n labels: &Array1<u32>,\n\n regular_rate: f32,\n\n ) -> (f32, f32) {\n\n let num_data = softmax.nrows();\n\n let mut probs_correct: Array1<f32> = Array::zeros(num_data);\n\n Zip::from(&mut probs_correct)\n\n .and(softmax.genrows())\n\n .and(labels)\n\n .apply(|prob_correct, prob, &label| {\n\n *prob_correct = prob[label as usize];\n\n });\n", "file_path": "src/nn.rs", "rank": 11, "score": 17948.290049026415 }, { "content": "use ndarray::{Array, Array1, Array2, Axis, Zip}; // for matrices\n\n\n\nuse ndarray_rand::rand_distr::StandardNormal; // for randomness\n\nuse ndarray_rand::RandomExt; // for randomness\n\nuse rand::rngs::SmallRng;\n\nuse rand::SeedableRng; // for from_seed // for randomness\n\n\n\n/**\n\n * single layer neural network\n\n */\n\n#[derive(Default)]\n\npub struct Network {\n\n pub w1: Array2<f32>,\n\n pub b1: Array2<f32>,\n\n pub w2: Array2<f32>,\n\n pub b2: Array2<f32>,\n\n}\n\n\n\nimpl Network {\n\n pub fn init(&mut self, input_size: u32, fc_size: u32, output_size: u32, rand_max: f32) {\n", "file_path": "src/nn.rs", "rank": 12, "score": 17947.134748715915 }, { "content": " dscore[[labels[[i]] as usize]] -= 1f32;\n\n }\n\n dscores /= num_data as f32;\n\n let dact2 = dscores;\n\n let dfc_layer = dact2.dot(&self.w2.t());\n\n let mut dact1 = 
dfc_layer.clone();\n\n Zip::from(&mut dact1).and(fc_layer).apply(|act1, &fc| {\n\n if fc == 0f32 {\n\n *act1 = 0f32;\n\n }\n\n });\n\n\n\n let dw2 = fc_layer.t().dot(&dact2) + regular_rate * &self.w2;\n\n let db2 = dact2.sum_axis(Axis(0)).insert_axis(Axis(0));\n\n let dw1 = points.t().dot(&dact1) + regular_rate * &self.w1;\n\n let db1 = dact1.sum_axis(Axis(0)).insert_axis(Axis(0));\n\n (dw1, db1, dw2, db2)\n\n }\n\n}\n", "file_path": "src/nn.rs", "rank": 13, "score": 17946.498396370025 }, { "content": " let infos = probs_correct.mapv(|x| -f32::ln(x));\n\n //println!(\"{:#?}\", &probs_correct);\n\n let data_loss = infos.mean().unwrap();\n\n let regular_loss =\n\n 0.5f32 * regular_rate * ((&self.w1 * &self.w1).sum() + (&self.w2 * &self.w2).sum());\n\n //println!(\"data loss: {} regular loss: {}\", data_loss, regular_loss);\n\n (data_loss, regular_loss)\n\n }\n\n\n\n pub fn back_propagation(\n\n &self,\n\n points: &Array2<f32>,\n\n fc_layer: &Array2<f32>,\n\n softmax: &Array2<f32>,\n\n labels: &Array1<u32>,\n\n regular_rate: f32,\n\n ) -> (Array2<f32>, Array2<f32>, Array2<f32>, Array2<f32>) {\n\n let num_data = softmax.nrows();\n\n let mut dscores = softmax.clone();\n\n for (i, mut dscore) in dscores.axis_iter_mut(Axis(0)).enumerate() {\n", "file_path": "src/nn.rs", "rank": 14, "score": 17945.148139333018 }, { "content": " pub fn descent(\n\n &mut self,\n\n dw1: &Array2<f32>,\n\n db1: &Array2<f32>,\n\n dw2: &Array2<f32>,\n\n db2: &Array2<f32>,\n\n descent_rate: f32,\n\n ) {\n\n let rate = descent_rate;\n\n self.w1 -= &(rate * dw1);\n\n self.b1 -= &(rate * db1);\n\n self.w2 -= &(rate * dw2);\n\n self.b2 -= &(rate * db2);\n\n }\n\n\n\n pub fn forward_propagation(&self, points: &Array2<f32>) -> (Array2<f32>, Array2<f32>) {\n\n let act1 = &points.dot(&self.w1) + &self.b1;\n\n let fc_layer = act1.mapv(|x| x.max(0f32)); // relu process\n\n let act2 = &fc_layer.dot(&self.w2) + &self.b2;\n\n let scores = act2;\n", "file_path": "src/nn.rs", "rank": 15, "score": 
17943.380015686515 }, { "content": " let input_size = input_size as usize;\n\n let fc_size = fc_size as usize;\n\n let output_size = output_size as usize;\n\n // according to rand::rngs/mod.rs line 121\n\n let seed = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16];\n\n let mut rng = SmallRng::from_seed(seed);\n\n *self = Network {\n\n w1: Array::random_using((input_size, fc_size), StandardNormal, &mut rng) * rand_max,\n\n w2: Array::random_using((fc_size, output_size), StandardNormal, &mut rng) * rand_max,\n\n b1: Array::random_using((1, fc_size), StandardNormal, &mut rng) * rand_max,\n\n b2: Array::random_using((1, output_size), StandardNormal, &mut rng) * rand_max,\n\n /* random version but commented because strange behaviour of random in wasm leads to panic\n\n w1: Array::ones((input_size, fc_size)) * rand_max,\n\n w2: Array::ones((fc_size, output_size)) * rand_max,\n\n b1: Array::ones((1, fc_size)) * rand_max,\n\n b2: Array::ones((1, output_size)) * rand_max,\n\n */\n\n }\n\n }\n\n\n", "file_path": "src/nn.rs", "rank": 16, "score": 17942.415580233963 }, { "content": "// Ratio: [0., 1.)\n\nfunction hue(ratio) {\n\n let rgb = \"\";\n\n let hue = 6 * ratio;\n\n let integer_part = Math.floor(hue);\n\n let fractal_part = Math.round((hue - integer_part) * 255);\n\n switch (integer_part) {\n\n case 0: rgb = \"#\" + \"FF\" + zero_padding(fractal_part) + \"00\"; break;\n\n case 1: rgb = \"#\" + zero_padding(255 - fractal_part) + \"FF\" + \"00\"; break;\n\n case 2: rgb = \"#\" + \"00\" + \"FF\" + zero_padding(fractal_part); break;\n\n case 3: rgb = \"#\" + \"00\" + zero_padding(255 - fractal_part) + \"FF\"; break;\n\n case 4: rgb = \"#\" + zero_padding(fractal_part) + \"00\" + \"FF\"; break;\n\n case 5: rgb = \"#\" + \"FF\" + \"00\" + zero_padding(255 - fractal_part); break;\n\n }\n\n return rgb;\n\n}\n\n\n\n// Number to zero-padding hex string\n\nfunction zero_padding(num) {\n\n let result = Math.round(num).toString(16);\n\n if (result.length < 2)\n\n result = 
\"0\" + result;\n\n return result;\n\n}\n\n\n\nasync function main(){\n\n const canvas_width = 256;\n\n const canvas_height = 256;\n\n const canvas_buffer_size = canvas_width * canvas_height * 4;\n\n const data_span_radius = 1.;\n\n\n\n const canvas = document.getElementById(\"main_canvas\");\n\n const canvas_context = canvas.getContext(\"2d\");\n\n\n\n const control_button = document.getElementById(\"control_button\");\n\n const loss_reveal = document.getElementById(\"loss_reveal\");\n\n const input_data_spin_span = document.getElementById(\"input_data_spin_span\");\n\n const input_data_num = document.getElementById(\"input_data_num\");\n\n const input_num_classes = document.getElementById(\"input_num_classes\");\n\n const input_data_gen_rand_max = document.getElementById(\"input_data_gen_rand_max\");\n\n const input_network_gen_rand_max = document.getElementById(\"input_network_gen_rand_max\");\n\n const input_fc_size = document.getElementById(\"input_fc_size\");\n\n const input_descent_rate = document.getElementById(\"input_descent_rate\");\n\n const input_regular_rate = document.getElementById(\"input_regular_rate\");\n\n\n\n function get_settings() {\n\n let settings = [\n\n parseFloat(input_data_spin_span.value),\n\n parseInt(input_data_num.value),\n\n parseInt(input_num_classes.value),\n\n parseFloat(input_data_gen_rand_max.value),\n\n parseFloat(input_network_gen_rand_max.value),\n\n parseInt(input_fc_size.value),\n\n parseFloat(input_descent_rate.value),\n\n parseFloat(input_regular_rate.value),\n\n ];\n\n return settings;\n\n }\n\n\n\n function envs() {\n\n // For debug\n\n function log_u64(x) {\n\n console.log(x);\n\n }\n\n\n\n function draw_point(x, y, label) {\n\n canvas_context.beginPath();\n\n canvas_context.arc(x, y, 2, 0, 2 * Math.PI);\n\n canvas_context.fillStyle = hue(label) + \"7f\";\n\n canvas_context.fill();\n\n }\n\n let env = {\n\n log_u64,\n\n draw_point,\n\n };\n\n return env;\n\n }\n\n\n\n const kernel_stream = await 
fetch(\"../wasm/wasm_nn.wasm\");\n\n const kernel = await WebAssembly.instantiateStreaming(kernel_stream, { env: envs()});\n\n \n\n const {alloc: kernel_alloc, free: kernel_free} = kernel.instance.exports;\n\n const {\n\n init: kernel_init,\n\n train: kernel_train,\n\n draw_prediction: kernel_draw_prediction,\n\n draw_points: kernel_draw_points\n\n } = kernel.instance.exports;\n\n const {memory} = kernel.instance.exports;\n\n\n\n // Alloc graphic buffer\n\n // Should not freed because you don't know when the drawing completes\n\n // Maybe not completed forever...\n\n //kernel_free(canvas_buffer_ptr, buffer_size);\n\n const canvas_buffer_ptr = kernel_alloc(canvas_buffer_size);\n\n\n\n function draw_frame() {\n\n // multiply 1.1 for spadding\n\n kernel_draw_prediction(canvas_buffer_ptr, canvas_width, canvas_height, data_span_radius * 2);\n\n const canvas_buffer_array = new Uint8ClampedArray(memory.buffer, canvas_buffer_ptr, canvas_buffer_size);\n\n const canvas_image_data = new ImageData(canvas_buffer_array, canvas_width, canvas_height)\n\n canvas_context.putImageData(canvas_image_data, 0, 0);\n\n\n\n kernel_draw_points(canvas_width, canvas_height, data_span_radius * 2);\n\n }\n\n\n\n function nninit(settings) {\n\n // Gen data for training. 
Check source code of kernel for parameter meaning\n\n kernel_init(\n\n data_span_radius,\n\n settings[0],\n\n settings[1],\n\n settings[2],\n\n settings[3],\n\n settings[4],\n\n settings[5],\n\n settings[6],\n\n settings[7],\n\n );\n\n // draw a fram to avoid blank canvas \n\n draw_frame();\n\n }\n\n\n\n nninit(get_settings());\n\n\n\n {\n\n let run = false;\n\n\n\n {\n\n let counter = 0;\n\n function nnloop() {\n\n if (run) {\n\n let loss = kernel_train();\n\n if (counter >= 10) {\n\n counter = 0;\n\n loss_reveal.innerText = \"loss: \" + loss;\n\n window.requestAnimationFrame(draw_frame);\n\n }\n\n setTimeout(nnloop, 0);\n\n ++counter;\n\n }\n\n }\n\n }\n\n\n\n function nnstart() {\n\n run = true;\n\n nnloop();\n\n }\n\n\n\n function nnstop() {\n\n run = false;\n\n }\n\n }\n\n\n\n {\n\n let run = false;\n\n let current_settings = get_settings();\n\n\n\n control_button.onclick = () => {\n\n if (run) {\n\n run = false;\n\n control_button.innerText = \"run\";\n\n nnstop();\n\n } else {\n\n run = true;\n\n control_button.innerText = \"stop\";\n\n let new_settings = get_settings();\n\n if (JSON.stringify(current_settings) !== JSON.stringify(new_settings)) {\n\n current_settings = new_settings;\n\n nninit(current_settings);\n\n }\n\n nnstart();\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "js/wasm_nn.js", "rank": 17, "score": 14607.59250417224 }, { "content": "## Rust + WebAssembly + Neural Network\n\n\n\n![demo pic](./pic/wasm_nn_6arm.png)\n\n\n\nTry to run Neural Network on web browser.\n\n\n\n**Attention: you need a http server to run locally. 
Because cross-origin requests are not supported for the file protocol scheme.**\n\n\n", "file_path": "README.md", "rank": 18, "score": 10190.703298789453 }, { "content": "@echo off\n\ncargo build --release --target=wasm32-unknown-unknown\n\ncopy target\\wasm32-unknown-unknown\\release\\wasm_nn.wasm .\\wasm\\wasm_nn.wasm", "file_path": "build.bat", "rank": 29, "score": 2.264444981467749 }, { "content": ":: just use a http server because wasm module cannot be get with file request\n\nemrun .", "file_path": "run.bat", "rank": 30, "score": 2.008855707528869 } ]
Rust
rend3/src/renderer/shaders.rs
Noxime/rend3
8270d40cb7522a1212b1b99ab768cc525786f04f
use crate::{ datatypes::ShaderHandle, list::{ShaderSourceType, SourceShaderDescriptor}, registry::ResourceRegistry, ShaderError, }; use parking_lot::RwLock; use shaderc::{CompileOptions, Compiler, OptimizationLevel, ResolvedInclude, SourceLanguage, TargetEnv}; use std::{borrow::Cow, future::Future, path::Path, sync::Arc, thread, thread::JoinHandle}; use wgpu::{Device, ShaderFlags, ShaderModule, ShaderModuleDescriptor, ShaderSource}; pub type ShaderCompileResult = Result<Arc<ShaderModule>, ShaderError>; const BUILTIN_SHADERS: include_dir::Dir = include_dir::include_dir!("./shaders"); pub struct ShaderManager { shader_thread: Option<JoinHandle<()>>, sender: flume::Sender<CompileCommand>, registry: RwLock<ResourceRegistry<Arc<ShaderModule>>>, } impl ShaderManager { pub fn new(device: Arc<Device>) -> Arc<Self> { let (sender, receiver) = flume::unbounded(); let shader_thread = Some( thread::Builder::new() .name("rend3 shader-compilation".into()) .spawn(move || compile_shader_loop(device, receiver)) .unwrap(), ); let registry = RwLock::new(ResourceRegistry::new()); Arc::new(Self { shader_thread, sender, registry, }) } pub fn allocate(&self) -> ShaderHandle { ShaderHandle(self.registry.read().allocate()) } pub fn allocate_async_insert(self: &Arc<Self>, args: SourceShaderDescriptor) -> impl Future<Output = ShaderHandle> { let handle = ShaderHandle(self.registry.read().allocate()); let fut = self.compile_shader(args); let this = Arc::clone(self); async move { let res = fut.await.unwrap(); this.registry.write().insert(handle.0, res); handle } } pub fn insert(&self, handle: ShaderHandle, shader: Arc<ShaderModule>) { self.registry.write().insert(handle.0, shader); } pub fn remove(&self, handle: ShaderHandle) { self.registry.write().remove(handle.0); } pub fn get(&self, handle: ShaderHandle) -> Arc<ShaderModule> { self.registry.read().get(handle.0).clone() } pub fn compile_shader(&self, args: SourceShaderDescriptor) -> impl Future<Output = ShaderCompileResult> { let (sender, 
receiver) = flume::bounded(1); self.sender.send(CompileCommand::Compile(args, sender)).unwrap(); async move { receiver.recv_async().await.unwrap() } } } impl Drop for ShaderManager { fn drop(&mut self) { self.sender.send(CompileCommand::Stop).unwrap(); self.shader_thread.take().unwrap().join().unwrap(); } } #[derive(Debug, Clone)] enum CompileCommand { Compile(SourceShaderDescriptor, flume::Sender<ShaderCompileResult>), Stop, } fn compile_shader_loop(device: Arc<Device>, receiver: flume::Receiver<CompileCommand>) { let mut compiler = shaderc::Compiler::new().unwrap(); while let Ok(command) = receiver.recv() { match command { CompileCommand::Compile(args, sender) => { let result = compile_shader(&mut compiler, &device, &args); sender.send(result).unwrap(); } CompileCommand::Stop => return, } } } fn compile_shader(compiler: &mut Compiler, device: &Device, args: &SourceShaderDescriptor) -> ShaderCompileResult { span_transfer!(_ -> file_span, WARN, "Loading File"); tracing::debug!("Compiling shader {:?}", args); let contents = match args.source { ShaderSourceType::File(ref file) => { std::fs::read_to_string(file).map_err(|e| ShaderError::FileError(e, args.clone()))? } ShaderSourceType::Builtin(ref file) => BUILTIN_SHADERS .get_file(file) .ok_or_else(|| ShaderError::Builtin(args.clone()))? 
.contents_utf8() .unwrap() .to_string(), ShaderSourceType::Value(ref code) => code.clone(), }; let file_name = match args.source { ShaderSourceType::File(ref file) | ShaderSourceType::Builtin(ref file) => &**file, ShaderSourceType::Value(_) => "./file", }; let builtin = matches!(args.source, ShaderSourceType::Builtin(_)); span_transfer!(file_span -> compile_span, WARN, "Shader Compilation"); let mut options = CompileOptions::new().unwrap(); options.set_generate_debug_info(); options.set_source_language(SourceLanguage::GLSL); options.set_target_env(TargetEnv::Vulkan, 0); options.set_optimization_level(match cfg!(debug_assertions) { true => OptimizationLevel::Zero, false => OptimizationLevel::Performance, }); for (key, value) in &args.defines { options.add_macro_definition(&key, value.as_deref()); } options.set_include_callback(|include, _ty, src, _depth| { let joined = Path::new(src) .parent() .ok_or_else(|| { format!( "Cannot find include <{}> relative to file {} as there is no parent directory", include, src ) })? .join(Path::new(include)); let contents = if builtin { let dedot = path_dedot::ParseDot::parse_dot(&joined).unwrap(); BUILTIN_SHADERS .get_file(dedot) .ok_or_else(|| { format!( "Error while locating builtin include <{}> from file {} for path {}", include, src, joined.display() ) })? .contents_utf8() .unwrap() .to_string() } else { std::fs::read_to_string(&joined).map_err(|e| { format!( "Error while loading include <{}> from file {} for path {}: {}", include, src, joined.display(), e ) })? 
}; Ok(ResolvedInclude { resolved_name: joined.to_string_lossy().to_string(), content: contents, }) }); let binary = compiler .compile_into_spirv(&contents, args.stage.into(), &file_name, "main", Some(&options)) .map_err(|e| ShaderError::CompileError(e, args.clone()))?; let bytes = binary.as_binary(); span_transfer!(compile_span -> module_create_span, WARN, "Create Shader Module"); let module = Arc::new(device.create_shader_module(&ShaderModuleDescriptor { label: None, source: ShaderSource::SpirV(Cow::Borrowed(bytes)), flags: ShaderFlags::VALIDATION, })); Ok(module) }
use crate::{ datatypes::ShaderHandle, list::{ShaderSourceType, SourceShaderDescriptor}, registry::ResourceRegistry, ShaderError, }; use parking_lot::RwLock; use shaderc::{CompileOptions, Compiler, OptimizationLevel, ResolvedInclude, SourceLanguage, TargetEnv}; use std::{borrow::Cow, future::Future, path::Path, sync::Arc, thread, thread::JoinHandle}; use wgpu::{Device, ShaderFlags, ShaderModule, ShaderModuleDescriptor, ShaderSource}; pub type ShaderCompileResult = Result<Arc<ShaderModule>, ShaderError>; const BUILTIN_SHADERS: include_dir::Dir = include_dir::include_dir!("./shaders"); pub struct ShaderManager { shader_thread: Option<JoinHandle<()>>, sender: flume::Sender<CompileCommand>, registry: RwLock<ResourceRegistry<Arc<ShaderModule>>>, } impl ShaderManager { pub fn new(device: Arc<Device>) -> Arc<Self> { let (sender, receiver) = flume::unbounded(); let shader_thread = Some( thread::Builder::new() .name("rend3 shader-compilation".into()) .spawn(move || compile_shader_loop(device, receiver)) .unwrap(), ); let registry = RwLock::new(ResourceRegistry::new()); Arc::new(Self { shader_thread, sender, registry, }) } pub fn allocate(&self) -> ShaderHandle { ShaderHandle(self.registry.read().allocate()) } pub fn allocate_async_insert(self: &Arc<Self>, args: SourceShaderDescriptor) -> impl Future<Output = ShaderHandle> { let handle = ShaderHandle(self.registry.read().allocate()); let fut = self.compile_shader(args); let this = Arc::clone(self); async move { let res = fut.await.unwrap(); this.registry.write().insert(handle.0, res); handle } } pub fn insert(&self, handle: ShaderHandle, shader: Arc<ShaderModule>) { self.registry.write().insert(handle.0, shader); } pub fn remove(&self, handle: ShaderHandle) { self.registry.write().remove(handle.0); } pub fn get(&self, handle: ShaderHandle) -> Arc<ShaderModule> { self.registry.read().get(handle.0).clone() } pub fn compile_shader(&self, args: SourceShaderDescriptor) -> impl Future<Output = ShaderCompileResult> { let (sender, 
receiver) = flume::bounded(1); self.sender.send(CompileCommand::Compile(args, sender)).unwrap(); async move { receiver.recv_async().await.unwrap() } } } impl Drop for ShaderManager { fn drop(&mut self) { self.sender.send(CompileCommand::Stop).unwrap(); self.shader_thread.take().unwrap().join().unwrap(); } } #[derive(Debug, Clone)] enum CompileCommand { Compile(SourceShaderDescriptor, flume::Sender<ShaderCompileResult>), Stop, } fn compile_shader_loop(device: Arc<Device>, receiver: flume::Receiver<CompileCommand>) { let mut compiler = shaderc::Compiler::new().unwrap(); while let Ok(command) = receiver.recv() { match command { CompileCommand::Compile(args, sender) => { let result = compile_shader(&mut com
ltin(args.clone()))? .contents_utf8() .unwrap() .to_string(), ShaderSourceType::Value(ref code) => code.clone(), }; let file_name = match args.source { ShaderSourceType::File(ref file) | ShaderSourceType::Builtin(ref file) => &**file, ShaderSourceType::Value(_) => "./file", }; let builtin = matches!(args.source, ShaderSourceType::Builtin(_)); span_transfer!(file_span -> compile_span, WARN, "Shader Compilation"); let mut options = CompileOptions::new().unwrap(); options.set_generate_debug_info(); options.set_source_language(SourceLanguage::GLSL); options.set_target_env(TargetEnv::Vulkan, 0); options.set_optimization_level(match cfg!(debug_assertions) { true => OptimizationLevel::Zero, false => OptimizationLevel::Performance, }); for (key, value) in &args.defines { options.add_macro_definition(&key, value.as_deref()); } options.set_include_callback(|include, _ty, src, _depth| { let joined = Path::new(src) .parent() .ok_or_else(|| { format!( "Cannot find include <{}> relative to file {} as there is no parent directory", include, src ) })? .join(Path::new(include)); let contents = if builtin { let dedot = path_dedot::ParseDot::parse_dot(&joined).unwrap(); BUILTIN_SHADERS .get_file(dedot) .ok_or_else(|| { format!( "Error while locating builtin include <{}> from file {} for path {}", include, src, joined.display() ) })? .contents_utf8() .unwrap() .to_string() } else { std::fs::read_to_string(&joined).map_err(|e| { format!( "Error while loading include <{}> from file {} for path {}: {}", include, src, joined.display(), e ) })? 
}; Ok(ResolvedInclude { resolved_name: joined.to_string_lossy().to_string(), content: contents, }) }); let binary = compiler .compile_into_spirv(&contents, args.stage.into(), &file_name, "main", Some(&options)) .map_err(|e| ShaderError::CompileError(e, args.clone()))?; let bytes = binary.as_binary(); span_transfer!(compile_span -> module_create_span, WARN, "Create Shader Module"); let module = Arc::new(device.create_shader_module(&ShaderModuleDescriptor { label: None, source: ShaderSource::SpirV(Cow::Borrowed(bytes)), flags: ShaderFlags::VALIDATION, })); Ok(module) }
piler, &device, &args); sender.send(result).unwrap(); } CompileCommand::Stop => return, } } } fn compile_shader(compiler: &mut Compiler, device: &Device, args: &SourceShaderDescriptor) -> ShaderCompileResult { span_transfer!(_ -> file_span, WARN, "Loading File"); tracing::debug!("Compiling shader {:?}", args); let contents = match args.source { ShaderSourceType::File(ref file) => { std::fs::read_to_string(file).map_err(|e| ShaderError::FileError(e, args.clone()))? } ShaderSourceType::Builtin(ref file) => BUILTIN_SHADERS .get_file(file) .ok_or_else(|| ShaderError::Bui
random
[]
Rust
src/imp/scintilla/mod_cocoa.rs
plyhun/plygui-scintilla
6543238d109c7cf44a8883d65f5bb4d12df0bd90
use crate::sdk::*; use plygui_cocoa::common::*; use std::os::raw::{c_int, c_long, c_ulong, c_void}; lazy_static! { static ref WINDOW_CLASS: RefClass = unsafe { register_window_class("PlyguiConsole", BASE_CLASS, |decl| { decl.add_method(sel!(setFrameSize:), set_frame_size as extern "C" fn(&mut Object, Sel, NSSize)); }) }; } pub type Scintilla = AMember<AControl<AScintilla<CocoaScintilla>>>; const BASE_CLASS: &str = "ScintillaView"; #[repr(C)] pub struct CocoaScintilla { base: CocoaControlBase<Scintilla>, fn_ptr: Option<extern "C" fn(*mut c_void, c_int, c_ulong, c_long) -> *mut c_void>, self_ptr: Option<*mut c_void>, } impl<O: crate::Scintilla> NewScintillaInner<O> for CocoaScintilla { fn with_uninit(u: &mut mem::MaybeUninit<O>) -> Self { let sc = Self { base: CocoaControlBase::with_params(*WINDOW_CLASS, set_frame_size_inner::<O>), fn_ptr: None, self_ptr: None, }; unsafe { let selfptr = u as *mut _ as *mut ::std::os::raw::c_void; (&mut *sc.base.control).set_ivar(IVAR, selfptr); } sc } } impl ScintillaInner for CocoaScintilla { fn new() -> Box<dyn crate::Scintilla> { let mut b: Box<mem::MaybeUninit<Scintilla>> = Box::new_uninit(); let ab = AMember::with_inner( AControl::with_inner( AScintilla::with_inner( <Self as NewScintillaInner<Scintilla>>::with_uninit(b.as_mut()), ) ), ); unsafe { b.as_mut_ptr().write(ab); b.assume_init() } } fn set_margin_width(&mut self, index: usize, width: isize) { if let Some(fn_ptr) = self.fn_ptr { (fn_ptr)(self.self_ptr.unwrap(), scintilla_sys::SCI_SETMARGINWIDTHN as i32, index as c_ulong, width as c_long); } } fn set_readonly(&mut self, readonly: bool) { if let Some(fn_ptr) = self.fn_ptr { (fn_ptr)(self.self_ptr.unwrap(), scintilla_sys::SCI_SETREADONLY as i32, if readonly { 1 } else { 0 }, 0); } } fn is_readonly(&self) -> bool { if let Some(fn_ptr) = self.fn_ptr { !(fn_ptr)(self.self_ptr.unwrap(), scintilla_sys::SCI_GETREADONLY as i32, 0, 0).is_null() } else { true } } fn set_codepage(&mut self, cp: crate::Codepage) { if let Some(fn_ptr) 
= self.fn_ptr { ((fn_ptr)(self.self_ptr.unwrap(), scintilla_sys::SCI_SETCODEPAGE as i32, cp as c_ulong, 0) as isize); } } fn codepage(&self) -> crate::Codepage { if let Some(fn_ptr) = self.fn_ptr { ((fn_ptr)(self.self_ptr.unwrap(), scintilla_sys::SCI_GETCODEPAGE as i32, 0, 0) as isize).into() } else { Default::default() } } fn append_text(&mut self, text: &str) { self.set_codepage(crate::Codepage::Utf8); if let Some(fn_ptr) = self.fn_ptr { let len = text.len(); let tptr = text.as_bytes().as_ptr(); (fn_ptr)(self.self_ptr.unwrap(), crate::scintilla_sys::SCI_APPENDTEXT as i32, len as c_ulong, tptr as c_long); } } } impl ControlInner for CocoaScintilla { fn on_added_to_container(&mut self, member: &mut MemberBase, control: &mut ControlBase, _parent: &dyn controls::Container, _x: i32, _y: i32, pw: u16, ph: u16) { unsafe { use scintilla_sys::{SCI_GETDIRECTFUNCTION, SCI_GETDIRECTPOINTER}; let fn_ptr: extern "C" fn(*mut c_void, c_int, c_ulong, c_long) -> *mut c_void = msg_send![self.base.control, message:SCI_GETDIRECTFUNCTION wParam:0 lParam:0]; let self_ptr: *mut c_void = msg_send![self.base.control, message:SCI_GETDIRECTPOINTER wParam:0 lParam:0]; self.fn_ptr = Some(fn_ptr); self.self_ptr = Some(self_ptr); } self.measure(member, control, pw, ph); } fn on_removed_from_container(&mut self, _: &mut MemberBase, _: &mut ControlBase, _: &dyn controls::Container) { self.fn_ptr = None; self.self_ptr = None; unsafe { self.base.on_removed_from_container(); } } fn parent(&self) -> Option<&dyn controls::Member> { self.base.parent() } fn parent_mut(&mut self) -> Option<&mut dyn controls::Member> { self.base.parent_mut() } fn root(&self) -> Option<&dyn controls::Member> { self.base.root() } fn root_mut(&mut self) -> Option<&mut dyn controls::Member> { self.base.root_mut() } #[cfg(feature = "markup")] fn fill_from_markup(&mut self, member: &mut MemberBase, control: &mut ControlBase, markup: &plygui_api::markup::Markup, registry: &mut plygui_api::markup::MarkupRegistry) { 
fill_from_markup_base!(self, base, markup, registry, Scintilla, ["Scintilla"]); } } impl HasLayoutInner for CocoaScintilla { fn on_layout_changed(&mut self, _: &mut MemberBase) { self.base.invalidate(); } } impl Drawable for CocoaScintilla { fn draw(&mut self, _member: &mut MemberBase, control: &mut ControlBase) { self.base.draw(control.coords, control.measured); } fn measure(&mut self, _: &mut MemberBase, control: &mut ControlBase, parent_width: u16, parent_height: u16) -> (u16, u16, bool) { let old_size = control.measured; control.measured = match control.visibility { types::Visibility::Gone => (0, 0), _ => { let w = match control.layout.width { layout::Size::MatchParent => parent_width, layout::Size::Exact(w) => w, layout::Size::WrapContent => { 42 as u16 } }; let h = match control.layout.height { layout::Size::MatchParent => parent_height, layout::Size::Exact(h) => h, layout::Size::WrapContent => { 42 as u16 } }; (w, h) } }; (control.measured.0, control.measured.1, control.measured != old_size) } fn invalidate(&mut self, _: &mut MemberBase, _: &mut ControlBase) { self.base.invalidate(); } } impl HasNativeIdInner for CocoaScintilla { type Id = CocoaId; fn native_id(&self) -> Self::Id { self.base.control.into() } } impl HasSizeInner for CocoaScintilla { fn on_size_set(&mut self, _: &mut MemberBase, _: (u16, u16)) -> bool { self.base.invalidate(); true } } impl Spawnable for CocoaScintilla { fn spawn() -> Box<dyn controls::Control> { Self::new().into_control() } } impl HasVisibilityInner for CocoaScintilla { fn on_visibility_set(&mut self, _base: &mut MemberBase, value: types::Visibility) -> bool { self.base.on_set_visibility(value) } } impl MemberInner for CocoaScintilla {} extern "C" fn set_frame_size(this: &mut Object, sel: Sel, param: NSSize) { unsafe { let b = member_from_cocoa_id_mut::<Scintilla>(this).unwrap(); let b2 = member_from_cocoa_id_mut::<Scintilla>(this).unwrap(); (b.inner().inner().inner().base.resize_handler)(b2, sel, param) } } extern "C" fn 
set_frame_size_inner<O: crate::Scintilla>(this: &mut Scintilla, _: Sel, param: NSSize) { unsafe { let () = msg_send![super(this.inner_mut().inner_mut().inner_mut().base.control, Class::get(BASE_CLASS).unwrap()), setFrameSize: param]; this.call_on_size::<O>(param.width as u16, param.height as u16) } }
use crate::sdk::*; use plygui_cocoa::common::*; use std::os::raw::{c_int, c_long, c_ulong, c_void}; lazy_static! { static ref WINDOW_CLASS: RefClass = unsafe { register_window_class("PlyguiConsole", BASE_CLASS, |decl| { decl.add_method(sel!(setFrameSize:), set_frame_size as extern "C" fn(&mut Object, Sel, NSSize)); }) }; } pub type Scintilla = AMember<AControl<AScintilla<CocoaScintilla>>>; const BASE_CLASS: &str = "ScintillaView"; #[repr(C)] pub struct CocoaScintilla { base: CocoaControlBase<Scintilla>, fn_ptr: Option<extern "C" fn(*mut c_void, c_int, c_ulong, c_long) -> *mut c_void>, self_ptr: Option<*mut c_void>, } impl<O: crate::Scintilla> NewScintillaInner<O> for CocoaScintilla { fn with_uninit(u: &mut mem::MaybeUninit<O>) -> Self { let sc = Self { base: CocoaControlBase::with_params(*WINDOW_CLASS, set_frame_size_inner::<O>), fn_ptr: None, self_ptr: None, }; unsafe { let selfptr = u as *mut _ as *mut ::std::os::raw::c_void; (&mut *sc.base.control).set_ivar(IVAR, selfptr); } sc } } impl ScintillaInner for CocoaScintilla { fn new() -> Box<dyn crate::Scintilla> { let mut b: Box<mem::MaybeUninit<Scintilla>> = Box::new_uninit(); let ab = AMember::with_inner( AControl::with_inner( AScintilla::with_inner( <Self as NewScintillaInner<Scintilla>>::with_uninit(b.as_mut()), ) ), ); unsafe { b.as_mut_ptr().write(ab); b.assume_init() } } fn set_margin_width(&mut self, index: usize, width: isize) { if let Some(fn_ptr) = self.fn_ptr { (fn_ptr)(self.self_ptr.unwrap(), scintilla_sys::SCI_SETMARGINWIDTHN as i32, index as c_ulong, width as c_long); } } fn set_readonly(&mut self, readonly: bool) { if let Some(fn_ptr) = self.fn_ptr { (fn_ptr)(self.self_ptr.unwrap(), scintilla_sys::SCI_SETREADONLY as i32, if readonly { 1 } else { 0 }, 0); } } fn is_readonly(&self) -> bool { if let Some(fn_ptr) = self.fn_ptr { !(fn_ptr)(self.self_ptr.unwrap(), scintilla_sys::SCI_GETREADONLY as i32, 0, 0).is_null() } else { true } } fn set_codepage(&mut self, cp: crate::Codepage) { if let Some(fn_ptr) 
= self.fn_ptr { ((fn_ptr)(self.self_ptr.unwrap(), scintilla_sys::SCI_SETCODEPAGE as i32, cp as c_ulong, 0) as isize); } } fn codepage(&self) -> crate::Codepage { if let Some(fn_ptr) = self.fn_ptr { ((
e.on_set_visibility(value) } } impl MemberInner for CocoaScintilla {} extern "C" fn set_frame_size(this: &mut Object, sel: Sel, param: NSSize) { unsafe { let b = member_from_cocoa_id_mut::<Scintilla>(this).unwrap(); let b2 = member_from_cocoa_id_mut::<Scintilla>(this).unwrap(); (b.inner().inner().inner().base.resize_handler)(b2, sel, param) } } extern "C" fn set_frame_size_inner<O: crate::Scintilla>(this: &mut Scintilla, _: Sel, param: NSSize) { unsafe { let () = msg_send![super(this.inner_mut().inner_mut().inner_mut().base.control, Class::get(BASE_CLASS).unwrap()), setFrameSize: param]; this.call_on_size::<O>(param.width as u16, param.height as u16) } }
fn_ptr)(self.self_ptr.unwrap(), scintilla_sys::SCI_GETCODEPAGE as i32, 0, 0) as isize).into() } else { Default::default() } } fn append_text(&mut self, text: &str) { self.set_codepage(crate::Codepage::Utf8); if let Some(fn_ptr) = self.fn_ptr { let len = text.len(); let tptr = text.as_bytes().as_ptr(); (fn_ptr)(self.self_ptr.unwrap(), crate::scintilla_sys::SCI_APPENDTEXT as i32, len as c_ulong, tptr as c_long); } } } impl ControlInner for CocoaScintilla { fn on_added_to_container(&mut self, member: &mut MemberBase, control: &mut ControlBase, _parent: &dyn controls::Container, _x: i32, _y: i32, pw: u16, ph: u16) { unsafe { use scintilla_sys::{SCI_GETDIRECTFUNCTION, SCI_GETDIRECTPOINTER}; let fn_ptr: extern "C" fn(*mut c_void, c_int, c_ulong, c_long) -> *mut c_void = msg_send![self.base.control, message:SCI_GETDIRECTFUNCTION wParam:0 lParam:0]; let self_ptr: *mut c_void = msg_send![self.base.control, message:SCI_GETDIRECTPOINTER wParam:0 lParam:0]; self.fn_ptr = Some(fn_ptr); self.self_ptr = Some(self_ptr); } self.measure(member, control, pw, ph); } fn on_removed_from_container(&mut self, _: &mut MemberBase, _: &mut ControlBase, _: &dyn controls::Container) { self.fn_ptr = None; self.self_ptr = None; unsafe { self.base.on_removed_from_container(); } } fn parent(&self) -> Option<&dyn controls::Member> { self.base.parent() } fn parent_mut(&mut self) -> Option<&mut dyn controls::Member> { self.base.parent_mut() } fn root(&self) -> Option<&dyn controls::Member> { self.base.root() } fn root_mut(&mut self) -> Option<&mut dyn controls::Member> { self.base.root_mut() } #[cfg(feature = "markup")] fn fill_from_markup(&mut self, member: &mut MemberBase, control: &mut ControlBase, markup: &plygui_api::markup::Markup, registry: &mut plygui_api::markup::MarkupRegistry) { fill_from_markup_base!(self, base, markup, registry, Scintilla, ["Scintilla"]); } } impl HasLayoutInner for CocoaScintilla { fn on_layout_changed(&mut self, _: &mut MemberBase) { self.base.invalidate(); } } impl 
Drawable for CocoaScintilla { fn draw(&mut self, _member: &mut MemberBase, control: &mut ControlBase) { self.base.draw(control.coords, control.measured); } fn measure(&mut self, _: &mut MemberBase, control: &mut ControlBase, parent_width: u16, parent_height: u16) -> (u16, u16, bool) { let old_size = control.measured; control.measured = match control.visibility { types::Visibility::Gone => (0, 0), _ => { let w = match control.layout.width { layout::Size::MatchParent => parent_width, layout::Size::Exact(w) => w, layout::Size::WrapContent => { 42 as u16 } }; let h = match control.layout.height { layout::Size::MatchParent => parent_height, layout::Size::Exact(h) => h, layout::Size::WrapContent => { 42 as u16 } }; (w, h) } }; (control.measured.0, control.measured.1, control.measured != old_size) } fn invalidate(&mut self, _: &mut MemberBase, _: &mut ControlBase) { self.base.invalidate(); } } impl HasNativeIdInner for CocoaScintilla { type Id = CocoaId; fn native_id(&self) -> Self::Id { self.base.control.into() } } impl HasSizeInner for CocoaScintilla { fn on_size_set(&mut self, _: &mut MemberBase, _: (u16, u16)) -> bool { self.base.invalidate(); true } } impl Spawnable for CocoaScintilla { fn spawn() -> Box<dyn controls::Control> { Self::new().into_control() } } impl HasVisibilityInner for CocoaScintilla { fn on_visibility_set(&mut self, _base: &mut MemberBase, value: types::Visibility) -> bool { self.bas
random
[ { "content": "fn event_handler<O: crate::Scintilla>(object: &mut QObject, event: &mut QEvent) -> bool {\n\n match unsafe { event.type_() } {\n\n QEventType::Resize => {\n\n if let Some(this) = cast_qobject_to_uimember_mut::<Scintilla>(object) {\n\n let size = unsafe { \n\n let size = Ptr::from_raw(event).static_downcast::<QResizeEvent>();\n\n let size = (\n\n \tutils::coord_to_size(size.size().width()), \n\n \tutils::coord_to_size(size.size().height())\n\n );\n\n this.inner_mut().base.measured = size;\n\n if let layout::Size::WrapContent = this.inner_mut().base.layout.width {\n\n this.inner_mut().inner_mut().inner_mut().base.widget.set_minimum_width(size.0 as i32); \n\n this.inner_mut().inner_mut().inner_mut().base.widget.set_maximum_width(size.0 as i32); \n\n }\n\n if let layout::Size::WrapContent = this.inner_mut().base.layout.height {\n\n this.inner_mut().inner_mut().inner_mut().base.widget.set_minimum_height(size.1 as i32); \n\n this.inner_mut().inner_mut().inner_mut().base.widget.set_maximum_height(size.1 as i32); \n\n }\n\n size\n\n };\n\n this.call_on_size::<O>(size.0, size.1);\n\n }\n\n }\n\n _ => {}\n\n }\n\n false\n\n}\n", "file_path": "src/imp/scintilla/mod_qt.rs", "rank": 0, "score": 172654.50349905546 }, { "content": "fn event_handler(object: &mut QObject, event: &mut QEvent) -> bool {\n\n unsafe {\n\n let ptr = object.property(PROPERTY.as_ptr() as *const i8).to_u_long_long();\n\n if ptr != 0 {\n\n let sc: &mut Scintilla = mem::transmute(ptr);\n\n match event.type_() {\n\n QEventType::Resize => {\n\n if sc.as_inner().as_inner().base.dirty {\n\n use plygui_api::controls::HasSize;\n\n\n\n sc.as_inner_mut().as_inner_mut().base.dirty = false;\n\n let (width, height) = sc.size();\n\n sc.call_on_size(width, height);\n\n }\n\n }\n\n _ => {}\n\n }\n\n }\n\n false\n\n }\n\n}\n\ndefault_impls_as!(Scintilla);\n", "file_path": "src/legacy/scintilla/lib_qt.rs", "rank": 1, "score": 169462.44001247396 }, { "content": "fn event_handler(object: &mut QObject, event: 
&mut QEvent) -> bool {\n\n unsafe {\n\n let ptr = object.property(PROPERTY.as_ptr() as *const i8).to_u_long_long();\n\n if ptr != 0 {\n\n let sc: &mut Console = mem::transmute(ptr);\n\n match event.type_() {\n\n QEventType::Resize => {\n\n use plygui_api::controls::HasSize;\n\n\n\n let (width, height) = sc.size();\n\n sc.call_on_size(width, height);\n\n }\n\n _ => {}\n\n }\n\n }\n\n false\n\n }\n\n}\n\ndefault_impls_as!(Console);\n", "file_path": "src/legacy/console/lib_qt.rs", "rank": 2, "score": 150811.5653757036 }, { "content": "fn on_notify(_this: &GtkScintilla, _msg: i32, _notification: Ptr, _data: Ptr) {\n\n //let mut b = this.clone().upcast::<Widget>();\n\n //let notification = unsafe { &*(notification as *const SCNotification) };\n\n}\n\n\n\ndefault_impls_as!(Scintilla);\n", "file_path": "src/legacy/scintilla/lib_gtk.rs", "rank": 3, "score": 91740.69051794476 }, { "content": "fn on_notify(_this: &GtkScintillaSys, _msg: i32, _notification: Ptr, _data: Ptr) {\n\n //let mut b = this.clone().upcast::<Widget>();\n\n //let notification = unsafe { &*(notification as *const SCNotification) };\n\n}\n", "file_path": "src/imp/scintilla/mod_gtk.rs", "rank": 4, "score": 90063.97892690083 }, { "content": "fn on_notify(_this: &GtkScintilla, _msg: i32, _notification: Ptr, _data: Ptr) {\n\n //let mut b = this.clone().upcast::<Widget>();\n\n //let notification = unsafe { &*(notification as *const SCNotification) };\n\n}\n", "file_path": "src/legacy/console/lib_gtk.rs", "rank": 5, "score": 84098.86441532067 }, { "content": "fn on_size_allocate<O: crate::Scintilla>(this: &::plygui_gtk::gtk::Widget, _allo: &::plygui_gtk::gtk::Rectangle) {\n\n use plygui_api::controls::HasSize;\n\n\n\n let mut ll = this.clone().upcast::<Widget>();\n\n let ll = cast_gtk_widget_to_member_mut::<Scintilla>(&mut ll).unwrap();\n\n\n\n let measured_size = ll.size();\n\n ll.call_on_size::<O>(measured_size.0 as u16, measured_size.1 as u16);\n\n}\n\n\n", "file_path": "src/imp/scintilla/mod_gtk.rs", 
"rank": 6, "score": 61509.43301327384 }, { "content": "fn on_size_allocate(this: &::plygui_gtk::gtk::Widget, _allo: &::plygui_gtk::gtk::Rectangle) {\n\n use plygui_api::controls::HasSize;\n\n\n\n let mut ll = this.clone().upcast::<Widget>();\n\n let ll = cast_gtk_widget_to_member_mut::<Scintilla>(&mut ll).unwrap();\n\n\n\n let measured_size = ll.size();\n\n ll.call_on_size(measured_size.0 as u16, measured_size.1 as u16);\n\n}\n\n\n", "file_path": "src/legacy/scintilla/lib_gtk.rs", "rank": 7, "score": 48986.59404607074 }, { "content": "#[cfg(all(target_os = \"macos\", feature = \"cocoa_\"))]\n\ntype ConsoleNative = super::lib_cocoa::ConsoleCocoa;\n", "file_path": "src/legacy/console/common.rs", "rank": 8, "score": 38529.88103949837 }, { "content": "#[cfg(feature = \"qt5\")]\n\ntype ConsoleNative = super::lib_qt::ConsoleQt;\n", "file_path": "src/legacy/console/common.rs", "rank": 9, "score": 38529.88103949837 }, { "content": "#[cfg(all(target_os = \"windows\", feature = \"win32\"))]\n\ntype ConsoleNative = super::lib_win32::ConsoleWin32;\n", "file_path": "src/legacy/console/common.rs", "rank": 10, "score": 38529.88103949837 }, { "content": "#[cfg(feature = \"gtk3\")]\n\ntype ConsoleNative = super::lib_gtk::ConsoleGtk;\n\n\n\nuse std::sync::mpsc;\n\nuse std::{io, process, thread};\n\n\n\npub type Console = Member<Control<ConsoleImpl>>;\n\n\n\nconst NO_CONSOLE_NAME: &str = \"Plygui Unnamed Console\";\n\n\n", "file_path": "src/legacy/console/common.rs", "rank": 11, "score": 38529.88103949837 }, { "content": "fn on_size_allocate(this: &::plygui_gtk::gtk::Widget, _allo: &::plygui_gtk::gtk::Rectangle) {\n\n let mut ll = this.clone().upcast::<Widget>();\n\n if let Some(ll) = cast_gtk_widget_to_member_mut::<Console>(&mut ll) {\n\n use plygui_api::controls::HasSize;\n\n\n\n let measured_size = ll.size();\n\n ll.call_on_size(measured_size.0 as u16, measured_size.1 as u16);\n\n }\n\n}\n\n\n", "file_path": "src/legacy/console/lib_gtk.rs", "rank": 12, "score": 30335.71940930034 
}, { "content": " fn set_codepage(&mut self, cp: super::Codepage);\n\n fn codepage(&self) -> super::Codepage;\n\n fn append_text(&mut self, text: &str);\n\n }\n\n constructor: {\n\n fn new() -> Box<dyn Scintilla>;\n\n }\n\n }\n\n}\n\nimpl<II: ScintillaInner, T: HasInner<I = II> + Abstract + 'static> ScintillaInner for T {\n\n default fn new() -> Box<dyn Scintilla> {\n\n <<Self as HasInner>::I as ScintillaInner>::new()\n\n }\n\n default fn set_margin_width(&mut self, index: usize, width: isize) {\n\n self.inner_mut().set_margin_width(index, width)\n\n }\n\n default fn set_readonly(&mut self, readonly: bool) {\n\n self.inner_mut().set_readonly(readonly)\n\n }\n\n default fn is_readonly(&self) -> bool {\n", "file_path": "src/api/scintilla.rs", "rank": 13, "score": 27878.20352877389 }, { "content": "use plygui_api::{\n\n controls::{Member, Control},\n\n sdk::{AControl, ControlInner, HasInner, AMember, Abstract},\n\n};\n\n\n\ndefine! {\n\n Scintilla: Control {\n\n outer: {\n\n fn set_margin_width(&mut self, index: usize, width: isize);\n\n fn set_readonly(&mut self, readonly: bool);\n\n fn is_readonly(&self) -> bool;\n\n /*fn set_codepage(&mut self, cp: Codepage); // if we manipulate UTF8 only, do we need this in public?\n\n fn codepage(&self) -> Codepage;*/\n\n \n\n fn append_text(&mut self, text: &str);\n\n }\n\n inner: {\n\n fn set_margin_width(&mut self, index: usize, width: isize);\n\n fn set_readonly(&mut self, readonly: bool);\n\n fn is_readonly(&self) -> bool;\n", "file_path": "src/api/scintilla.rs", "rank": 14, "score": 27873.315650139983 }, { "content": " self.inner().is_readonly()\n\n }\n\n default fn set_codepage(&mut self, cp: super::Codepage) {\n\n self.inner_mut().set_codepage(cp)\n\n }\n\n default fn codepage(&self) -> super::Codepage {\n\n self.inner().codepage()\n\n }\n\n default fn append_text(&mut self, text: &str) {\n\n self.inner_mut().append_text(text)\n\n }\n\n}\n\nimpl<T: ScintillaInner> Scintilla for AMember<AControl<AScintilla<T>>> {\n\n 
default fn set_margin_width(&mut self, index: usize, width: isize) {\n\n self.inner_mut().inner_mut().inner_mut().set_margin_width(index, width)\n\n }\n\n default fn set_readonly(&mut self, readonly: bool) {\n\n self.inner_mut().inner_mut().inner_mut().set_readonly(readonly)\n\n }\n\n default fn is_readonly(&self) -> bool {\n", "file_path": "src/api/scintilla.rs", "rank": 15, "score": 27870.99316959154 }, { "content": " self.inner().inner().inner().is_readonly()\n\n }\n\n default fn append_text(&mut self, text: &str) {\n\n self.inner_mut().inner_mut().inner_mut().append_text(text)\n\n }\n\n default fn as_scintilla(& self) -> & dyn Scintilla { self } \n\n default fn as_scintilla_mut (& mut self) -> & mut dyn Scintilla { self } \n\n default fn into_scintilla (self : Box < Self >) -> Box < dyn Scintilla > { self }\n\n}\n\nimpl<T: ScintillaInner> NewScintilla for AMember<AControl<AScintilla<T>>> {\n\n fn new() -> Box<dyn Scintilla> {\n\n T::new()\n\n }\n\n}\n", "file_path": "src/api/scintilla.rs", "rank": 16, "score": 27860.8431721833 }, { "content": "#[cfg(all(target_os = \"windows\", feature = \"win32\"))]\n\npub(crate) mod mod_win32;\n\n#[cfg(all(target_os = \"windows\", feature = \"win32\"))]\n\npub use mod_win32::{Scintilla, WindowsScintilla as ScintillaControl};\n\n\n\n\n\n#[cfg(all(target_os = \"macos\", feature = \"cocoa_\"))]\n\npub(crate) mod mod_cocoa;\n\n#[cfg(all(target_os = \"macos\", feature = \"cocoa_\"))]\n\npub use mod_cocoa::{Scintilla, CocoaScintilla as ScintillaControl};\n\n\n\n#[cfg(feature = \"qt5\")]\n\npub(crate) mod mod_qt;\n\n#[cfg(feature = \"qt5\")]\n\npub use mod_qt::{Scintilla, QtScintilla as ScintillaControl};\n\n\n\n\n\n#[cfg(feature = \"gtk3\")]\n\npub(crate) mod mod_gtk;\n\n#[cfg(feature = \"gtk3\")]\n\npub use mod_gtk::{Scintilla, GtkScintilla as ScintillaControl};\n", "file_path": "src/imp/scintilla/mod.rs", "rank": 17, "score": 26836.097601002693 }, { "content": "use super::*;\n\n\n\n#[cfg(all(target_os = \"windows\", feature = 
\"win32\"))]\n\npub(crate) mod lib_win32;\n\n#[cfg(all(target_os = \"windows\", feature = \"win32\"))]\n\npub use self::lib_win32::Scintilla;\n\n\n\n#[cfg(all(target_os = \"macos\", feature = \"cocoa_\"))]\n\npub(crate) mod lib_cocoa;\n\n#[cfg(all(target_os = \"macos\", feature = \"cocoa_\"))]\n\npub use self::lib_cocoa::Scintilla;\n\n\n\n#[cfg(feature = \"qt5\")]\n\npub(crate) mod lib_qt;\n\n#[cfg(feature = \"qt5\")]\n\npub use self::lib_qt::Scintilla;\n\n\n\n#[cfg(feature = \"gtk3\")]\n\npub(crate) mod lib_gtk;\n\n#[cfg(feature = \"gtk3\")]\n\npub use self::lib_gtk::Scintilla;\n", "file_path": "src/legacy/scintilla/mod.rs", "rank": 18, "score": 26835.937403625474 }, { "content": "use super::development as scintilla_dev;\n\nuse super::*;\n\n\n\nuse plygui_qt::common::*;\n\nuse scintilla_sys::*;\n\n\n\npub type Scintilla = Member<Control<ScintillaQt>>;\n\n\n\n#[repr(C)]\n\npub struct ScintillaQt {\n\n base: QtControlBase<Scintilla, ScintillaEditBase>,\n\n h_command: (bool, SlotSCNotificationPtr<'static>),\n\n}\n\n\n\nimpl scintilla_dev::ScintillaInner for ScintillaQt {\n\n fn set_margin_width(&mut self, index: usize, width: isize) {\n\n unsafe {\n\n let _ = self.base.widget.as_mut().send(SCI_SETMARGINWIDTHN as u32, index, width);\n\n }\n\n }\n", "file_path": "src/legacy/scintilla/lib_qt.rs", "rank": 19, "score": 25917.905762363127 }, { "content": "use super::development as scintilla_dev;\n\nuse super::*;\n\n\n\nuse plygui_cocoa::common::*;\n\n\n\nuse std::os::raw::{c_int, c_long, c_ulong, c_void};\n\n\n\nlazy_static! 
{\n\n static ref WINDOW_CLASS: RefClass = unsafe {\n\n register_window_class(\"PlyguiConsole\", BASE_CLASS, |decl| {\n\n decl.add_method(sel!(setFrameSize:), set_frame_size as extern \"C\" fn(&mut Object, Sel, NSSize));\n\n })\n\n };\n\n}\n\n\n\npub type Scintilla = Member<Control<ScintillaCocoa>>;\n\n\n\nconst BASE_CLASS: &str = \"ScintillaView\";\n\n\n\n#[repr(C)]\n", "file_path": "src/legacy/scintilla/lib_cocoa.rs", "rank": 21, "score": 25915.773016725147 }, { "content": "\t b.assume_init()\n\n }\n\n }\n\n fn set_margin_width(&mut self, index: usize, width: isize) {\n\n unsafe {\n\n let _ = self.base.widget.send(SCI_SETMARGINWIDTHN as u32, index, width);\n\n }\n\n }\n\n fn set_readonly(&mut self, readonly: bool) {\n\n unsafe {\n\n let _ = self.base.widget.send(SCI_SETREADONLY as u32, if readonly { 1 } else { 0 }, 0);\n\n }\n\n }\n\n fn is_readonly(&self) -> bool {\n\n unsafe { self.base.widget.send(SCI_GETREADONLY, 0, 0) as usize == 1 }\n\n }\n\n fn set_codepage(&mut self, cp: crate::Codepage) {\n\n unsafe {\n\n let _ = self.base.widget.send(SCI_SETCODEPAGE, cp as usize, 0);\n\n }\n", "file_path": "src/imp/scintilla/mod_qt.rs", "rank": 22, "score": 25911.505076218167 }, { "content": " )\n\n ),\n\n );\n\n unsafe {\n\n\t b.as_mut_ptr().write(ab);\n\n\t b.assume_init()\n\n }\n\n }\n\n fn set_margin_width(&mut self, index: usize, width: isize) {\n\n let widget: Object = Object::from(self.base.widget.clone()).into();\n\n widget.downcast::<GtkScintillaSys>().unwrap().send_message(scintilla_sys::SCI_SETMARGINWIDTHN as u32, index as u64, width as i64);\n\n }\n\n fn set_readonly(&mut self, readonly: bool) {\n\n let widget: Object = Object::from(self.base.widget.clone()).into();\n\n widget.downcast::<GtkScintillaSys>().unwrap().send_message(scintilla_sys::SCI_SETREADONLY as u32, if readonly { 1 } else { 0 }, 0);\n\n }\n\n fn is_readonly(&self) -> bool {\n\n let widget: Object = Object::from(self.base.widget.clone()).into();\n\n 
widget.downcast::<GtkScintillaSys>().unwrap().send_message(scintilla_sys::SCI_GETREADONLY as u32, 0, 0) == 1\n\n }\n", "file_path": "src/imp/scintilla/mod_gtk.rs", "rank": 23, "score": 25911.266637218552 }, { "content": "impl ScintillaInner for WindowsScintilla {\n\n fn new() -> Box<dyn crate::Scintilla> { \n\n let mut b: Box<mem::MaybeUninit<Scintilla>> = Box::new_uninit();\n\n let ab = AMember::with_inner(\n\n AControl::with_inner(\n\n AScintilla::with_inner(\n\n <Self as NewScintillaInner<Scintilla>>::with_uninit(b.as_mut()),\n\n )\n\n ),\n\n );\n\n unsafe {\n\n\t b.as_mut_ptr().write(ab);\n\n\t b.assume_init()\n\n }\n\n }\n\n fn set_margin_width(&mut self, index: usize, width: isize) {\n\n if let Some(fn_ptr) = self.fn_ptr {\n\n (fn_ptr)(self.self_ptr.unwrap(), crate::scintilla_sys::SCI_SETMARGINWIDTHN as i32, index as c_ulong, width as c_long);\n\n }\n\n }\n", "file_path": "src/imp/scintilla/mod_win32.rs", "rank": 25, "score": 25911.064952505403 }, { "content": "use crate::sdk::*;\n\n\n\nuse plygui_qt::common::*;\n\nuse scintilla_sys::*;\n\n\n\npub type Scintilla = AMember<AControl<AScintilla<QtScintilla>>>;\n\n\n\n#[repr(C)]\n\npub struct QtScintilla {\n\n base: QtControlBase<Scintilla, QScintillaEditBase>,\n\n h_command: (bool, QBox<SlotOfSCNotification>),\n\n}\n\n\n\nimpl<O: crate::Scintilla> NewScintillaInner<O> for QtScintilla {\n\n fn with_uninit(u: &mut mem::MaybeUninit<O>) -> Self {\n\n let sc = Self {\n\n base: QtControlBase::with_params( QScintillaEditBase::new(), event_handler::<O>),\n\n h_command: (false, unsafe { SlotOfSCNotification::new(NullPtr, move |_| {}) }),\n\n };\n\n unsafe {\n", "file_path": "src/imp/scintilla/mod_qt.rs", "rank": 26, "score": 25910.891996260147 }, { "content": " MemberFunctions::new(_as_any, _as_any_mut, _as_member, _as_member_mut),\n\n ));\n\n b\n\n }\n\n fn set_margin_width(&mut self, index: usize, width: isize) {\n\n if let Some(fn_ptr) = self.fn_ptr {\n\n (fn_ptr)(self.self_ptr.unwrap(), 
super::scintilla_sys::SCI_SETMARGINWIDTHN as i32, index as c_ulong, width as c_long);\n\n }\n\n }\n\n fn set_readonly(&mut self, readonly: bool) {\n\n if let Some(fn_ptr) = self.fn_ptr {\n\n (fn_ptr)(self.self_ptr.unwrap(), super::scintilla_sys::SCI_SETREADONLY as i32, if readonly { 1 } else { 0 }, 0);\n\n }\n\n }\n\n fn is_readonly(&self) -> bool {\n\n if let Some(fn_ptr) = self.fn_ptr {\n\n !(fn_ptr)(self.self_ptr.unwrap(), super::scintilla_sys::SCI_GETREADONLY as i32, 0, 0).is_null()\n\n } else {\n\n true\n\n }\n", "file_path": "src/legacy/scintilla/lib_win32.rs", "rank": 28, "score": 25910.356602672433 }, { "content": "\n\n unsafe {\n\n let selfptr = b.as_mut() as *mut _ as *mut ::std::os::raw::c_void;\n\n (&mut *b.as_inner_mut().as_inner_mut().base.control).set_ivar(IVAR, selfptr);\n\n }\n\n b\n\n }\n\n\n\n fn set_margin_width(&mut self, index: usize, width: isize) {\n\n if let Some(fn_ptr) = self.fn_ptr {\n\n (fn_ptr)(self.self_ptr.unwrap(), scintilla_sys::SCI_SETMARGINWIDTHN as i32, index as c_ulong, width as c_long);\n\n }\n\n }\n\n fn set_readonly(&mut self, readonly: bool) {\n\n if let Some(fn_ptr) = self.fn_ptr {\n\n (fn_ptr)(self.self_ptr.unwrap(), scintilla_sys::SCI_SETREADONLY as i32, if readonly { 1 } else { 0 }, 0);\n\n }\n\n }\n\n fn is_readonly(&self) -> bool {\n\n if let Some(fn_ptr) = self.fn_ptr {\n", "file_path": "src/legacy/scintilla/lib_cocoa.rs", "rank": 29, "score": 25909.409958819237 }, { "content": " fn set_margin_width(&mut self, index: usize, width: isize) {\n\n let widget: Object = Object::from(self.base.widget.clone()).into();\n\n widget.downcast::<GtkScintilla>().unwrap().send_message(scintilla_sys::SCI_SETMARGINWIDTHN as u32, index as u64, width as i64);\n\n }\n\n fn set_readonly(&mut self, readonly: bool) {\n\n let widget: Object = Object::from(self.base.widget.clone()).into();\n\n widget.downcast::<GtkScintilla>().unwrap().send_message(scintilla_sys::SCI_SETREADONLY as u32, if readonly { 1 } else { 0 }, 0);\n\n }\n\n fn 
is_readonly(&self) -> bool {\n\n let widget: Object = Object::from(self.base.widget.clone()).into();\n\n widget.downcast::<GtkScintilla>().unwrap().send_message(scintilla_sys::SCI_GETREADONLY as u32, 0, 0) == 1\n\n }\n\n fn set_codepage(&mut self, cp: super::Codepage) {\n\n let widget: Object = Object::from(self.base.widget.clone()).into();\n\n widget.downcast::<GtkScintilla>().unwrap().send_message(scintilla_sys::SCI_SETCODEPAGE as u32, cp as isize as u64, 0);\n\n }\n\n fn codepage(&self) -> super::Codepage {\n\n let widget: Object = Object::from(self.base.widget.clone()).into();\n\n (widget.downcast::<GtkScintilla>().unwrap().send_message(scintilla_sys::SCI_GETCODEPAGE as u32, 0, 0) as isize).into()\n\n }\n", "file_path": "src/legacy/scintilla/lib_gtk.rs", "rank": 30, "score": 25909.199979837744 }, { "content": "use crate::sdk::*;\n\n\n\nuse plygui_gtk::common::*;\n\n\n\nuse scintilla_sys::{self, Ptr, /*SCNotification,*/ Scintilla as GtkScintillaSys, ScintillaExt};\n\n\n\nuse std::str;\n\n\n\npub type Scintilla = AMember<AControl<AScintilla<GtkScintilla>>>;\n\n\n\n#[repr(C)]\n\npub struct GtkScintilla {\n\n base: GtkControlBase<Scintilla>,\n\n}\n\n\n\nimpl<O: crate::Scintilla> NewScintillaInner<O> for GtkScintilla {\n\n fn with_uninit(u: &mut mem::MaybeUninit<O>) -> Self {\n\n let mut sc = Self {\n\n base: GtkControlBase::with_gtk_widget(GtkScintillaSys::new().upcast::<Widget>()),\n\n };\n", "file_path": "src/imp/scintilla/mod_gtk.rs", "rank": 31, "score": 25909.172225506576 }, { "content": " }\n\n fn set_readonly(&mut self, readonly: bool) {\n\n unsafe {\n\n let _ = self.base.widget.as_mut().send(SCI_SETREADONLY as u32, if readonly { 1 } else { 0 }, 0);\n\n }\n\n }\n\n fn is_readonly(&self) -> bool {\n\n unsafe { self.base.widget.as_ref().send(SCI_GETREADONLY, 0, 0) as usize == 1 }\n\n }\n\n fn set_codepage(&mut self, cp: super::Codepage) {\n\n unsafe {\n\n let _ = self.base.widget.as_mut().send(SCI_SETCODEPAGE, cp as usize, 0);\n\n }\n\n }\n\n fn 
codepage(&self) -> super::Codepage {\n\n unsafe { (self.base.widget.as_ref().send(SCI_GETCODEPAGE, 0, 0) as isize).into() }\n\n }\n\n fn append_text(&mut self, text: &str) {\n\n self.set_codepage(super::Codepage::Utf8);\n\n let len = text.len();\n", "file_path": "src/legacy/scintilla/lib_qt.rs", "rank": 32, "score": 25909.097631796503 }, { "content": " let ptr = u as *const _ as u64;\n\n let qo: &QObject = &sc.base.widget.static_upcast();\n\n qo.set_property(PROPERTY.as_ptr() as *const i8, &QVariant::from_u64(ptr));\n\n sc.base.widget.notify().connect(&sc.h_command.1);\n\n }\n\n sc\n\n }\n\n}\n\nimpl ScintillaInner for QtScintilla {\n\n fn new() -> Box<dyn crate::Scintilla> { \n\n let mut b: Box<mem::MaybeUninit<Scintilla>> = Box::new_uninit();\n\n let ab = AMember::with_inner(\n\n AControl::with_inner(\n\n AScintilla::with_inner(\n\n <Self as NewScintillaInner<Scintilla>>::with_uninit(b.as_mut()),\n\n )\n\n ),\n\n );\n\n unsafe {\n\n\t b.as_mut_ptr().write(ab);\n", "file_path": "src/imp/scintilla/mod_qt.rs", "rank": 33, "score": 25908.174674028913 }, { "content": "use super::development as scintilla_dev;\n\n\n\nuse plygui_win32::common::*;\n\nuse scintilla_sys::{Scintilla_RegisterClasses, Scintilla_ReleaseResources};\n\n\n\nuse std::os::raw::{c_int, c_long, c_ulong, c_void as r_void};\n\nuse std::sync::atomic::Ordering;\n\n\n\nlazy_static! 
{\n\n pub static ref WINDOW_CLASS: Vec<u16> = OsStr::new(\"Scintilla\").encode_wide().chain(Some(0).into_iter()).collect::<Vec<_>>();\n\n}\n\n\n\npub type Scintilla = Member<Control<ScintillaWin32>>;\n\n\n\n#[repr(C)]\n\npub struct ScintillaWin32 {\n\n base: WindowsControlBase<Scintilla>,\n\n\n\n fn_ptr: Option<extern \"C\" fn(*mut r_void, c_int, c_ulong, c_long) -> *mut r_void>,\n\n self_ptr: Option<*mut r_void>,\n", "file_path": "src/legacy/scintilla/lib_win32.rs", "rank": 34, "score": 25907.67205881035 }, { "content": "use crate::sdk::*;\n\n\n\nuse plygui_win32::common::*;\n\nuse scintilla_sys::{Scintilla_RegisterClasses, Scintilla_ReleaseResources};\n\n\n\nuse std::os::raw::{c_int, c_long, c_ulong, c_void as r_void};\n\nuse std::sync::atomic::Ordering;\n\nuse std::sync::atomic::AtomicUsize;\n\n\n\nstatic GLOBAL_COUNT: AtomicUsize = AtomicUsize::new(0);\n\n\n\nlazy_static! {\n\n pub static ref WINDOW_CLASS: Vec<u16> = OsStr::new(\"Scintilla\").encode_wide().chain(Some(0).into_iter()).collect::<Vec<_>>();\n\n}\n\n\n\npub type Scintilla = AMember<AControl<AScintilla<WindowsScintilla>>>;\n\n\n\n#[repr(C)]\n\npub struct WindowsScintilla {\n\n base: WindowsControlBase<Scintilla>,\n", "file_path": "src/imp/scintilla/mod_win32.rs", "rank": 35, "score": 25907.647734596452 }, { "content": " unsafe fn native_id(&self) -> Self::Id {\n\n self.base.widget.clone().into()\n\n }\n\n}\n\n\n\nimpl HasSizeInner for ScintillaGtk {\n\n fn on_size_set(&mut self, _: &mut MemberBase, (width, height): (u16, u16)) -> bool {\n\n self.base.widget().set_size_request(width as i32, height as i32);\n\n true\n\n }\n\n}\n\n\n\nimpl HasVisibilityInner for ScintillaGtk {\n\n fn on_visibility_set(&mut self, _: &mut MemberBase, _: types::Visibility) -> bool {\n\n self.base.invalidate()\n\n }\n\n}\n\n\n\nimpl MemberInner for ScintillaGtk {}\n\n\n", "file_path": "src/legacy/scintilla/lib_gtk.rs", "rank": 37, "score": 25907.480304638364 }, { "content": "//use super::development as 
scintilla_dev;\n\n\n\nuse plygui_gtk::common::*;\n\n\n\nuse scintilla_sys::{self, Ptr, /*SCNotification,*/ Scintilla as GtkScintilla, ScintillaExt};\n\n\n\nuse std::str;\n\n\n\npub type Scintilla = Member<Control<ScintillaGtk>>;\n\n\n\n#[repr(C)]\n\npub struct ScintillaGtk {\n\n base: GtkControlBase<Scintilla>,\n\n}\n\n\n\nimpl super::development::ScintillaInner for ScintillaGtk {\n\n fn new() -> Box<super::Scintilla> {\n\n let sc = GtkScintilla::new();\n\n let mut sc = Box::new(Member::with_inner(\n\n Control::with_inner(\n", "file_path": "src/legacy/scintilla/lib_gtk.rs", "rank": 38, "score": 25907.13034034516 }, { "content": " this.set_layout_width(layout::Size::Exact(height));\n\n self.base.invalidate();\n\n true\n\n }\n\n}\n\n\n\nimpl HasVisibilityInner for ScintillaCocoa {\n\n fn on_visibility_set(&mut self, _base: &mut MemberBase, value: types::Visibility) -> bool {\n\n self.base.on_set_visibility(value)\n\n }\n\n}\n\n\n\nimpl MemberInner for ScintillaCocoa {}\n\n\n\nextern \"C\" fn set_frame_size(this: &mut Object, _: Sel, param: NSSize) {\n\n unsafe {\n\n let sp = member_from_cocoa_id_mut::<Scintilla>(this).unwrap();\n\n let () = msg_send![super(sp.as_inner_mut().as_inner_mut().base.control, Class::get(BASE_CLASS).unwrap()), setFrameSize: param];\n\n sp.call_on_size(param.width as u16, param.height as u16);\n\n }\n\n}\n\ndefault_impls_as!(Scintilla);\n", "file_path": "src/legacy/scintilla/lib_cocoa.rs", "rank": 39, "score": 25907.030423347627 }, { "content": " fn new() -> Box<super::Scintilla> {\n\n let sc = ScintillaEditBase::new();\n\n let mut sc = Box::new(Member::with_inner(\n\n Control::with_inner(\n\n ScintillaQt {\n\n base: QtControlBase::with_params(sc, event_handler),\n\n h_command: (false, SlotSCNotificationPtr::new(move |_| {})),\n\n },\n\n (),\n\n ),\n\n MemberFunctions::new(_as_any, _as_any_mut, _as_member, _as_member_mut),\n\n ));\n\n unsafe {\n\n use plygui_qt::qt_core::cpp_utils::StaticCast;\n\n let ptr = sc.as_ref() as *const _ as u64;\n\n 
let qo: &mut QObject = sc.as_inner_mut().as_inner_mut().base.widget.static_cast_mut();\n\n qo.set_property(PROPERTY.as_ptr() as *const i8, &QVariant::new0(ptr));\n\n }\n\n sc.as_inner().as_inner().base.widget.signals().notify().connect(&sc.as_inner().as_inner().h_command.1);\n\n sc\n", "file_path": "src/legacy/scintilla/lib_qt.rs", "rank": 40, "score": 25905.450210674564 }, { "content": " {\n\n let ptr = u as *mut _ as *mut c_void;\n\n sc.base.set_pointer(ptr);\n\n }\n\n {\n\n let sc: Object = Object::from(sc.base.widget.clone()).into();\n\n let sc = sc.downcast::<GtkScintillaSys>().unwrap();\n\n sc.connect_notify(on_notify);\n\n }\n\n Object::from(sc.base.widget.clone()).downcast::<Widget>().unwrap().connect_size_allocate(on_size_allocate::<O>);\n\n sc\n\n }\n\n}\n\nimpl ScintillaInner for GtkScintilla {\n\n fn new() -> Box<dyn crate::Scintilla> { \n\n let mut b: Box<mem::MaybeUninit<Scintilla>> = Box::new_uninit();\n\n let ab = AMember::with_inner(\n\n AControl::with_inner(\n\n AScintilla::with_inner(\n\n <Self as NewScintillaInner<Scintilla>>::with_uninit(b.as_mut()),\n", "file_path": "src/imp/scintilla/mod_gtk.rs", "rank": 41, "score": 25904.830254809458 }, { "content": " use super::NewScintilla;\n\n\n\n Scintilla::new().into_control()\n\n}\n\n\n\nunsafe extern \"system\" fn handler(hwnd: windef::HWND, msg: minwindef::UINT, wparam: minwindef::WPARAM, lparam: minwindef::LPARAM, _: usize, param: usize) -> isize {\n\n let sc: &mut Scintilla = mem::transmute(param);\n\n let ww = winuser::GetWindowLongPtrW(hwnd, winuser::GWLP_USERDATA);\n\n if ww == 0 {\n\n winuser::SetWindowLongPtrW(hwnd, winuser::GWLP_USERDATA, param as isize);\n\n }\n\n match msg {\n\n winuser::WM_SIZE => {\n\n let width = lparam as u16;\n\n let height = (lparam >> 16) as u16;\n\n\n\n sc.call_on_size(width, height);\n\n }\n\n _ => {}\n\n }\n\n commctrl::DefSubclassProc(hwnd, msg, wparam, lparam)\n\n}\n\n\n\ndefault_impls_as!(Scintilla);\n", "file_path": "src/legacy/scintilla/lib_win32.rs", 
"rank": 42, "score": 25904.715000241205 }, { "content": "\n\n fn_ptr: Option<extern \"C\" fn(*mut r_void, c_int, c_ulong, c_long) -> *mut r_void>,\n\n self_ptr: Option<*mut r_void>,\n\n}\n\nimpl<O: crate::Scintilla> NewScintillaInner<O> for WindowsScintilla {\n\n fn with_uninit(_: &mut mem::MaybeUninit<O>) -> Self {\n\n if GLOBAL_COUNT.fetch_add(1, Ordering::SeqCst) < 1 {\n\n unsafe {\n\n if Scintilla_RegisterClasses(hinstance() as *mut r_void) == 0 {\n\n panic!(\"Cannot register Scintilla Win32 class\");\n\n }\n\n }\n\n }\n\n\t\tSelf {\n\n base: WindowsControlBase::with_handler(Some(handler::<O>)),\n\n fn_ptr: None,\n\n self_ptr: None,\n\n }\n\n }\n\n}\n", "file_path": "src/imp/scintilla/mod_win32.rs", "rank": 43, "score": 25904.579633861773 }, { "content": "pub struct ScintillaCocoa {\n\n base: CocoaControlBase<Scintilla>,\n\n\n\n fn_ptr: Option<extern \"C\" fn(*mut c_void, c_int, c_ulong, c_long) -> *mut c_void>,\n\n self_ptr: Option<*mut c_void>,\n\n}\n\n\n\nimpl scintilla_dev::ScintillaInner for ScintillaCocoa {\n\n fn new() -> Box<super::Scintilla> {\n\n let mut b = Box::new(Member::with_inner(\n\n Control::with_inner(\n\n ScintillaCocoa {\n\n base: CocoaControlBase::with_params(*WINDOW_CLASS),\n\n fn_ptr: None,\n\n self_ptr: None,\n\n },\n\n (),\n\n ),\n\n MemberFunctions::new(_as_any, _as_any_mut, _as_member, _as_member_mut),\n\n ));\n", "file_path": "src/legacy/scintilla/lib_cocoa.rs", "rank": 44, "score": 25903.980172972348 }, { "content": " fn root_mut(&mut self) -> Option<&mut dyn controls::Member> {\n\n self.base.root_mut().map(|m| m.as_member_mut())\n\n }\n\n}\n\n\n\nimpl HasNativeIdInner for GtkScintilla {\n\n type Id = GtkWidget;\n\n\n\n fn native_id(&self) -> Self::Id {\n\n self.base.widget.clone().into()\n\n }\n\n}\n\n\n\nimpl HasSizeInner for GtkScintilla {\n\n fn on_size_set(&mut self, _: &mut MemberBase, (width, height): (u16, u16)) -> bool {\n\n self.base.widget().set_size_request(width as i32, height as i32);\n\n true\n\n }\n\n}\n\n\n", 
"file_path": "src/imp/scintilla/mod_gtk.rs", "rank": 45, "score": 25903.688185481602 }, { "content": " self.base.invalidate();\n\n }\n\n }\n\n}\n\n\n\nimpl HasNativeIdInner for ScintillaWin32 {\n\n type Id = Hwnd;\n\n\n\n unsafe fn native_id(&self) -> Self::Id {\n\n self.base.hwnd.into()\n\n }\n\n}\n\n\n\nimpl HasSizeInner for ScintillaWin32 {\n\n fn on_size_set(&mut self, base: &mut MemberBase, (width, height): (u16, u16)) -> bool {\n\n use plygui_api::controls::HasLayout;\n\n\n\n let this = base.as_any_mut().downcast_mut::<Scintilla>().unwrap();\n\n this.set_layout_width(layout::Size::Exact(width));\n\n this.set_layout_width(layout::Size::Exact(height));\n", "file_path": "src/legacy/scintilla/lib_win32.rs", "rank": 47, "score": 25903.310021183934 }, { "content": " }\n\n fn invalidate(&mut self, _: &mut MemberBase, _: &mut ControlBase) {\n\n self.base.invalidate();\n\n }\n\n}\n\n\n\nimpl HasNativeIdInner for ScintillaCocoa {\n\n type Id = CocoaId;\n\n\n\n unsafe fn native_id(&self) -> Self::Id {\n\n self.base.control.into()\n\n }\n\n}\n\n\n\nimpl HasSizeInner for ScintillaCocoa {\n\n fn on_size_set(&mut self, base: &mut MemberBase, (width, height): (u16, u16)) -> bool {\n\n use plygui_api::controls::HasLayout;\n\n\n\n let this = base.as_any_mut().downcast_mut::<Scintilla>().unwrap();\n\n this.set_layout_width(layout::Size::Exact(width));\n", "file_path": "src/legacy/scintilla/lib_cocoa.rs", "rank": 48, "score": 25903.30426815796 }, { "content": " fn set_readonly(&mut self, readonly: bool) {\n\n if let Some(fn_ptr) = self.fn_ptr {\n\n (fn_ptr)(self.self_ptr.unwrap(), crate::scintilla_sys::SCI_SETREADONLY as i32, if readonly { 1 } else { 0 }, 0);\n\n }\n\n }\n\n fn is_readonly(&self) -> bool {\n\n if let Some(fn_ptr) = self.fn_ptr {\n\n !(fn_ptr)(self.self_ptr.unwrap(), crate::scintilla_sys::SCI_GETREADONLY as i32, 0, 0).is_null()\n\n } else {\n\n true\n\n }\n\n }\n\n fn set_codepage(&mut self, cp: crate::Codepage) {\n\n if let Some(fn_ptr) = self.fn_ptr {\n\n 
((fn_ptr)(self.self_ptr.unwrap(), crate::scintilla_sys::SCI_SETCODEPAGE as i32, cp as c_ulong, 0) as isize);\n\n }\n\n }\n\n fn codepage(&self) -> crate::Codepage {\n\n if let Some(fn_ptr) = self.fn_ptr {\n\n ((fn_ptr)(self.self_ptr.unwrap(), crate::scintilla_sys::SCI_GETCODEPAGE as i32, 0, 0) as isize).into()\n", "file_path": "src/imp/scintilla/mod_win32.rs", "rank": 49, "score": 25903.097021577763 }, { "content": " fn on_size_set(&mut self, _: &mut MemberBase, (width, height): (u16, u16)) -> bool {\n\n unsafe { self.base.widget.set_fixed_size_2a(width as i32, height as i32); }\n\n true\n\n }\n\n}\n\nimpl MemberInner for QtScintilla {}\n\n\n\nimpl Drawable for QtScintilla {\n\n fn draw(&mut self, member: &mut MemberBase, control: &mut ControlBase) {\n\n self.base.draw(member, control);\n\n }\n\n fn measure(&mut self, _member: &mut MemberBase, control: &mut ControlBase, parent_width: u16, parent_height: u16) -> (u16, u16, bool) {\n\n let old_size = control.measured;\n\n control.measured = match control.visibility {\n\n types::Visibility::Gone => (0, 0),\n\n _ => {\n\n let w = match control.layout.width {\n\n layout::Size::MatchParent => parent_width as i32,\n\n layout::Size::Exact(w) => w as i32,\n\n layout::Size::WrapContent => 42, // TODO min size\n", "file_path": "src/imp/scintilla/mod_qt.rs", "rank": 50, "score": 25902.782764098607 }, { "content": " }\n\n}\n\nimpl HasVisibilityInner for ScintillaQt {\n\n fn on_visibility_set(&mut self, _: &mut MemberBase, value: types::Visibility) -> bool {\n\n self.base.set_visibility(value);\n\n self.base.invalidate()\n\n }\n\n}\n\nimpl HasSizeInner for ScintillaQt {\n\n fn on_size_set(&mut self, _: &mut MemberBase, (width, height): (u16, u16)) -> bool {\n\n self.base.widget.set_fixed_size((width as i32, height as i32));\n\n true\n\n }\n\n}\n\nimpl MemberInner for ScintillaQt {}\n\n\n\nimpl Drawable for ScintillaQt {\n\n fn draw(&mut self, member: &mut MemberBase, control: &mut ControlBase) {\n\n self.base.draw(member, 
control);\n\n }\n", "file_path": "src/legacy/scintilla/lib_qt.rs", "rank": 51, "score": 25902.735571822166 }, { "content": " }\n\n}\n\n\n\nimpl HasSizeInner for WindowsScintilla {\n\n fn on_size_set(&mut self, base: &mut MemberBase, (width, height): (u16, u16)) -> bool {\n\n use plygui_api::controls::HasLayout;\n\n\n\n let this = base.as_any_mut().downcast_mut::<Scintilla>().unwrap();\n\n this.set_layout_width(layout::Size::Exact(width));\n\n this.set_layout_width(layout::Size::Exact(height));\n\n self.base.invalidate();\n\n true\n\n }\n\n}\n\nimpl HasVisibilityInner for WindowsScintilla {\n\n fn on_visibility_set(&mut self, base: &mut MemberBase, visibility: types::Visibility) -> bool {\n\n let hwnd = self.base.hwnd;\n\n if !hwnd.is_null() {\n\n unsafe {\n\n winuser::ShowWindow(self.base.hwnd, if visibility == types::Visibility::Visible { winuser::SW_SHOW } else { winuser::SW_HIDE });\n", "file_path": "src/imp/scintilla/mod_win32.rs", "rank": 52, "score": 25902.664147846583 }, { "content": " self.base.dirty = false;\n\n self.draw(member, control);\n\n }\n\n fn on_removed_from_container(&mut self, _member: &mut MemberBase, _control: &mut ControlBase, _: &dyn controls::Container) {}\n\n}\n\n\n\nimpl HasNativeIdInner for QtScintilla {\n\n type Id = QtId;\n\n\n\n fn native_id(&self) -> Self::Id {\n\n QtId::from(unsafe { self.base.widget.static_upcast::<QObject>().as_raw_ptr() } as *mut QObject)\n\n }\n\n}\n\nimpl HasVisibilityInner for QtScintilla {\n\n fn on_visibility_set(&mut self, _: &mut MemberBase, value: types::Visibility) -> bool {\n\n self.base.set_visibility(value);\n\n self.base.invalidate()\n\n }\n\n}\n\nimpl HasSizeInner for QtScintilla {\n", "file_path": "src/imp/scintilla/mod_qt.rs", "rank": 53, "score": 25902.290208623206 }, { "content": " }\n\n self.on_layout_changed(base);\n\n true\n\n } else {\n\n false\n\n }\n\n }\n\n}\n\n\n\nimpl MemberInner for WindowsScintilla {}\n\n\n\nimpl Drawable for WindowsScintilla {\n\n fn draw(&mut self, _member: &mut 
MemberBase, control: &mut ControlBase) {\n\n if let Some((x, y)) = control.coords {\n\n unsafe {\n\n winuser::SetWindowPos(self.base.hwnd, ptr::null_mut(), x, y, control.measured.0 as i32, control.measured.1 as i32, 0);\n\n }\n\n }\n\n }\n\n fn measure(&mut self, _member: &mut MemberBase, control: &mut ControlBase, w: u16, h: u16) -> (u16, u16, bool) {\n", "file_path": "src/imp/scintilla/mod_win32.rs", "rank": 55, "score": 25900.788709810524 }, { "content": " fn set_codepage(&mut self, cp: crate::Codepage) {\n\n let widget: Object = Object::from(self.base.widget.clone()).into();\n\n widget.downcast::<GtkScintillaSys>().unwrap().send_message(scintilla_sys::SCI_SETCODEPAGE as u32, cp as isize as u64, 0);\n\n }\n\n fn codepage(&self) -> crate::Codepage {\n\n let widget: Object = Object::from(self.base.widget.clone()).into();\n\n (widget.downcast::<GtkScintillaSys>().unwrap().send_message(scintilla_sys::SCI_GETCODEPAGE as u32, 0, 0) as isize).into()\n\n }\n\n fn append_text(&mut self, text: &str) {\n\n self.set_codepage(crate::Codepage::Utf8);\n\n let len = text.len();\n\n let tptr = text.as_bytes().as_ptr();\n\n let widget: Object = Object::from(self.base.widget.clone()).into();\n\n widget.downcast::<GtkScintillaSys>().unwrap().send_message(scintilla_sys::SCI_APPENDTEXT as u32, len as u64, tptr as i64);\n\n }\n\n}\n\n\n\nimpl HasLayoutInner for GtkScintilla {\n\n fn on_layout_changed(&mut self, _: &mut MemberBase) {\n\n self.base.invalidate();\n", "file_path": "src/imp/scintilla/mod_gtk.rs", "rank": 57, "score": 25900.078256551842 }, { "content": " ScintillaGtk {\n\n base: GtkControlBase::with_gtk_widget(sc.upcast::<Widget>()),\n\n },\n\n (),\n\n ),\n\n MemberFunctions::new(_as_any, _as_any_mut, _as_member, _as_member_mut),\n\n ));\n\n\n\n {\n\n let ptr = sc.as_ref() as *const _ as *mut c_void;\n\n sc.as_inner_mut().as_inner_mut().base.set_pointer(ptr);\n\n }\n\n {\n\n let sc: Object = Object::from(sc.as_inner_mut().as_inner_mut().base.widget.clone()).into();\n\n let 
sc = sc.downcast::<GtkScintilla>().unwrap();\n\n sc.connect_notify(on_notify);\n\n }\n\n Object::from(sc.as_inner_mut().as_inner_mut().base.widget.clone()).downcast::<Widget>().unwrap().connect_size_allocate(on_size_allocate);\n\n sc\n\n }\n", "file_path": "src/legacy/scintilla/lib_gtk.rs", "rank": 58, "score": 25900.041841678965 }, { "content": " fn root(&self) -> Option<&controls::Member> {\n\n self.base.root()\n\n }\n\n fn root_mut(&mut self) -> Option<&mut controls::Member> {\n\n self.base.root_mut()\n\n }\n\n fn on_added_to_container(&mut self, member: &mut MemberBase, control: &mut ControlBase, _parent: &controls::Container, x: i32, y: i32, pw: u16, ph: u16) {\n\n control.coords = Some((x, y));\n\n self.measure(member, control, pw, ph);\n\n self.base.dirty = false;\n\n self.draw(member, control);\n\n }\n\n fn on_removed_from_container(&mut self, _member: &mut MemberBase, _control: &mut ControlBase, _: &controls::Container) {}\n\n}\n\n\n\nimpl HasNativeIdInner for ScintillaQt {\n\n type Id = QtId;\n\n\n\n unsafe fn native_id(&self) -> Self::Id {\n\n QtId::from(self.base.widget.static_cast() as *const QObject as *mut QObject)\n", "file_path": "src/legacy/scintilla/lib_qt.rs", "rank": 59, "score": 25899.975051669757 }, { "content": " };\n\n (control.measured.0, control.measured.1, control.measured != old_size)\n\n }\n\n fn invalidate(&mut self, _member: &mut MemberBase, _control: &mut ControlBase) {\n\n self.base.invalidate()\n\n }\n\n}\n\n\n\nunsafe extern \"system\" fn handler<T: crate::Scintilla>(hwnd: windef::HWND, msg: minwindef::UINT, wparam: minwindef::WPARAM, lparam: minwindef::LPARAM, _: usize, param: usize) -> isize {\n\n let sc: &mut Scintilla = mem::transmute(param);\n\n let ww = winuser::GetWindowLongPtrW(hwnd, winuser::GWLP_USERDATA);\n\n if ww == 0 {\n\n winuser::SetWindowLongPtrW(hwnd, winuser::GWLP_USERDATA, param as WinPtr);\n\n }\n\n match msg {\n\n winuser::WM_SIZE => {\n\n let width = lparam as u16;\n\n let height = (lparam >> 16) as 
u16;\n\n\n\n sc.call_on_size::<T>(width, height); \n\n }\n\n _ => {}\n\n }\n\n commctrl::DefSubclassProc(hwnd, msg, wparam, lparam)\n\n}\n", "file_path": "src/imp/scintilla/mod_win32.rs", "rank": 60, "score": 25899.52134849521 }, { "content": " let len = text.len();\n\n let tptr = text.as_bytes().as_ptr();\n\n (fn_ptr)(self.self_ptr.unwrap(), super::scintilla_sys::SCI_APPENDTEXT as i32, len as c_ulong, tptr as c_long);\n\n }\n\n }\n\n}\n\n\n\nimpl ControlInner for ScintillaCocoa {\n\n fn on_added_to_container(&mut self, member: &mut MemberBase, control: &mut ControlBase, _parent: &controls::Container, _x: i32, _y: i32, pw: u16, ph: u16) {\n\n unsafe {\n\n use scintilla_sys::{SCI_GETDIRECTFUNCTION, SCI_GETDIRECTPOINTER};\n\n\n\n let fn_ptr: extern \"C\" fn(*mut c_void, c_int, c_ulong, c_long) -> *mut c_void = msg_send![self.base.control, message:SCI_GETDIRECTFUNCTION wParam:0 lParam:0];\n\n let self_ptr: *mut c_void = msg_send![self.base.control, message:SCI_GETDIRECTPOINTER wParam:0 lParam:0];\n\n\n\n self.fn_ptr = Some(fn_ptr);\n\n self.self_ptr = Some(self_ptr);\n\n }\n\n self.measure(member, control, pw, ph);\n\n }\n", "file_path": "src/legacy/scintilla/lib_cocoa.rs", "rank": 61, "score": 25899.39485018811 }, { "content": " }\n\n fn codepage(&self) -> crate::Codepage {\n\n unsafe { (self.base.widget.send(SCI_GETCODEPAGE, 0, 0) as isize).into() }\n\n }\n\n fn append_text(&mut self, text: &str) {\n\n self.set_codepage(crate::Codepage::Utf8);\n\n let len = text.len();\n\n let tptr = text.as_bytes().as_ptr();\n\n unsafe {\n\n self.base.widget.send(SCI_APPENDTEXT, len, tptr as isize);\n\n }\n\n }\n\n}\n\n\n\nimpl HasLayoutInner for QtScintilla {\n\n fn on_layout_changed(&mut self, _base: &mut MemberBase) {\n\n self.base.invalidate();\n\n }\n\n}\n\nimpl Spawnable for QtScintilla {\n", "file_path": "src/imp/scintilla/mod_qt.rs", "rank": 62, "score": 25899.117363947036 }, { "content": " !(fn_ptr)(self.self_ptr.unwrap(), scintilla_sys::SCI_GETREADONLY as i32, 0, 
0).is_null()\n\n } else {\n\n true\n\n }\n\n }\n\n fn set_codepage(&mut self, cp: Codepage) {\n\n if let Some(fn_ptr) = self.fn_ptr {\n\n ((fn_ptr)(self.self_ptr.unwrap(), scintilla_sys::SCI_SETCODEPAGE as i32, cp as c_ulong, 0) as isize);\n\n }\n\n }\n\n fn codepage(&self) -> super::Codepage {\n\n if let Some(fn_ptr) = self.fn_ptr {\n\n ((fn_ptr)(self.self_ptr.unwrap(), scintilla_sys::SCI_GETCODEPAGE as i32, 0, 0) as isize).into()\n\n } else {\n\n Default::default()\n\n }\n\n }\n\n fn append_text(&mut self, text: &str) {\n\n self.set_codepage(super::Codepage::Utf8);\n\n if let Some(fn_ptr) = self.fn_ptr {\n", "file_path": "src/legacy/scintilla/lib_cocoa.rs", "rank": 63, "score": 25899.092215083452 }, { "content": "}\n\n\n\nimpl scintilla_dev::ScintillaInner for ScintillaWin32 {\n\n fn new() -> Box<Scintilla> {\n\n if scintilla_dev::GLOBAL_COUNT.fetch_add(1, Ordering::SeqCst) < 1 {\n\n unsafe {\n\n if Scintilla_RegisterClasses(hinstance() as *mut r_void) == 0 {\n\n panic!(\"Cannot register Scintilla Win32 class\");\n\n }\n\n }\n\n }\n\n let b: Box<Scintilla> = Box::new(Member::with_inner(\n\n Control::with_inner(\n\n ScintillaWin32 {\n\n base: WindowsControlBase::new(),\n\n fn_ptr: None,\n\n self_ptr: None,\n\n },\n\n (),\n\n ),\n", "file_path": "src/legacy/scintilla/lib_win32.rs", "rank": 64, "score": 25898.801566908587 }, { "content": " self.base.invalidate();\n\n true\n\n }\n\n}\n\nimpl HasVisibilityInner for ScintillaWin32 {\n\n fn on_visibility_set(&mut self, base: &mut MemberBase, visibility: types::Visibility) -> bool {\n\n let hwnd = self.base.hwnd;\n\n if !hwnd.is_null() {\n\n unsafe {\n\n winuser::ShowWindow(self.base.hwnd, if visibility == types::Visibility::Visible { winuser::SW_SHOW } else { winuser::SW_HIDE });\n\n }\n\n self.on_layout_changed(base);\n\n true\n\n } else {\n\n false\n\n }\n\n }\n\n}\n\n\n\nimpl MemberInner for ScintillaWin32 {}\n", "file_path": "src/legacy/scintilla/lib_win32.rs", "rank": 65, "score": 25898.722191898454 }, { "content": 
" layout::Size::Exact(h) => h,\n\n layout::Size::WrapContent => {\n\n 42 as u16 // TODO min_height\n\n }\n\n };\n\n (cmp::max(0, w as i32 + lm + rm) as u16, cmp::max(0, h as i32 + tm + bm) as u16)\n\n }\n\n };\n\n (control.measured.0, control.measured.1, control.measured != old_size)\n\n }\n\n fn invalidate(&mut self, _: &mut MemberBase, _: &mut ControlBase) {\n\n self.base.invalidate();\n\n }\n\n}\n\n\n\n#[allow(dead_code)]\n\npub(crate) fn spawn() -> Box<controls::Control> {\n\n use crate::NewScintilla;\n\n\n\n Scintilla::new().into_control()\n\n}\n\n\n", "file_path": "src/legacy/scintilla/lib_gtk.rs", "rank": 66, "score": 25898.09248982017 }, { "content": "\n\nimpl Drawable for ScintillaWin32 {\n\n fn draw(&mut self, _member: &mut MemberBase, control: &mut ControlBase) {\n\n if let Some((x, y)) = control.coords {\n\n unsafe {\n\n winuser::SetWindowPos(self.base.hwnd, ptr::null_mut(), x, y, control.measured.0 as i32, control.measured.1 as i32, 0);\n\n }\n\n }\n\n }\n\n fn measure(&mut self, _member: &mut MemberBase, control: &mut ControlBase, w: u16, h: u16) -> (u16, u16, bool) {\n\n let old_size = control.measured;\n\n control.measured = match control.visibility {\n\n types::Visibility::Gone => (0, 0),\n\n _ => {\n\n let w = match control.layout.width {\n\n layout::Size::MatchParent => w,\n\n layout::Size::Exact(w) => w,\n\n layout::Size::WrapContent => {\n\n defaults::THE_ULTIMATE_ANSWER_TO_EVERYTHING // TODO min_width\n\n }\n", "file_path": "src/legacy/scintilla/lib_win32.rs", "rank": 67, "score": 25897.98443617758 }, { "content": " fn append_text(&mut self, text: &str) {\n\n self.set_codepage(super::Codepage::Utf8);\n\n let len = text.len();\n\n let tptr = text.as_bytes().as_ptr();\n\n let widget: Object = Object::from(self.base.widget.clone()).into();\n\n widget.downcast::<GtkScintilla>().unwrap().send_message(scintilla_sys::SCI_APPENDTEXT as u32, len as u64, tptr as i64);\n\n }\n\n}\n\n\n\nimpl HasLayoutInner for ScintillaGtk {\n\n fn on_layout_changed(&mut 
self, _: &mut MemberBase) {\n\n self.base.invalidate();\n\n }\n\n}\n\n\n\nimpl ControlInner for ScintillaGtk {\n\n fn on_added_to_container(&mut self, member: &mut MemberBase, control: &mut ControlBase, _parent: &controls::Container, x: i32, y: i32, pw: u16, ph: u16) {\n\n self.measure(member, control, pw, ph);\n\n control.coords = Some((x, y));\n\n self.draw(member, control);\n", "file_path": "src/legacy/scintilla/lib_gtk.rs", "rank": 68, "score": 25897.803460791594 }, { "content": " }\n\n fn invalidate(&mut self, _member: &mut MemberBase, _control: &mut ControlBase) {\n\n self.base.invalidate();\n\n }\n\n}\n\n\n\n#[allow(dead_code)]\n\npub(crate) fn spawn() -> Box<controls::Control> {\n\n use super::NewScintilla;\n\n\n\n Scintilla::new().into_control()\n\n}\n\n\n", "file_path": "src/legacy/scintilla/lib_qt.rs", "rank": 69, "score": 25897.780268890652 }, { "content": " layout::Size::MatchParent => w,\n\n layout::Size::Exact(w) => w,\n\n layout::Size::WrapContent => {\n\n 42 as u16 // TODO min_width\n\n }\n\n };\n\n let h = match control.layout.height {\n\n layout::Size::MatchParent => h,\n\n layout::Size::Exact(h) => h,\n\n layout::Size::WrapContent => {\n\n 42 as u16 // TODO min_height\n\n }\n\n };\n\n (cmp::max(0, w as i32 + lm + rm) as u16, cmp::max(0, h as i32 + tm + bm) as u16)\n\n }\n\n };\n\n (control.measured.0, control.measured.1, control.measured != old_size)\n\n }\n\n fn invalidate(&mut self, _: &mut MemberBase, _: &mut ControlBase) {\n\n self.base.invalidate();\n\n }\n\n}\n\nimpl Spawnable for GtkScintilla {\n\n fn spawn() -> Box<dyn controls::Control> {\n\n Self::new().into_control()\n\n }\n\n}\n", "file_path": "src/imp/scintilla/mod_gtk.rs", "rank": 70, "score": 25896.637144037173 }, { "content": "impl HasVisibilityInner for GtkScintilla {\n\n fn on_visibility_set(&mut self, _: &mut MemberBase, _: types::Visibility) -> bool {\n\n self.base.invalidate()\n\n }\n\n}\n\n\n\nimpl MemberInner for GtkScintilla {}\n\n\n\nimpl Drawable for GtkScintilla {\n\n 
fn draw(&mut self, _: &mut MemberBase, control: &mut ControlBase) {\n\n self.base.draw(control);\n\n }\n\n fn measure(&mut self, _: &mut MemberBase, control: &mut ControlBase, w: u16, h: u16) -> (u16, u16, bool) {\n\n let old_size = control.measured;\n\n let (lm, tm, rm, bm) = self.base.margins().into();\n\n\n\n control.measured = match control.visibility {\n\n types::Visibility::Gone => (0, 0),\n\n _ => {\n\n let w = match control.layout.width {\n", "file_path": "src/imp/scintilla/mod_gtk.rs", "rank": 71, "score": 25896.47852828409 }, { "content": " } else {\n\n Default::default()\n\n }\n\n }\n\n fn append_text(&mut self, text: &str) {\n\n self.set_codepage(crate::Codepage::Utf8);\n\n if let Some(fn_ptr) = self.fn_ptr {\n\n let len = text.len();\n\n let tptr = text.as_bytes().as_ptr();\n\n (fn_ptr)(self.self_ptr.unwrap(), crate::scintilla_sys::SCI_APPENDTEXT as i32, len as c_ulong, tptr as c_long);\n\n }\n\n }\n\n}\n\n\n\nimpl Spawnable for WindowsScintilla {\n\n fn spawn() -> Box<dyn controls::Control> {\n\n Self::new().into_control()\n\n }\n\n}\n\nimpl ControlInner for WindowsScintilla {\n", "file_path": "src/imp/scintilla/mod_win32.rs", "rank": 72, "score": 25896.336070419413 }, { "content": " let tptr = text.as_bytes().as_ptr();\n\n unsafe {\n\n self.base.widget.as_mut().send(SCI_APPENDTEXT, len, tptr as isize);\n\n }\n\n }\n\n}\n\n\n\nimpl HasLayoutInner for ScintillaQt {\n\n fn on_layout_changed(&mut self, _base: &mut MemberBase) {\n\n self.base.invalidate();\n\n }\n\n}\n\n\n\nimpl ControlInner for ScintillaQt {\n\n fn parent(&self) -> Option<&controls::Member> {\n\n self.base.parent()\n\n }\n\n fn parent_mut(&mut self) -> Option<&mut controls::Member> {\n\n self.base.parent_mut()\n\n }\n", "file_path": "src/legacy/scintilla/lib_qt.rs", "rank": 73, "score": 25896.176813324786 }, { "content": " fn on_added_to_container(&mut self, member: &mut MemberBase, control: &mut ControlBase, parent: &dyn controls::Container, x: i32, y: i32, pw: u16, ph: u16) {\n\n let 
selfptr = member as *mut _ as *mut c_void;\n\n self.base.hwnd = unsafe { parent.native_id() as windef::HWND }; // required for measure, as we don't have own hwnd yet\n\n let (w, h, _) = self.measure(member, control, pw, ph);\n\n self.base.create_control_hwnd(x as i32, y as i32, w as i32, h as i32, unsafe { parent.native_id() as windef::HWND }, 0, WINDOW_CLASS.as_ptr(), \"\", winuser::BS_PUSHBUTTON | winuser::WS_TABSTOP, selfptr);\n\n \n\n unsafe {\n\n self.fn_ptr = Some(mem::transmute(winuser::SendMessageW(self.base.hwnd, crate::scintilla_sys::SCI_GETDIRECTFUNCTION, 0, 0)));\n\n self.self_ptr = Some(winuser::SendMessageW(self.base.hwnd, crate::scintilla_sys::SCI_GETDIRECTPOINTER, 0, 0) as *mut r_void);\n\n }\n\n }\n\n fn on_removed_from_container(&mut self, _member: &mut MemberBase, _control: &mut ControlBase, _: &dyn controls::Container) {\n\n self.base.destroy_control_hwnd();\n\n self.fn_ptr = None;\n\n self.self_ptr = None;\n\n }\n\n\n\n fn parent(&self) -> Option<&dyn controls::Member> {\n\n self.base.parent().map(|p| p.as_member())\n\n }\n", "file_path": "src/imp/scintilla/mod_win32.rs", "rank": 74, "score": 25896.158807537042 }, { "content": " fn spawn() -> Box<dyn controls::Control> {\n\n Self::new().into_control()\n\n }\n\n}\n\nimpl ControlInner for QtScintilla {\n\n fn parent(&self) -> Option<&dyn controls::Member> {\n\n self.base.parent()\n\n }\n\n fn parent_mut(&mut self) -> Option<&mut dyn controls::Member> {\n\n self.base.parent_mut()\n\n }\n\n fn root(&self) -> Option<&dyn controls::Member> {\n\n self.base.root()\n\n }\n\n fn root_mut(&mut self) -> Option<&mut dyn controls::Member> {\n\n self.base.root_mut()\n\n }\n\n fn on_added_to_container(&mut self, member: &mut MemberBase, control: &mut ControlBase, _parent: &dyn controls::Container, x: i32, y: i32, pw: u16, ph: u16) {\n\n control.coords = Some((x, y));\n\n self.measure(member, control, pw, ph);\n", "file_path": "src/imp/scintilla/mod_qt.rs", "rank": 75, "score": 25895.88206003166 }, { "content": 
" }\n\n fn set_codepage(&mut self, cp: super::Codepage) {\n\n if let Some(fn_ptr) = self.fn_ptr {\n\n ((fn_ptr)(self.self_ptr.unwrap(), super::scintilla_sys::SCI_SETCODEPAGE as i32, cp as c_ulong, 0) as isize);\n\n }\n\n }\n\n fn codepage(&self) -> super::Codepage {\n\n if let Some(fn_ptr) = self.fn_ptr {\n\n ((fn_ptr)(self.self_ptr.unwrap(), super::scintilla_sys::SCI_GETCODEPAGE as i32, 0, 0) as isize).into()\n\n } else {\n\n Default::default()\n\n }\n\n }\n\n fn append_text(&mut self, text: &str) {\n\n self.set_codepage(super::Codepage::Utf8);\n\n if let Some(fn_ptr) = self.fn_ptr {\n\n let len = text.len();\n\n let tptr = text.as_bytes().as_ptr();\n\n (fn_ptr)(self.self_ptr.unwrap(), super::scintilla_sys::SCI_APPENDTEXT as i32, len as c_ulong, tptr as c_long);\n\n }\n", "file_path": "src/legacy/scintilla/lib_win32.rs", "rank": 76, "score": 25895.835130568776 }, { "content": " }\n\n}\n\n\n\nimpl ControlInner for ScintillaWin32 {\n\n fn on_added_to_container(&mut self, member: &mut MemberBase, control: &mut ControlBase, parent: &dyn controls::Container, x: i32, y: i32, pw: u16, ph: u16) {\n\n let selfptr = member as *mut _ as *mut c_void;\n\n let (hwnd, id) = unsafe {\n\n self.base.hwnd = parent.native_id() as windef::HWND; // required for measure, as we don't have own hwnd yet\n\n let (w, h, _) = self.measure(member, control, pw, ph);\n\n create_control_hwnd(x as i32, y as i32, w as i32, h as i32, parent.native_id() as windef::HWND, 0, WINDOW_CLASS.as_ptr(), \"\", winuser::BS_PUSHBUTTON | winuser::WS_TABSTOP, selfptr, Some(handler))\n\n };\n\n self.base.hwnd = hwnd;\n\n self.base.subclass_id = id;\n\n\n\n unsafe {\n\n self.fn_ptr = Some(mem::transmute(winuser::SendMessageW(self.base.hwnd, super::scintilla_sys::SCI_GETDIRECTFUNCTION, 0, 0)));\n\n self.self_ptr = Some(winuser::SendMessageW(self.base.hwnd, super::scintilla_sys::SCI_GETDIRECTPOINTER, 0, 0) as *mut r_void);\n\n }\n\n }\n\n fn on_removed_from_container(&mut self, _member: &mut MemberBase, _control: 
&mut ControlBase, _: &dyn controls::Container) {\n", "file_path": "src/legacy/scintilla/lib_win32.rs", "rank": 77, "score": 25895.106657062497 }, { "content": "\n\n #[cfg(feature = \"markup\")]\n\n fn fill_from_markup(&mut self, member: &mut MemberBase, control: &mut ControlBase, markup: &plygui_api::markup::Markup, registry: &mut plygui_api::markup::MarkupRegistry) {\n\n fill_from_markup_base!(self, base, markup, registry, Scintilla, [\"Scintilla\"]);\n\n }\n\n}\n\n\n\nimpl HasLayoutInner for ScintillaCocoa {\n\n fn on_layout_changed(&mut self, _: &mut MemberBase) {\n\n self.base.invalidate();\n\n }\n\n}\n\n\n\nimpl Drawable for ScintillaCocoa {\n\n fn draw(&mut self, _member: &mut MemberBase, control: &mut ControlBase) {\n\n self.base.draw(control.coords, control.measured);\n\n }\n\n fn measure(&mut self, member: &mut MemberBase, control: &mut ControlBase, parent_width: u16, parent_height: u16) -> (u16, u16, bool) {\n\n let old_size = control.measured;\n\n control.measured = match control.visibility {\n", "file_path": "src/legacy/scintilla/lib_cocoa.rs", "rank": 78, "score": 25894.65492414095 }, { "content": " Scintilla_ReleaseResources();\n\n }\n\n }\n\n }\n\n}\n\n\n\nimpl HasLayoutInner for WindowsScintilla {\n\n fn on_layout_changed(&mut self, _base: &mut MemberBase) {\n\n let hwnd = self.base.hwnd;\n\n if !hwnd.is_null() {\n\n self.base.invalidate();\n\n }\n\n }\n\n}\n\n\n\nimpl HasNativeIdInner for WindowsScintilla {\n\n type Id = Hwnd;\n\n\n\n fn native_id(&self) -> Self::Id {\n\n self.base.hwnd.into()\n", "file_path": "src/imp/scintilla/mod_win32.rs", "rank": 79, "score": 25894.27375991938 }, { "content": "impl Drawable for ScintillaGtk {\n\n fn draw(&mut self, _: &mut MemberBase, control: &mut ControlBase) {\n\n self.base.draw(control);\n\n }\n\n fn measure(&mut self, member: &mut MemberBase, control: &mut ControlBase, w: u16, h: u16) -> (u16, u16, bool) {\n\n let old_size = control.measured;\n\n let (lm, tm, rm, bm) = self.base.margins().into();\n\n\n\n 
control.measured = match control.visibility {\n\n types::Visibility::Gone => (0, 0),\n\n _ => {\n\n let w = match control.layout.width {\n\n layout::Size::MatchParent => w,\n\n layout::Size::Exact(w) => w,\n\n layout::Size::WrapContent => {\n\n 42 as u16 // TODO min_width\n\n }\n\n };\n\n let h = match control.layout.height {\n\n layout::Size::MatchParent => h,\n", "file_path": "src/legacy/scintilla/lib_gtk.rs", "rank": 80, "score": 25893.7950703734 }, { "content": " #[cfg(feature = \"markup\")]\n\n fn fill_from_markup(&mut self, base: &mut development::MemberControlBase, markup: &plygui_api::markup::Markup, registry: &mut plygui_api::markup::MarkupRegistry) {\n\n fill_from_markup_base!(self, base, markup, registry, Scintilla, [\"Scintilla\"]);\n\n }\n\n}\n\n\n\nimpl Drop for ScintillaWin32 {\n\n fn drop(&mut self) {\n\n if scintilla_dev::GLOBAL_COUNT.fetch_sub(1, Ordering::SeqCst) < 1 {\n\n unsafe {\n\n Scintilla_ReleaseResources();\n\n }\n\n }\n\n }\n\n}\n\n\n\nimpl HasLayoutInner for ScintillaWin32 {\n\n fn on_layout_changed(&mut self, _base: &mut MemberBase) {\n\n let hwnd = self.base.hwnd;\n\n if !hwnd.is_null() {\n", "file_path": "src/legacy/scintilla/lib_win32.rs", "rank": 82, "score": 25893.714216650023 }, { "content": " fn measure(&mut self, _member: &mut MemberBase, control: &mut ControlBase, parent_width: u16, parent_height: u16) -> (u16, u16, bool) {\n\n let old_size = control.measured;\n\n control.measured = match control.visibility {\n\n types::Visibility::Gone => (0, 0),\n\n _ => {\n\n let w = match control.layout.width {\n\n layout::Size::MatchParent => parent_width as i32,\n\n layout::Size::Exact(w) => w as i32,\n\n layout::Size::WrapContent => 42, // TODO min size\n\n };\n\n let h = match control.layout.height {\n\n layout::Size::MatchParent => parent_height as i32,\n\n layout::Size::Exact(h) => h as i32,\n\n layout::Size::WrapContent => 42, // TODO min size\n\n };\n\n (cmp::max(0, w) as u16, cmp::max(0, h) as u16)\n\n }\n\n };\n\n self.base.dirty 
= control.measured != old_size;\n\n (control.measured.0, control.measured.1, self.base.dirty)\n", "file_path": "src/legacy/scintilla/lib_qt.rs", "rank": 83, "score": 25892.931622028424 }, { "content": " fn parent_mut(&mut self) -> Option<&mut dyn controls::Member> {\n\n self.base.parent_mut().map(|p| p.as_member_mut())\n\n }\n\n fn root(&self) -> Option<&dyn controls::Member> {\n\n self.base.root().map(|p| p.as_member())\n\n }\n\n fn root_mut(&mut self) -> Option<&mut dyn controls::Member> {\n\n self.base.root_mut().map(|p| p.as_member_mut())\n\n }\n\n\n\n /*#[cfg(feature = \"markup\")]\n\n fn fill_from_markup(&mut self, base: &mut development::MemberControlBase, markup: &plygui_api::markup::Markup, registry: &mut plygui_api::markup::MarkupRegistry) {\n\n fill_from_markup_base!(self, base, markup, registry, Scintilla, [\"Scintilla\"]);\n\n }*/\n\n}\n\n\n\nimpl Drop for WindowsScintilla {\n\n fn drop(&mut self) {\n\n if GLOBAL_COUNT.fetch_sub(1, Ordering::SeqCst) < 1 {\n\n unsafe {\n", "file_path": "src/imp/scintilla/mod_win32.rs", "rank": 85, "score": 25892.196793252173 }, { "content": " }\n\n}\n\n\n\nimpl ControlInner for GtkScintilla {\n\n fn on_added_to_container(&mut self, member: &mut MemberBase, control: &mut ControlBase, _parent: &dyn controls::Container, x: i32, y: i32, pw: u16, ph: u16) {\n\n self.measure(member, control, pw, ph);\n\n control.coords = Some((x, y));\n\n self.draw(member, control);\n\n }\n\n fn on_removed_from_container(&mut self, _: &mut MemberBase, _: &mut ControlBase, _: &dyn controls::Container) {}\n\n\n\n fn parent(&self) -> Option<&dyn controls::Member> {\n\n self.base.parent().map(|m| m.as_member())\n\n }\n\n fn parent_mut(&mut self) -> Option<&mut dyn controls::Member> {\n\n self.base.parent_mut().map(|m| m.as_member_mut())\n\n }\n\n fn root(&self) -> Option<&dyn controls::Member> {\n\n self.base.root().map(|m| m.as_member())\n\n }\n", "file_path": "src/imp/scintilla/mod_gtk.rs", "rank": 86, "score": 25892.044844909797 }, { 
"content": " fn on_removed_from_container(&mut self, _: &mut MemberBase, _: &mut ControlBase, _: &controls::Container) {\n\n self.fn_ptr = None;\n\n self.self_ptr = None;\n\n unsafe {\n\n self.base.on_removed_from_container();\n\n }\n\n }\n\n\n\n fn parent(&self) -> Option<&controls::Member> {\n\n self.base.parent()\n\n }\n\n fn parent_mut(&mut self) -> Option<&mut controls::Member> {\n\n self.base.parent_mut()\n\n }\n\n fn root(&self) -> Option<&controls::Member> {\n\n self.base.root()\n\n }\n\n fn root_mut(&mut self) -> Option<&mut controls::Member> {\n\n self.base.root_mut()\n\n }\n", "file_path": "src/legacy/scintilla/lib_cocoa.rs", "rank": 87, "score": 25891.89579741178 }, { "content": " };\n\n let h = match control.layout.height {\n\n layout::Size::MatchParent => h,\n\n layout::Size::Exact(h) => h,\n\n layout::Size::WrapContent => {\n\n defaults::THE_ULTIMATE_ANSWER_TO_EVERYTHING // TODO min_height\n\n }\n\n };\n\n (cmp::max(0, w as i32) as u16, cmp::max(0, h as i32) as u16)\n\n }\n\n };\n\n (control.measured.0, control.measured.1, control.measured != old_size)\n\n }\n\n fn invalidate(&mut self, _member: &mut MemberBase, _control: &mut ControlBase) {\n\n self.base.invalidate()\n\n }\n\n}\n\n\n\n#[allow(dead_code)]\n\npub(crate) fn spawn() -> Box<dyn controls::Control> {\n", "file_path": "src/legacy/scintilla/lib_win32.rs", "rank": 90, "score": 25890.299798236887 }, { "content": " }\n\n fn on_removed_from_container(&mut self, _: &mut MemberBase, _: &mut ControlBase, _: &controls::Container) {}\n\n\n\n fn parent(&self) -> Option<&controls::Member> {\n\n self.base.parent().map(|m| m.as_member())\n\n }\n\n fn parent_mut(&mut self) -> Option<&mut controls::Member> {\n\n self.base.parent_mut().map(|m| m.as_member_mut())\n\n }\n\n fn root(&self) -> Option<&controls::Member> {\n\n self.base.root().map(|m| m.as_member())\n\n }\n\n fn root_mut(&mut self) -> Option<&mut controls::Member> {\n\n self.base.root_mut().map(|m| m.as_member_mut())\n\n }\n\n}\n\n\n\nimpl 
HasNativeIdInner for ScintillaGtk {\n\n type Id = GtkWidget;\n\n\n", "file_path": "src/legacy/scintilla/lib_gtk.rs", "rank": 91, "score": 25889.891779340818 }, { "content": " destroy_hwnd(self.base.hwnd, self.base.subclass_id, Some(handler));\n\n self.base.hwnd = 0 as windef::HWND;\n\n self.base.subclass_id = 0;\n\n self.fn_ptr = None;\n\n self.self_ptr = None;\n\n }\n\n\n\n fn parent(&self) -> Option<&dyn controls::Member> {\n\n self.base.parent().map(|p| p.as_member())\n\n }\n\n fn parent_mut(&mut self) -> Option<&mut dyn controls::Member> {\n\n self.base.parent_mut().map(|p| p.as_member_mut())\n\n }\n\n fn root(&self) -> Option<&dyn controls::Member> {\n\n self.base.root().map(|p| p.as_member())\n\n }\n\n fn root_mut(&mut self) -> Option<&mut dyn controls::Member> {\n\n self.base.root_mut().map(|p| p.as_member_mut())\n\n }\n\n\n", "file_path": "src/legacy/scintilla/lib_win32.rs", "rank": 92, "score": 25889.35899026594 }, { "content": " };\n\n let h = match control.layout.height {\n\n layout::Size::MatchParent => parent_height as i32,\n\n layout::Size::Exact(h) => h as i32,\n\n layout::Size::WrapContent => 42, // TODO min size\n\n };\n\n (cmp::max(0, w) as u16, cmp::max(0, h) as u16)\n\n }\n\n };\n\n self.base.dirty = control.measured != old_size;\n\n (control.measured.0, control.measured.1, self.base.dirty)\n\n }\n\n fn invalidate(&mut self, _member: &mut MemberBase, _control: &mut ControlBase) {\n\n self.base.invalidate();\n\n }\n\n}\n\n\n", "file_path": "src/imp/scintilla/mod_qt.rs", "rank": 93, "score": 25888.733526105938 }, { "content": " let old_size = control.measured;\n\n control.measured = match control.visibility {\n\n types::Visibility::Gone => (0, 0),\n\n _ => {\n\n let w = match control.layout.width {\n\n layout::Size::MatchParent => w,\n\n layout::Size::Exact(w) => w,\n\n layout::Size::WrapContent => {\n\n defaults::THE_ULTIMATE_ANSWER_TO_EVERYTHING // TODO min_width\n\n }\n\n };\n\n let h = match control.layout.height {\n\n 
layout::Size::MatchParent => h,\n\n layout::Size::Exact(h) => h,\n\n layout::Size::WrapContent => {\n\n defaults::THE_ULTIMATE_ANSWER_TO_EVERYTHING // TODO min_height\n\n }\n\n };\n\n (cmp::max(0, w as i32) as u16, cmp::max(0, h as i32) as u16)\n\n }\n", "file_path": "src/imp/scintilla/mod_win32.rs", "rank": 94, "score": 25885.841979639754 }, { "content": " types::Visibility::Gone => (0, 0),\n\n _ => {\n\n let w = match control.layout.width {\n\n layout::Size::MatchParent => parent_width,\n\n layout::Size::Exact(w) => w,\n\n layout::Size::WrapContent => {\n\n 42 as u16 // TODO min_width\n\n }\n\n };\n\n let h = match control.layout.height {\n\n layout::Size::MatchParent => parent_height,\n\n layout::Size::Exact(h) => h,\n\n layout::Size::WrapContent => {\n\n 42 as u16 // TODO min_height\n\n }\n\n };\n\n (w, h)\n\n }\n\n };\n\n (control.measured.0, control.measured.1, control.measured != old_size)\n", "file_path": "src/legacy/scintilla/lib_cocoa.rs", "rank": 95, "score": 25883.506605315648 }, { "content": "use super::*;\n\n\n\nuse plygui_cocoa::common::*;\n\n\n\nuse std::os::raw::{c_int, c_long, c_ulong, c_void};\n\n\n\nlazy_static! 
{\n\n static ref WINDOW_CLASS: RefClass = unsafe {\n\n register_window_class(\"PlyguiScintilla\", BASE_CLASS, |decl| {\n\n decl.add_method(sel!(setFrameSize:), set_frame_size as extern \"C\" fn(&mut Object, Sel, NSSize));\n\n })\n\n };\n\n}\n\n\n\nconst BASE_CLASS: &str = \"ScintillaView\";\n\n\n\n#[repr(C)]\n\npub struct ConsoleCocoa {\n\n base: CocoaControlBase<Console>,\n\n\n", "file_path": "src/legacy/console/lib_cocoa.rs", "rank": 96, "score": 36.00210194208519 }, { "content": " }\n\n }\n\n fn set_label(&mut self, _: &mut MemberBase, label: Cow<str>) {\n\n match self.cmd {\n\n ConsoleThread::Idle(ref mut name) => *name = label.into(),\n\n ConsoleThread::Running(_, _) => {} // TODO warn\n\n }\n\n }\n\n}\n\nimpl ScintillaInner for ScintillaConsole {\n\n fn new() -> Box<dyn crate::Scintilla> {\n\n Self::with_path(\"\").into_scintilla()\t\n\n }\n\n fn set_margin_width(&mut self, index: usize, width: isize) {\n\n self.inner.set_margin_width(index, width)\n\n }\n\n fn set_readonly(&mut self, readonly: bool) {\n\n self.inner.set_readonly(readonly)\n\n }\n\n fn is_readonly(&self) -> bool {\n", "file_path": "src/imp/console/mod.rs", "rank": 97, "score": 35.55745564021381 }, { "content": " default fn new() -> Box<dyn Scintilla> {\n\n <<Self as HasInner>::I as ScintillaInner>::new()\n\n }\n\n default fn set_margin_width(&mut self, index: usize, width: isize) {\n\n self.inner_mut().set_margin_width(index, width)\n\n }\n\n default fn set_readonly(&mut self, readonly: bool) {\n\n self.inner_mut().set_readonly(readonly)\n\n }\n\n default fn is_readonly(&self) -> bool {\n\n self.inner().is_readonly()\n\n }\n\n default fn set_codepage(&mut self, cp: super::Codepage) {\n\n self.inner_mut().set_codepage(cp)\n\n }\n\n default fn codepage(&self) -> super::Codepage {\n\n self.inner().codepage()\n\n }\n\n default fn append_text(&mut self, text: &str) {\n\n self.inner_mut().append_text(text)\n", "file_path": "src/api/code_editor.rs", "rank": 98, "score": 32.137544881844654 }, { 
"content": " AControl::with_inner(\n\n AScintilla::with_inner(\n\n ACodeEditor::with_inner(\n\n <Self as NewCodeEditorInner<CodeEditor>>::with_uninit(b.as_mut()),\n\n )\n\n )\n\n ),\n\n );\n\n ab.append_text(content.as_ref());\n\n\t\tunsafe {\n\n\t b.as_mut_ptr().write(ab);\n\n\t b.assume_init()\n\n }\n\n\t}\n\n}\n\nimpl ScintillaInner for ScintillaCodeEditor {\n\n fn new() -> Box<dyn crate::Scintilla> {\n\n Self::with_content(\"\").into_scintilla()\t\n\n }\n\n fn set_margin_width(&mut self, index: usize, width: isize) {\n", "file_path": "src/imp/code_editor/mod.rs", "rank": 99, "score": 31.94647795714429 } ]
Rust
core/http/src/unix.rs
pzmarzly/Rocket
a4677871796c7170cfbbf83d205a8758338762df
use std::io::{self, Read, Write}; use std::path::Path; use std::net::{Ipv4Addr, SocketAddr, SocketAddrV4}; use std::time::Duration; #[cfg(unix)] use std::os::unix::net::{UnixListener, UnixStream}; #[cfg(windows)] use uds_windows::{UnixListener, UnixStream}; use crate::hyper; use crate::hyper::net::{NetworkStream, NetworkListener}; use crate::hyper::Server; pub struct UnixSocketStream(pub UnixStream); impl Clone for UnixSocketStream { #[inline] fn clone(&self) -> UnixSocketStream { UnixSocketStream(self.0.try_clone().unwrap()) } } impl NetworkStream for UnixSocketStream { #[inline] fn peer_addr(&mut self) -> io::Result<SocketAddr> { self.0.peer_addr() .map(|_| SocketAddr::V4(SocketAddrV4::new(Ipv4Addr::new(0, 0, 0, 0), 0))) } #[inline] fn set_read_timeout(&self, dur: Option<Duration>) -> io::Result<()> { self.0.set_read_timeout(dur) } #[inline] fn set_write_timeout(&self, dur: Option<Duration>) -> io::Result<()> { self.0.set_write_timeout(dur) } } impl Read for UnixSocketStream { #[inline] fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> { self.0.read(buf) } } impl Write for UnixSocketStream { #[inline] fn write(&mut self, msg: &[u8]) -> io::Result<usize> { self.0.write(msg) } #[inline] fn flush(&mut self) -> io::Result<()> { self.0.flush() } } #[derive(Debug)] pub struct UnixSocketListener(pub UnixListener); impl Clone for UnixSocketListener { #[inline] fn clone(&self) -> UnixSocketListener { UnixSocketListener(self.0.try_clone().unwrap()) } } impl UnixSocketListener { pub fn new<P: AsRef<Path>>(addr: P) -> hyper::Result<UnixSocketListener> { Ok(UnixSocketListener(UnixListener::bind(addr)?)) } } impl NetworkListener for UnixSocketListener { type Stream = UnixSocketStream; #[inline] fn accept(&mut self) -> hyper::Result<UnixSocketStream> { Ok(UnixSocketStream(self.0.accept()?.0)) } #[inline] fn local_addr(&mut self) -> io::Result<SocketAddr> { self.0.local_addr().map(|_| { SocketAddr::V4( SocketAddrV4::new( Ipv4Addr::new(0, 0, 0, 0), 0 ) ) }) } } pub struct 
UnixSocketServer; impl UnixSocketServer { pub fn http<P>(path: P) -> hyper::Result<Server<UnixSocketListener>> where P: AsRef<Path> { UnixSocketListener::new(path).map(Server::new) } } #[cfg(feature = "tls")] mod tls { use super::*; use crate::hyper::{self, net::SslServer}; use crate::tls::{TlsStream, ServerSession, TlsServer, WrappedStream}; use crate::unix::UnixSocketStream; pub type UnixHttpsStream = WrappedStream<ServerSession, UnixSocketStream>; impl UnixSocketServer { pub fn https<P, S>(path: P, ssl: S) -> hyper::Result<Server<HttpsListener<S>>> where P: AsRef<Path>, S: SslServer<UnixSocketStream> + Clone { HttpsListener::new(path, ssl).map(Server::new) } } #[derive(Clone)] pub struct HttpsListener<S: SslServer<UnixSocketStream>> { listener: UnixSocketListener, ssl: S, } impl<S: SslServer<UnixSocketStream>> HttpsListener<S> { pub fn new<P>(path: P, ssl: S) -> hyper::Result<HttpsListener<S>> where P: AsRef<Path> { UnixSocketListener::new(path) .map(|listener| HttpsListener { listener, ssl }) } } impl<S> NetworkListener for HttpsListener<S> where S: SslServer<UnixSocketStream> + Clone { type Stream = S::Stream; #[inline] fn accept(&mut self) -> hyper::Result<S::Stream> { self.listener.accept().and_then(|s| self.ssl.wrap_server(s)) } #[inline] fn local_addr(&mut self) -> io::Result<SocketAddr> { self.listener.local_addr() } fn set_read_timeout(&mut self, duration: Option<Duration>) { self.listener.set_read_timeout(duration) } fn set_write_timeout(&mut self, duration: Option<Duration>) { self.listener.set_write_timeout(duration) } } impl SslServer<UnixSocketStream> for TlsServer { type Stream = WrappedStream<ServerSession, UnixSocketStream>; fn wrap_server( &self, stream: UnixSocketStream ) -> hyper::Result<WrappedStream<ServerSession, UnixSocketStream>> { let tls = TlsStream::new(rustls::ServerSession::new(&self.cfg), stream); Ok(WrappedStream::new(tls)) } } } #[cfg(feature = "tls")] pub use self::tls::*;
use std::io::{self, Read, Write}; use std::path::Path; use std::net::{Ipv4Addr, SocketAddr, SocketAddrV4}; use std::time::Duration; #[cfg(unix)] use std::os::unix::net::{UnixListener, UnixStream}; #[cfg(windows)] use uds_windows::{UnixListener, UnixStream}; use crate::hyper; use crate::hyper::net::{NetworkStream, NetworkListener}; use crate::hyper::Server; pub struct UnixSocketStream(pub UnixStream); impl Clone for UnixSocketStream { #[inline] fn clone(&self) -> UnixSocketStream { UnixSocketStream(self.0.try_clone().unwrap()) } } impl NetworkStream for UnixSocketStream { #[inline] fn peer_addr(&mut self) -> io::Result<SocketAddr> { self.0.peer_addr() .map(|_| SocketAddr::V4(SocketAddrV4::new(Ipv4Addr::new(0, 0, 0, 0), 0))) } #[inline] fn set_read_timeout(&self, dur: Option<Duration>) -> io::Result<()> { self.0.set_read_timeout(dur) } #[inline] fn set_write_timeout(&self, dur: Option<Duration>) -> io::Result<()> { self.0.set_write_timeout(dur) } } impl Read for UnixSocketStream { #[inline] fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> { self.0.read(buf) } } impl Write for UnixSocketStream { #[inline] fn write(&mut self, msg: &[u8]) -> io::Result<usize> { self.0.write(msg) } #[inline] fn flush(&mut self) -> io::Result<()> { self.0.flush() } } #[derive(Debug)] pub struct UnixSocketListener(pub UnixListener); impl Clone for UnixSocketListener { #[inline] fn clone(&self) -> UnixSocketListener { UnixSocketListener(self.0.try_clone().unwrap()) } } impl UnixSocketListener { pub fn new<P: AsRef<Path>>(addr: P) -> hyper::Result<UnixSocketListener> { Ok(UnixSocketListener(UnixListener::bind(addr)?)) } } impl NetworkListener for UnixSocketListener { type Stream = UnixSocketStream; #[inline] fn accept(&mut self) -> hyper::Result<UnixSocketStream> { Ok(UnixSocketStream(self.0.accept()?.0)) } #[inline] fn local_addr(&mut self) -> io::Result<SocketAddr> { self.0.local_addr().map(|_| { SocketAddr::V4( SocketAddrV4::new( Ipv4Addr::new(0, 0, 0, 0), 0 ) ) }) } } pub struct 
UnixSocketServer; impl UnixSocketServer { pub fn http<P>(path: P) -> hyper::Result<Server<UnixSocketListener>> where P: AsRef<Path> { UnixSocketListener::new(path).map(Server::new) } } #[cfg(feature = "tls")] mod tls { use super::*; use crate::hyper::{self, net::SslServer}; use crate::tls::{TlsStream, ServerSession, TlsServer, WrappedStream}; use crate::unix::UnixSocketStream; pub type UnixHttpsStream = WrappedStream<ServerSession, UnixSocketStream>; impl UnixSocketServer { pub fn https<P, S>(path: P, ssl: S) -> hyper::Result<Server<HttpsListener<S>>> where P: AsRef<Path>, S: SslServer<UnixSocketStream> + Clone { HttpsListener::new(path, ssl).map(Server::new) } } #[derive(Clone)] pub struct HttpsListener<S: SslServer<UnixSocketStream>> { listener: UnixSocketListener, ssl: S, } impl<S: SslServer<UnixSocketStream>> HttpsListener<S> {
} impl<S> NetworkListener for HttpsListener<S> where S: SslServer<UnixSocketStream> + Clone { type Stream = S::Stream; #[inline] fn accept(&mut self) -> hyper::Result<S::Stream> { self.listener.accept().and_then(|s| self.ssl.wrap_server(s)) } #[inline] fn local_addr(&mut self) -> io::Result<SocketAddr> { self.listener.local_addr() } fn set_read_timeout(&mut self, duration: Option<Duration>) { self.listener.set_read_timeout(duration) } fn set_write_timeout(&mut self, duration: Option<Duration>) { self.listener.set_write_timeout(duration) } } impl SslServer<UnixSocketStream> for TlsServer { type Stream = WrappedStream<ServerSession, UnixSocketStream>; fn wrap_server( &self, stream: UnixSocketStream ) -> hyper::Result<WrappedStream<ServerSession, UnixSocketStream>> { let tls = TlsStream::new(rustls::ServerSession::new(&self.cfg), stream); Ok(WrappedStream::new(tls)) } } } #[cfg(feature = "tls")] pub use self::tls::*;
pub fn new<P>(path: P, ssl: S) -> hyper::Result<HttpsListener<S>> where P: AsRef<Path> { UnixSocketListener::new(path) .map(|listener| HttpsListener { listener, ssl }) }
function_block-full_function
[ { "content": "pub fn kill_stream(stream: &mut BodyReader) {\n\n // Only do the expensive reading if we're not sure we're done.\n\n use self::HttpReader::*;\n\n match *stream {\n\n SizedReader(_, n) | ChunkedReader(_, Some(n)) if n > 0 => { /* continue */ },\n\n _ => return\n\n };\n\n\n\n // Take <= 1k from the stream. If there might be more data, force close.\n\n const FLUSH_LEN: u64 = 1024;\n\n match io::copy(&mut stream.take(FLUSH_LEN), &mut io::sink()) {\n\n Ok(FLUSH_LEN) | Err(_) => {\n\n warn_!(\"Data left unread. Force closing network stream.\");\n\n let (_, network) = stream.get_mut().get_mut();\n\n if let Err(e) = network.close(Shutdown::Read) {\n\n error_!(\"Failed to close network stream: {:?}\", e);\n\n }\n\n }\n\n Ok(n) => debug!(\"flushed {} unread bytes\", n)\n\n }\n\n}\n\n\n\nimpl Drop for DataStream {\n\n fn drop(&mut self) {\n\n kill_stream(&mut self.0.get_mut().1);\n\n }\n\n}\n", "file_path": "core/lib/src/data/data_stream.rs", "rank": 0, "score": 342380.32382423716 }, { "content": "#[parser]\n\npub fn media_type<'a>(input: &mut Input<'a>) -> Result<'a, MediaType> {\n\n let (top, sub, params) = {\n\n let top = (take_some_while_until(is_valid_token, '/')?, eat('/')?).0;\n\n let sub = take_some_while_until(is_valid_token, ';')?;\n\n let params = series(true, ';', is_whitespace, |i| {\n\n media_param(i).map(|(k, v)| (k.coerce_lifetime(), v.coerce_lifetime()))\n\n })?;\n\n\n\n (top.coerce_lifetime(), sub.coerce_lifetime(), params)\n\n };\n\n\n\n MediaType {\n\n source: Source::Custom(Cow::Owned(input.source().to_string())),\n\n top, sub, params\n\n }\n\n}\n\n\n", "file_path": "core/http/src/parse/media_type.rs", "rank": 1, "score": 279740.0455861469 }, { "content": "pub fn routes_macro(input: TokenStream) -> TokenStream {\n\n prefixed_vec(ROUTE_STRUCT_PREFIX, input, quote!(::rocket::Route))\n\n}\n\n\n", "file_path": "core/codegen/src/bang/mod.rs", "rank": 2, "score": 261773.34052324184 }, { "content": "pub fn uri_macro(input: TokenStream) -> 
TokenStream {\n\n uri::_uri_macro(input)\n\n .map_err(|diag| diag.emit())\n\n .unwrap_or_else(|_| quote!(()).into())\n\n}\n\n\n", "file_path": "core/codegen/src/bang/mod.rs", "rank": 3, "score": 261773.34052324184 }, { "content": "pub fn catchers_macro(input: TokenStream) -> TokenStream {\n\n prefixed_vec(CATCH_STRUCT_PREFIX, input, quote!(::rocket::Catcher))\n\n}\n\n\n", "file_path": "core/codegen/src/bang/mod.rs", "rank": 4, "score": 261773.34052324184 }, { "content": "pub fn uri_internal_macro(input: TokenStream) -> TokenStream {\n\n uri::_uri_internal_macro(input)\n\n .map_err(|diag| diag.emit())\n\n .unwrap_or_else(|_| quote!(()).into())\n\n}\n", "file_path": "core/codegen/src/bang/mod.rs", "rank": 5, "score": 259056.69205885535 }, { "content": "#[doc(hidden)]\n\npub fn assert_ignorable<P: UriPart, T: Ignorable<P>>() { }\n\n\n\n#[cfg(test)]\n\nmod uri_display_tests {\n\n use std::path;\n\n use uri::{FromUriParam, UriDisplay, Query, Path};\n\n\n\n macro_rules! uri_display {\n\n (<$P:ident, $Target:ty> $source:expr) => ({\n\n let tmp = $source;\n\n let target = <$Target as FromUriParam<$P, _>>::from_uri_param(tmp);\n\n format!(\"{}\", &target as &dyn UriDisplay<$P>)\n\n })\n\n }\n\n\n\n macro_rules! 
assert_display {\n\n (<$P:ident, $Target:ty> $source:expr, $expected:expr) => ({\n\n assert_eq!(uri_display!(<$P, $Target> $source), $expected);\n\n })\n\n }\n", "file_path": "core/http/src/uri/uri_display.rs", "rank": 6, "score": 251711.4337490612 }, { "content": "#[inline(always)]\n\npub fn is_pchar(c: u8) -> bool {\n\n PATH_CHARS[c as usize] == c\n\n}\n\n\n", "file_path": "core/http/src/parse/uri/tables.rs", "rank": 7, "score": 248061.99927405355 }, { "content": "#[inline(always)]\n\npub fn is_pchar_or_rchar(c: u8) -> bool {\n\n PATH_CHARS[c as usize] != 0\n\n}\n\n\n\nconst REG_CHARS: [u8; 256] = [\n\n // 0 1 2 3 4 5 6 7 8 9\n\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // x\n\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 1x\n\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 2x\n\n 0, 0, 0, b'!', 0, 0, b'$', 0, b'&', b'\\'', // 3x\n\n b'(', b')', b'*', b'+', b',', b'-', b'.', 0, b'0', b'1', // 4x\n\n b'2', b'3', b'4', b'5', b'6', b'7', b'8', b'9', 0, b';', // 5x\n\n 0, b'=', 0, 0, 0, b'A', b'B', b'C', b'D', b'E', // 6x\n\n b'F', b'G', b'H', b'I', b'J', b'K', b'L', b'M', b'N', b'O', // 7x\n\n b'P', b'Q', b'R', b'S', b'T', b'U', b'V', b'W', b'X', b'Y', // 8x\n\n b'Z', 0, 0, 0, 0, b'_', 0, b'a', b'b', b'c', // 9x\n\n b'd', b'e', b'f', b'g', b'h', b'i', b'j', b'k', b'l', b'm', // 10x\n\n b'n', b'o', b'p', b'q', b'r', b's', b't', b'u', b'v', b'w', // 11x\n\n b'x', b'y', b'z', 0, 0, 0, b'~', 0, 0, 0, // 12x\n\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 13x\n", "file_path": "core/http/src/parse/uri/tables.rs", "rank": 8, "score": 245699.57020444097 }, { "content": "#[inline(always)]\n\npub fn is_reg_name_char(c: u8) -> bool {\n\n REG_CHARS[c as usize] != 0\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n fn test_char_table(table: &[u8]) {\n\n for (i, &v) in table.iter().enumerate() {\n\n if v != 0 && v != 1 {\n\n assert_eq!(i, v as usize);\n\n }\n\n }\n\n }\n\n\n\n #[test]\n\n fn check_tables() {\n\n test_char_table(&super::PATH_CHARS[..]);\n\n test_char_table(&super::REG_CHARS[..]);\n\n }\n\n}\n", "file_path": 
"core/http/src/parse/uri/tables.rs", "rank": 9, "score": 243413.33953095082 }, { "content": "/// Removes the file path's extension or does nothing if there is none.\n\nfn remove_extension<P: AsRef<Path>>(path: P) -> PathBuf {\n\n let path = path.as_ref();\n\n let stem = match path.file_stem() {\n\n Some(stem) => stem,\n\n None => return path.to_path_buf()\n\n };\n\n\n\n match path.parent() {\n\n Some(parent) => parent.join(stem),\n\n None => PathBuf::from(stem)\n\n }\n\n}\n\n\n", "file_path": "contrib/lib/src/templates/context.rs", "rank": 10, "score": 220089.55336201965 }, { "content": "#[proc_macro]\n\npub fn routes(input: TokenStream) -> TokenStream {\n\n emit!(bang::routes_macro(input))\n\n}\n\n\n\n/// Generates a [`Vec`] of [`Catcher`]s from a set of catcher paths.\n\n///\n\n/// The `catchers!` macro expands a list of catcher paths into a [`Vec`] of\n\n/// their corresponding [`Catcher`] structures. For example, given the following\n\n/// catchers:\n\n///\n\n/// ```rust\n\n/// # #![feature(proc_macro_hygiene, decl_macro)]\n\n/// # #[macro_use] extern crate rocket;\n\n/// #\n\n/// #[catch(404)]\n\n/// fn not_found() { /* .. */ }\n\n///\n\n/// mod inner {\n\n/// #[catch(400)]\n\n/// pub fn unauthorized() { /* .. */ }\n", "file_path": "core/codegen/src/lib.rs", "rank": 11, "score": 219969.9428634818 }, { "content": "#[proc_macro]\n\npub fn catchers(input: TokenStream) -> TokenStream {\n\n emit!(bang::catchers_macro(input))\n\n}\n\n\n\n/// Type safe generation of route URIs.\n\n///\n\n/// The `uri!` macro creates a type-safe, URL safe URI given a route and values\n\n/// for the route's URI parameters. 
The inputs to the macro are the path to a\n\n/// route, a colon, and one argument for each dynamic parameter (parameters in\n\n/// `<>`) in the route's path and query.\n\n///\n\n/// For example, for the following route:\n\n///\n\n/// ```rust\n\n/// # #![feature(proc_macro_hygiene, decl_macro)]\n\n/// # #[macro_use] extern crate rocket;\n\n/// #\n\n/// #[get(\"/person/<name>?<age>\")]\n\n/// fn person(name: String, age: Option<u8>) -> String {\n\n/// # \"\".into() /*\n", "file_path": "core/codegen/src/lib.rs", "rank": 12, "score": 219969.9428634818 }, { "content": "#[proc_macro]\n\npub fn uri(input: TokenStream) -> TokenStream {\n\n emit!(bang::uri_macro(input))\n\n}\n\n\n", "file_path": "core/codegen/src/lib.rs", "rank": 13, "score": 219969.9428634818 }, { "content": "pub fn parse_simple_toml_value(mut input: &str) -> StdResult<Value, String> {\n\n parse!(value: &mut input).map_err(|e| e.to_string())\n\n}\n\n\n\n/// A simple wrapper over a `Value` reference with a custom implementation of\n\n/// `Display`. This is used to log config values at initialization.\n\ncrate struct LoggedValue<'a>(pub &'a Value);\n\n\n\nimpl<'a> fmt::Display for LoggedValue<'a> {\n\n #[inline]\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n use config::Value::*;\n\n match *self.0 {\n\n String(_) | Integer(_) | Float(_) | Boolean(_) | Datetime(_) | Array(_) => {\n\n self.0.fmt(f)\n\n }\n\n Table(ref map) => {\n\n write!(f, \"{{ \")?;\n\n for (i, (key, val)) in map.iter().enumerate() {\n\n write!(f, \"{} = {}\", key, LoggedValue(val))?;\n", "file_path": "core/lib/src/config/toml_ext.rs", "rank": 14, "score": 218300.53755112266 }, { "content": "#[proc_macro_derive(FromForm, attributes(form))]\n\npub fn derive_from_form(input: TokenStream) -> TokenStream {\n\n emit!(derive::from_form::derive_from_form(input))\n\n}\n\n\n\n/// Derive for the [`Responder`] trait.\n\n///\n\n/// The [`Responder`] derive can be applied to enums and structs with named\n\n/// fields. 
When applied to enums, variants must have at least one field. When\n\n/// applied to structs, the struct must have at least one field.\n\n///\n\n/// ```rust\n\n/// # #[macro_use] extern crate rocket;\n\n/// # use std::fs::File;\n\n/// # use rocket::http::ContentType;\n\n/// # type OtherResponder = MyResponderA;\n\n/// #\n\n/// #[derive(Responder)]\n\n/// enum MyResponderA {\n\n/// A(String),\n\n/// B(File, ContentType),\n", "file_path": "core/codegen/src/lib.rs", "rank": 15, "score": 217853.96186415092 }, { "content": "#[proc_macro_derive(Responder, attributes(response))]\n\npub fn derive_responder(input: TokenStream) -> TokenStream {\n\n emit!(derive::responder::derive_responder(input))\n\n}\n\n\n\n/// Derive for the [`UriDisplay<Query>`] trait.\n\n///\n\n/// The [`UriDisplay<Query>`] derive can be applied to enums and structs. When\n\n/// applied to enums, variants must have at least one field. When applied to\n\n/// structs, the struct must have at least one field.\n\n///\n\n/// ```rust\n\n/// # #[macro_use] extern crate rocket;\n\n/// #[derive(UriDisplayQuery)]\n\n/// enum Kind {\n\n/// A(String),\n\n/// B(usize),\n\n/// }\n\n///\n\n/// #[derive(UriDisplayQuery)]\n\n/// struct MyStruct {\n", "file_path": "core/codegen/src/lib.rs", "rank": 16, "score": 217853.96186415092 }, { "content": "type RawInput<'a> = IndexedInput<'a, [u8]>;\n\n\n", "file_path": "core/http/src/parse/uri/mod.rs", "rank": 17, "score": 217405.4024979882 }, { "content": "#[proc_macro_derive(FromFormValue, attributes(form))]\n\npub fn derive_from_form_value(input: TokenStream) -> TokenStream {\n\n emit!(derive::from_form_value::derive_from_form_value(input))\n\n}\n\n\n\n/// Derive for the [`FromForm`] trait.\n\n///\n\n/// The [`FromForm`] derive can be applied to structures with named fields:\n\n///\n\n/// ```rust\n\n/// # #[macro_use] extern crate rocket;\n\n/// #\n\n/// #[derive(FromForm)]\n\n/// struct MyStruct {\n\n/// field: usize,\n\n/// other: String\n\n/// }\n\n/// ```\n\n///\n\n/// 
Each field's type is required to implement [`FromFormValue`].\n\n///\n", "file_path": "core/codegen/src/lib.rs", "rank": 18, "score": 215798.0932229553 }, { "content": "pub fn derive_responder(input: TokenStream) -> TokenStream {\n\n DeriveGenerator::build_for(input, quote!(impl<'__r> ::rocket::response::Responder<'__r>))\n\n .generic_support(GenericSupport::Lifetime)\n\n .data_support(DataSupport::Struct | DataSupport::Enum)\n\n .replace_generic(0, 0)\n\n .validate_generics(|_, generics| match generics.lifetimes().count() > 1 {\n\n true => Err(generics.span().error(\"only one lifetime is supported\")),\n\n false => Ok(())\n\n })\n\n .validate_fields(|_, fields| match fields.is_empty() {\n\n true => return Err(fields.span().error(\"need at least one field\")),\n\n false => Ok(())\n\n })\n\n .function(|_, inner| quote! {\n\n fn respond_to(\n\n self,\n\n __req: &::rocket::Request\n\n ) -> ::rocket::response::Result<'__r> {\n\n #inner\n\n }\n", "file_path": "core/codegen/src/derive/responder.rs", "rank": 19, "score": 215798.0932229553 }, { "content": "pub fn derive_from_form(input: TokenStream) -> TokenStream {\n\n let form_error = quote!(::rocket::request::FormParseError);\n\n DeriveGenerator::build_for(input, quote!(impl<'__f> ::rocket::request::FromForm<'__f>))\n\n .generic_support(GenericSupport::Lifetime | GenericSupport::Type)\n\n .replace_generic(0, 0)\n\n .data_support(DataSupport::NamedStruct)\n\n .map_type_generic(|_, ident, _| quote! {\n\n #ident : ::rocket::request::FromFormValue<'__f>\n\n })\n\n .validate_generics(|_, generics| match generics.lifetimes().count() > 1 {\n\n true => Err(generics.span().error(\"only one lifetime is supported\")),\n\n false => Ok(())\n\n })\n\n .validate_struct(validate_struct)\n\n .function(|_, inner| quote! 
{\n\n type Error = ::rocket::request::FormParseError<'__f>;\n\n\n\n fn from_form(\n\n __items: &mut ::rocket::request::FormItems<'__f>,\n\n __strict: bool,\n", "file_path": "core/codegen/src/derive/from_form.rs", "rank": 20, "score": 215798.09322295533 }, { "content": "#[doc(hidden)]\n\n#[proc_macro]\n\npub fn rocket_internal_uri(input: TokenStream) -> TokenStream {\n\n emit!(bang::uri_internal_macro(input))\n\n}\n", "file_path": "core/codegen/src/lib.rs", "rank": 21, "score": 215798.0932229553 }, { "content": "pub fn parse_media_type(input: &str) -> Result<MediaType> {\n\n parse!(media_type: &mut input.into())\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use MediaType;\n\n use super::parse_media_type;\n\n\n\n macro_rules! assert_no_parse {\n\n ($string:expr) => ({\n\n let result: Result<_, _> = parse_media_type($string).into();\n\n if result.is_ok() {\n\n panic!(\"{:?} parsed unexpectedly.\", $string)\n\n }\n\n });\n\n }\n\n\n\n macro_rules! assert_parse {\n\n ($string:expr) => ({\n", "file_path": "core/http/src/parse/media_type.rs", "rank": 22, "score": 214183.92788245407 }, { "content": "#[cfg(feature = \"database_attribute\")]\n\n#[proc_macro_attribute]\n\npub fn database(attr: TokenStream, input: TokenStream) -> TokenStream {\n\n ::database::database_attr(attr, input).unwrap_or_else(|diag| {\n\n diag.emit();\n\n TokenStream::new()\n\n })\n\n}\n", "file_path": "contrib/codegen/src/lib.rs", "rank": 23, "score": 213809.88837306996 }, { "content": "#[proc_macro_attribute]\n\npub fn catch(args: TokenStream, input: TokenStream) -> TokenStream {\n\n emit!(attribute::catch::catch_attribute(args, input))\n\n}\n\n\n\n/// Derive for the [`FromFormValue`] trait.\n\n///\n\n/// The [`FromFormValue`] derive can be applied to enums with nullary\n\n/// (zero-length) fields:\n\n///\n\n/// ```rust\n\n/// # #[macro_use] extern crate rocket;\n\n/// #\n\n/// #[derive(FromFormValue)]\n\n/// enum MyValue {\n\n/// First,\n\n/// Second,\n\n/// Third,\n\n/// }\n\n/// ```\n\n///\n", 
"file_path": "core/codegen/src/lib.rs", "rank": 24, "score": 213809.88837306996 }, { "content": "#[proc_macro_derive(UriDisplayQuery, attributes(form))]\n\npub fn derive_uri_display_query(input: TokenStream) -> TokenStream {\n\n emit!(derive::uri_display::derive_uri_display_query(input))\n\n}\n\n\n\n/// Derive for the [`UriDisplay<Path>`] trait.\n\n///\n\n/// The [`UriDisplay<Path>`] derive can only be applied to tuple structs with\n\n/// one field.\n\n///\n\n/// ```rust\n\n/// # #[macro_use] extern crate rocket;\n\n/// #[derive(UriDisplayPath)]\n\n/// struct Name(String);\n\n///\n\n/// #[derive(UriDisplayPath)]\n\n/// struct Age(usize);\n\n/// ```\n\n///\n\n/// The field's type is required to implement [`UriDisplay<Path>`].\n\n///\n\n/// The derive generates an implementation of the [`UriDisplay<Path>`] trait.\n\n/// The implementation calls [`Formatter::write_value()`] for the field.\n\n///\n\n/// [`UriDisplay<Path>`]: ../rocket/http/uri/trait.UriDisplay.html\n\n/// [`Formatter::write_value()`]: ../rocket/http/uri/struct.Formatter.html#method.write_value\n", "file_path": "core/codegen/src/lib.rs", "rank": 25, "score": 213799.72587260552 }, { "content": "#[proc_macro_derive(UriDisplayPath)]\n\npub fn derive_uri_display_path(input: TokenStream) -> TokenStream {\n\n emit!(derive::uri_display::derive_uri_display_path(input))\n\n}\n\n\n\n/// Generates a [`Vec`] of [`Route`]s from a set of route paths.\n\n///\n\n/// The `routes!` macro expands a list of route paths into a [`Vec`] of their\n\n/// corresponding [`Route`] structures. For example, given the following routes:\n\n///\n\n/// ```rust\n\n/// # #![feature(proc_macro_hygiene, decl_macro)]\n\n/// # #[macro_use] extern crate rocket;\n\n/// #\n\n/// #[get(\"/\")]\n\n/// fn index() { /* .. */ }\n\n///\n\n/// mod person {\n\n/// #[post(\"/hi/<person>\")]\n\n/// pub fn hello(person: String) { /* .. 
*/ }\n\n/// }\n", "file_path": "core/codegen/src/lib.rs", "rank": 26, "score": 213799.72587260552 }, { "content": "pub fn test_config(environment: Environment) {\n\n // Manually set the config environment variable. Rocket will initialize the\n\n // environment in `ignite()`. We'll read this back in the handler to config.\n\n ::std::env::set_var(\"ROCKET_ENV\", environment.to_string());\n\n\n\n let rocket = rocket::ignite()\n\n .attach(AdHoc::on_attach(\"Local Config\", |rocket| {\n\n println!(\"Attaching local config.\");\n\n let config = rocket.config().clone();\n\n Ok(rocket.manage(LocalConfig(config)))\n\n }))\n\n .mount(\"/\", routes![check_config]);\n\n\n\n let client = Client::new(rocket).unwrap();\n\n let response = client.get(\"/check_config\").dispatch();\n\n assert_eq!(response.status(), Status::Ok);\n\n}\n", "file_path": "examples/config/tests/common/mod.rs", "rank": 27, "score": 213554.51051508085 }, { "content": "pub fn derive_from_form_value(input: TokenStream) -> TokenStream {\n\n DeriveGenerator::build_for(input, quote!(impl<'__v> ::rocket::request::FromFormValue<'__v>))\n\n .generic_support(GenericSupport::None)\n\n .data_support(DataSupport::Enum)\n\n .validate_enum(|generator, data| {\n\n // This derive only works for variants that are nullary.\n\n for variant in data.variants() {\n\n if !variant.fields().is_empty() {\n\n return Err(variant.span().error(\"variants cannot have fields\"));\n\n }\n\n }\n\n\n\n // Emit a warning if the enum is empty.\n\n if data.variants.is_empty() {\n\n generator.input.span().warning(\"deriving for empty enum\").emit();\n\n }\n\n\n\n Ok(())\n\n })\n\n .function(|_, inner| quote! 
{\n", "file_path": "core/codegen/src/derive/from_form_value.rs", "rank": 28, "score": 211856.40143505513 }, { "content": "pub fn catch_attribute(args: TokenStream, input: TokenStream) -> TokenStream {\n\n _catch(args, input).unwrap_or_else(|d| { d.emit(); TokenStream::new() })\n\n}\n", "file_path": "core/codegen/src/attribute/catch.rs", "rank": 29, "score": 210191.04692406996 }, { "content": "#[allow(non_snake_case)]\n\npub fn derive_uri_display_path(input: TokenStream) -> TokenStream {\n\n let Path = quote!(::rocket::http::uri::Path);\n\n let UriDisplay = quote!(::rocket::http::uri::UriDisplay<#Path>);\n\n let Formatter = quote!(::rocket::http::uri::Formatter<#Path>);\n\n let FromUriParam = quote!(::rocket::http::uri::FromUriParam);\n\n\n\n let uri_display = DeriveGenerator::build_for(input.clone(), quote!(impl #UriDisplay))\n\n .data_support(DataSupport::TupleStruct)\n\n .generic_support(GenericSupport::Type | GenericSupport::Lifetime)\n\n .map_type_generic(move |_, ident, _| quote!(#ident : #UriDisplay))\n\n .validate_fields(|_, fields| match fields.count() {\n\n 1 => Ok(()),\n\n _ => Err(fields.span().error(EXACTLY_ONE_FIELD))\n\n })\n\n .function(move |_, inner| quote! 
{\n\n fn fmt(&self, f: &mut #Formatter) -> ::std::fmt::Result {\n\n #inner\n\n Ok(())\n\n }\n\n })\n", "file_path": "core/codegen/src/derive/uri_display.rs", "rank": 30, "score": 209965.80303686354 }, { "content": "#[allow(non_snake_case)]\n\npub fn derive_uri_display_query(input: TokenStream) -> TokenStream {\n\n let Query = quote!(::rocket::http::uri::Query);\n\n let UriDisplay = quote!(::rocket::http::uri::UriDisplay<#Query>);\n\n let Formatter = quote!(::rocket::http::uri::Formatter<#Query>);\n\n let FromUriParam = quote!(::rocket::http::uri::FromUriParam);\n\n\n\n let uri_display = DeriveGenerator::build_for(input.clone(), quote!(impl #UriDisplay))\n\n .data_support(DataSupport::Struct | DataSupport::Enum)\n\n .generic_support(GenericSupport::Type | GenericSupport::Lifetime)\n\n .validate_enum(validate_enum)\n\n .validate_struct(validate_struct)\n\n .map_type_generic(move |_, ident, _| quote!(#ident : #UriDisplay))\n\n .function(move |_, inner| quote! {\n\n fn fmt(&self, f: &mut #Formatter) -> ::std::fmt::Result {\n\n #inner\n\n Ok(())\n\n }\n\n })\n\n .try_map_field(|_, field| {\n\n let span = field.span().into();\n", "file_path": "core/codegen/src/derive/uri_display.rs", "rank": 31, "score": 209965.80303686354 }, { "content": "pub fn _catch(args: TokenStream, input: TokenStream) -> Result<TokenStream> {\n\n // Parse and validate all of the user's input.\n\n let catch = parse_params(TokenStream2::from(args), input)?;\n\n\n\n // Gather everything we'll need to generate the catcher.\n\n let user_catcher_fn = &catch.function;\n\n let mut user_catcher_fn_name = catch.function.ident.clone();\n\n let generated_struct_name = user_catcher_fn_name.prepend(CATCH_STRUCT_PREFIX);\n\n let generated_fn_name = user_catcher_fn_name.prepend(CATCH_FN_PREFIX);\n\n let (vis, status) = (&catch.function.vis, &catch.status);\n\n let status_code = status.0.code;\n\n\n\n // Variables names we'll use and reuse.\n\n define_vars_and_mods!(req, catcher, response, Request, 
Response);\n\n\n\n // Determine the number of parameters that will be passed in.\n\n let (fn_sig, inputs) = match catch.function.decl.inputs.len() {\n\n 0 => (quote!(fn() -> _), quote!()),\n\n 1 => (quote!(fn(&#Request) -> _), quote!(#req)),\n\n _ => return Err(catch.function.decl.inputs.span()\n", "file_path": "core/codegen/src/attribute/catch.rs", "rank": 32, "score": 207707.60707377808 }, { "content": "#[allow(non_snake_case)]\n\npub fn database_attr(attr: TokenStream, input: TokenStream) -> Result<TokenStream> {\n\n let invocation = parse_invocation(attr, input)?;\n\n\n\n // Store everything we're going to need to generate code.\n\n let conn_type = &invocation.connection_type;\n\n let name = &invocation.db_name;\n\n let guard_type = &invocation.type_name;\n\n let vis = &invocation.visibility;\n\n let pool_type = Ident::new(&format!(\"{}Pool\", guard_type), guard_type.span());\n\n let fairing_name = format!(\"'{}' Database Pool\", name);\n\n let span = conn_type.span().into();\n\n\n\n // A few useful paths.\n\n let databases = quote_spanned!(span => ::rocket_contrib::databases);\n\n let Poolable = quote_spanned!(span => #databases::Poolable);\n\n let r2d2 = quote_spanned!(span => #databases::r2d2);\n\n let request = quote!(::rocket::request);\n\n\n\n let generated_types = quote_spanned! 
{ span =>\n\n /// The request guard type.\n", "file_path": "contrib/codegen/src/database.rs", "rank": 33, "score": 207707.60707377808 }, { "content": "fn read_file_content(path: &str) -> Vec<u8> {\n\n let mut fp = File::open(&path).expect(&format!(\"Can't open {}\", path));\n\n let mut file_content = vec![];\n\n\n\n fp.read_to_end(&mut file_content).expect(&format!(\"Reading {} failed.\", path));\n\n file_content\n\n}\n\n\n", "file_path": "examples/static_files/src/tests.rs", "rank": 34, "score": 206725.38210599712 }, { "content": "#[parser]\n\nfn weighted_media_type<'a>(input: &mut Input<'a>) -> Result<'a, QMediaType> {\n\n let media_type = media_type()?;\n\n let weight = match media_type.params().next() {\n\n Some((\"q\", value)) if value.len() <= 5 => match value.parse::<f32>().ok() {\n\n Some(q) if q > 1. => return Err(pear_error!(\"q value must be <= 1\")),\n\n Some(q) if q < 0. => return Err(pear_error!(\"q value must be > 0\")),\n\n Some(q) => Some(q),\n\n None => return Err(pear_error!(\"invalid media-type weight\"))\n\n },\n\n _ => None\n\n };\n\n\n\n QMediaType(media_type, weight)\n\n}\n\n\n", "file_path": "core/http/src/parse/accept.rs", "rank": 35, "score": 201476.78458718775 }, { "content": "fn add_binding(to: &mut Vec<TokenStream2>, ident: &Ident, ty: &Type, expr: &Expr, source: Source) {\n\n let uri_mod = quote!(rocket::http::uri);\n\n let (span, ident_tmp) = (expr.span(), ident.prepend(\"tmp_\"));\n\n let from_uri_param = if source == Source::Query {\n\n quote_spanned!(span => #uri_mod::FromUriParam<#uri_mod::Query, _>)\n\n } else {\n\n quote_spanned!(span => #uri_mod::FromUriParam<#uri_mod::Path, _>)\n\n };\n\n\n\n to.push(quote_spanned!(span =>\n\n let #ident_tmp = #expr;\n\n let #ident = <#ty as #from_uri_param>::from_uri_param(#ident_tmp);\n\n ));\n\n}\n\n\n", "file_path": "core/codegen/src/bang/uri.rs", "rank": 36, "score": 201088.35859259852 }, { "content": "#[inline]\n\npub fn from_str(string: &str) -> Result<Uri, Error> {\n\n parse!(uri: 
&mut RawInput::from(string.as_bytes()))\n\n .map_err(|e| Error::from(string, e))\n\n}\n\n\n", "file_path": "core/http/src/parse/uri/mod.rs", "rank": 37, "score": 197813.09056659933 }, { "content": "#[get(\"/<file..>\", rank = 2)]\n\npub fn files(file: PathBuf) -> io::Result<NamedFile> {\n\n NamedFile::open(Path::new(\"static/\").join(file))\n\n}\n", "file_path": "examples/form_validation/src/files.rs", "rank": 38, "score": 196019.3499549342 }, { "content": "#[inline]\n\npub fn authority_from_str(string: &str) -> Result<Authority, Error> {\n\n parse!(authority_only: &mut RawInput::from(string.as_bytes()))\n\n .map_err(|e| Error::from(string, e))\n\n}\n\n\n", "file_path": "core/http/src/parse/uri/mod.rs", "rank": 39, "score": 195674.10339161588 }, { "content": "#[inline]\n\npub fn absolute_from_str(string: &str) -> Result<Absolute, Error> {\n\n parse!(absolute_only: &mut RawInput::from(string.as_bytes()))\n\n .map_err(|e| Error::from(string, e))\n\n}\n", "file_path": "core/http/src/parse/uri/mod.rs", "rank": 40, "score": 195674.10339161588 }, { "content": "#[inline]\n\npub fn origin_from_str(string: &str) -> Result<Origin, Error> {\n\n parse!(origin: &mut RawInput::from(string.as_bytes()))\n\n .map_err(|e| Error::from(string, e))\n\n}\n\n\n", "file_path": "core/http/src/parse/uri/mod.rs", "rank": 41, "score": 195674.10339161588 }, { "content": "#[get(\"/<_number>\")]\n\nfn get0(_number: u8) -> &'static str { \"0\" }\n\n\n", "file_path": "core/codegen/tests/route-ranking.rs", "rank": 42, "score": 195506.59519113973 }, { "content": "#[get(\"/<name>/<age>\", format = \"json\")]\n\nfn get_hello(name: String, age: u8) -> content::Json<String> {\n\n // In a real application, we'd use the JSON contrib type.\n\n let person = Person { name: name, age: age, };\n\n content::Json(serde_json::to_string(&person).unwrap())\n\n}\n\n\n\n// In a `POST` request and all other payload supporting request types, the\n\n// content type is matched against the `format` in the route 
attribute.\n\n//\n\n// Note that `content::Json` simply sets the content-type to `application/json`.\n\n// In a real application, we wouldn't use `serde_json` directly; instead, we'd\n\n// use `contrib::Json` to automatically serialize a type into JSON.\n", "file_path": "examples/content_types/src/main.rs", "rank": 43, "score": 193694.20420178943 }, { "content": "#[inline]\n\npub fn route_origin_from_str(string: &str) -> Result<Origin, Error> {\n\n parse!(rocket_route_origin: &mut RawInput::from(string.as_bytes()))\n\n .map_err(|e| Error::from(string, e))\n\n}\n\n\n", "file_path": "core/http/src/parse/uri/mod.rs", "rank": 44, "score": 193600.9237982452 }, { "content": "#[parser]\n\nfn quoted_string<'a>(input: &mut Input<'a>) -> Result<'a, IndexedStr<'a>> {\n\n eat('\"')?;\n\n\n\n let mut is_escaped = false;\n\n let inner = take_while(|c| {\n\n if is_escaped { is_escaped = false; return true; }\n\n if c == '\\\\' { is_escaped = true; return true; }\n\n c != '\"'\n\n })?;\n\n\n\n eat('\"')?;\n\n inner\n\n}\n\n\n", "file_path": "core/http/src/parse/media_type.rs", "rank": 45, "score": 189172.63081635267 }, { "content": "pub trait ReadExt: io::Read {\n\n fn read_max(&mut self, mut buf: &mut [u8]) -> io::Result<usize> {\n\n let start_len = buf.len();\n\n while !buf.is_empty() {\n\n match self.read(buf) {\n\n Ok(0) => break,\n\n Ok(n) => { let tmp = buf; buf = &mut tmp[n..]; }\n\n Err(ref e) if e.kind() == io::ErrorKind::Interrupted => {}\n\n Err(e) => return Err(e),\n\n }\n\n }\n\n\n\n Ok(start_len - buf.len())\n\n }\n\n}\n\n\n\nimpl<T: io::Read> ReadExt for T { }\n", "file_path": "core/lib/src/ext.rs", "rank": 46, "score": 188091.90668608423 }, { "content": "#[post(\"/<age>\", format = \"plain\", data = \"<name_data>\")]\n\nfn post_hello(age: u8, name_data: Data) -> io::Result<content::Json<String>> {\n\n let mut name = String::with_capacity(32);\n\n name_data.open().take(32).read_to_string(&mut name)?;\n\n let person = Person { name: name, age: age, };\n\n 
Ok(content::Json(serde_json::to_string(&person).unwrap()))\n\n}\n\n\n", "file_path": "examples/content_types/src/main.rs", "rank": 47, "score": 183218.74666363778 }, { "content": "#[parser]\n\nfn media_param<'a>(input: &mut Input<'a>) -> Result<'a, (IndexedStr<'a>, IndexedStr<'a>)> {\n\n let key = (take_some_while_until(is_valid_token, '=')?, eat('=')?).0;\n\n let value = switch! {\n\n peek('\"') => quoted_string()?,\n\n _ => take_some_while_until(is_valid_token, ';')?\n\n };\n\n\n\n (key, value)\n\n}\n\n\n", "file_path": "core/http/src/parse/media_type.rs", "rank": 48, "score": 181275.82385176572 }, { "content": "#[post(\"/a/<path..>\")]\n\nfn segments(path: PathBuf) { }\n\n\n", "file_path": "core/codegen/tests/typed-uris.rs", "rank": 49, "score": 178995.95046762656 }, { "content": "#[get(\"/big_file\")]\n\nfn file() -> io::Result<Stream<File>> {\n\n File::open(FILENAME).map(|file| Stream::from(file))\n\n}\n\n\n", "file_path": "examples/stream/src/main.rs", "rank": 50, "score": 175655.4027248423 }, { "content": "#[get(\"/<_n>\")]\n\nfn get0b(_n: u8) { }\n\n\n", "file_path": "core/codegen/tests/route-ranking.rs", "rank": 51, "score": 174622.8131257435 }, { "content": "#[get(\"/\")]\n\nfn root() -> content::Plain<Stream<LimitedRepeat>> {\n\n content::Plain(Stream::from(repeat('a' as u8).take(25000)))\n\n}\n\n\n", "file_path": "examples/stream/src/main.rs", "rank": 52, "score": 174232.16674510663 }, { "content": "/// Retrieves the database configuration for the database named `name`.\n\n///\n\n/// This function is primarily used by the code generated by the `#[database]`\n\n/// attribute.\n\n///\n\n/// # Example\n\n///\n\n/// Consider the following configuration:\n\n///\n\n/// ```toml\n\n/// [global.databases]\n\n/// my_db = { url = \"db/db.sqlite\", pool_size = 25 }\n\n/// my_other_db = { url = \"mysql://root:root@localhost/database\" }\n\n/// ```\n\n///\n\n/// The following example uses `database_config` to retrieve the configurations\n\n/// for the `my_db` and 
`my_other_db` databases:\n\n///\n\n/// ```rust\n\n/// # extern crate rocket;\n\n/// # extern crate rocket_contrib;\n\n/// #\n\n/// # use std::{collections::BTreeMap, mem::drop};\n\n/// # use rocket::{fairing::AdHoc, config::{Config, Environment, Value}};\n\n/// use rocket_contrib::databases::{database_config, ConfigError};\n\n///\n\n/// # let mut databases = BTreeMap::new();\n\n/// #\n\n/// # let mut my_db = BTreeMap::new();\n\n/// # my_db.insert(\"url\".to_string(), Value::from(\"db/db.sqlite\"));\n\n/// # my_db.insert(\"pool_size\".to_string(), Value::from(25));\n\n/// #\n\n/// # let mut my_other_db = BTreeMap::new();\n\n/// # my_other_db.insert(\"url\".to_string(),\n\n/// # Value::from(\"mysql://root:root@localhost/database\"));\n\n/// #\n\n/// # databases.insert(\"my_db\".to_string(), Value::from(my_db));\n\n/// # databases.insert(\"my_other_db\".to_string(), Value::from(my_other_db));\n\n/// #\n\n/// # let config = Config::build(Environment::Development)\n\n/// # .extra(\"databases\", databases)\n\n/// # .expect(\"custom config okay\");\n\n/// #\n\n/// # rocket::custom(config).attach(AdHoc::on_attach(\"Testing\", |rocket| {\n\n/// # {\n\n/// let config = database_config(\"my_db\", rocket.config()).unwrap();\n\n/// assert_eq!(config.url, \"db/db.sqlite\");\n\n/// assert_eq!(config.pool_size, 25);\n\n///\n\n/// let other_config = database_config(\"my_other_db\", rocket.config()).unwrap();\n\n/// assert_eq!(other_config.url, \"mysql://root:root@localhost/database\");\n\n///\n\n/// let error = database_config(\"invalid_db\", rocket.config()).unwrap_err();\n\n/// assert_eq!(error, ConfigError::MissingKey);\n\n/// # }\n\n/// #\n\n/// # Ok(rocket)\n\n/// # }));\n\n/// ```\n\npub fn database_config<'a>(\n\n name: &str,\n\n from: &'a config::Config\n\n) -> Result<DatabaseConfig<'a>, ConfigError> {\n\n // Find the first `databases` config that's a table with a key of 'name'\n\n // equal to `name`.\n\n let connection_config = from.get_table(\"databases\")\n\n 
.map_err(|_| ConfigError::MissingTable)?\n\n .get(name)\n\n .ok_or(ConfigError::MissingKey)?\n\n .as_table()\n\n .ok_or(ConfigError::MalformedConfiguration)?;\n\n\n\n let maybe_url = connection_config.get(\"url\")\n\n .ok_or(ConfigError::MissingUrl)?;\n\n\n\n let url = maybe_url.as_str().ok_or(ConfigError::MalformedUrl)?;\n\n\n\n let pool_size = connection_config.get(\"pool_size\")\n\n .and_then(Value::as_integer)\n", "file_path": "contrib/lib/src/databases.rs", "rank": 53, "score": 173890.50259651133 }, { "content": "/// Alias to [`Rocket::ignite()`] Creates a new instance of `Rocket`.\n\npub fn ignite() -> Rocket {\n\n Rocket::ignite()\n\n}\n\n\n", "file_path": "core/lib/src/lib.rs", "rank": 54, "score": 173881.07898974605 }, { "content": "/// Trait implemented by types that can be ignored in `uri!`.\n\n///\n\n/// When a parameter is explicitly ignored in `uri!` by supplying `_` as the\n\n/// parameter's value, that parameter's type is required to implement this\n\n/// trait for the corresponding `UriPart`.\n\n///\n\n/// ```rust\n\n/// # #![feature(proc_macro_hygiene, decl_macro)]\n\n/// # #[macro_use] extern crate rocket;\n\n/// #[get(\"/item/<id>?<track>\")]\n\n/// fn get_item(id: i32, track: Option<u8>) { /* .. */ }\n\n///\n\n/// // Ignore the `track` parameter: `Option<u8>` must be `Ignorable`.\n\n/// uri!(get_item: 100, _);\n\n/// uri!(get_item: id = 100, track = _);\n\n///\n\n/// // Provide a value for `track`.\n\n/// uri!(get_item: 100, 4);\n\n/// uri!(get_item: id = 100, track = 4);\n\n/// ```\n\n///\n\n/// # Implementations\n\n///\n\n/// Only `Option<T>` and `Result<T, E>` implement this trait. 
You may implement\n\n/// this trait for your own ignorable types as well:\n\n///\n\n/// ```rust\n\n/// # #[macro_use] extern crate rocket;\n\n/// use rocket::http::uri::{Ignorable, Query};\n\n///\n\n/// # struct MyType;\n\n/// impl Ignorable<Query> for MyType { }\n\n/// ```\n\npub trait Ignorable<P: UriPart> { }\n\n\n\nimpl<T> Ignorable<Query> for Option<T> { }\n\nimpl<T, E> Ignorable<Query> for Result<T, E> { }\n\n\n", "file_path": "core/http/src/uri/uri_display.rs", "rank": 55, "score": 173444.0437664002 }, { "content": "/// Trait implemented by types that can be displayed as part of a URI in\n\n/// [`uri!`].\n\n///\n\n/// Types implementing this trait can be displayed in a URI-safe manner. Unlike\n\n/// `Display`, the string written by a `UriDisplay` implementation must be\n\n/// URI-safe. In practice, this means that the string must either be\n\n/// percent-encoded or consist only of characters that are alphanumeric, \"-\",\n\n/// \".\", \"_\", or \"~\" - the \"unreserved\" characters.\n\n///\n\n/// # Marker Generic: `Path`, `Query`\n\n///\n\n/// The [`UriPart`] parameter `P` in `UriDisplay<P>` must be either [`Path`] or\n\n/// [`Query`] (see the [`UriPart`] documentation for how this is enforced),\n\n/// resulting in either `UriDisplay<Path>` or `UriDisplay<Query>`.\n\n///\n\n/// As the names might imply, the `Path` version of the trait is used when\n\n/// displaying parameters in the path part of the URI while the `Query` version\n\n/// is used when displaying parameters in the query part of the URI. These\n\n/// distinct versions of the trait exist exactly to differentiate, at the\n\n/// type-level, where in the URI a value is to be written to, allowing for type\n\n/// safety in the face of differences between the two locations. For example,\n\n/// while it is valid to use a value of `None` in the query part, omitting the\n\n/// parameter entirely, doing so is _not_ valid in the path part. 
By\n\n/// differentiating in the type system, both of these conditions can be enforced\n\n/// appropriately through distinct implementations of `UriDisplay<Path>` and\n\n/// `UriDisplay<Query>`.\n\n///\n\n/// Occasionally, the implementation of `UriDisplay` is independent of where the\n\n/// parameter is to be displayed. When this is the case, the parameter may be\n\n/// kept generic. That is, implementations can take the form:\n\n///\n\n/// ```rust\n\n/// # extern crate rocket;\n\n/// # use std::fmt;\n\n/// # use rocket::http::uri::{UriPart, UriDisplay, Formatter};\n\n/// # struct SomeType;\n\n/// impl<P: UriPart> UriDisplay<P> for SomeType\n\n/// # { fn fmt(&self, f: &mut Formatter<P>) -> fmt::Result { Ok(()) } }\n\n/// ```\n\n///\n\n/// [`UriPart`]: uri::UriPart\n\n/// [`Path`]: uri::Path\n\n/// [`Query`]: uri::Query\n\n///\n\n/// # Code Generation\n\n///\n\n/// When the [`uri!`] macro is used to generate a URI for a route, the types for\n\n/// the route's _path_ URI parameters must implement `UriDisplay<Path>`, while\n\n/// types in the route's query parameters must implement `UriDisplay<Query>`.\n\n/// Any parameters ignored with `_` must be of a type that implements\n\n/// [`Ignorable`]. The `UriDisplay` implementation for these types is used when\n\n/// generating the URI.\n\n///\n\n/// To illustrate `UriDisplay`'s role in code generation for `uri!`, consider\n\n/// the following route:\n\n///\n\n/// ```rust\n\n/// # #![feature(proc_macro_hygiene, decl_macro)]\n\n/// # #[macro_use] extern crate rocket;\n\n/// #[get(\"/item/<id>?<track>\")]\n\n/// fn get_item(id: i32, track: Option<String>) { /* .. */ }\n\n/// ```\n\n///\n\n/// A URI for this route can be generated as follows:\n\n///\n\n/// ```rust\n\n/// # #![feature(proc_macro_hygiene, decl_macro)]\n\n/// # #[macro_use] extern crate rocket;\n\n/// # type T = ();\n\n/// # #[get(\"/item/<id>?<track>\")]\n\n/// # fn get_item(id: i32, track: Option<String>) { /* .. 
*/ }\n\n/// #\n\n/// // With unnamed parameters.\n\n/// uri!(get_item: 100, \"inbound\");\n\n///\n\n/// // With named parameters.\n\n/// uri!(get_item: id = 100, track = \"inbound\");\n\n/// uri!(get_item: track = \"inbound\", id = 100);\n\n///\n\n/// // Ignoring `track`.\n\n/// uri!(get_item: 100, _);\n\n/// uri!(get_item: id = 100, track = _);\n\n/// uri!(get_item: track = _, id = 100);\n\n/// ```\n\n///\n\n/// After verifying parameters and their types, Rocket will generate code\n\n/// similar (in spirit) to the following:\n\n///\n\n/// ```rust\n\n/// # extern crate rocket;\n\n/// # use rocket::http::uri::{UriDisplay, Path, Query, Origin};\n\n/// #\n\n/// Origin::parse(&format!(\"/item/{}?track={}\",\n\n/// &100 as &UriDisplay<Path>, &\"inbound\" as &UriDisplay<Query>));\n\n/// ```\n\n///\n\n/// For this expression to typecheck, `i32` must implement `UriDisplay<Path>`\n\n/// and `&str` must implement `UriDisplay<Query>`. What's more, when `track` is\n\n/// ignored, `Option<String>` is required to implement [`Ignorable`]. 
As can be\n\n/// seen, the implementations will be used to display the value in a URI-safe\n\n/// manner.\n\n///\n\n/// [`uri!`]: ../../../rocket_codegen/macro.uri.html\n\n///\n\n/// # Provided Implementations\n\n///\n\n/// Rocket implements `UriDisplay<P>` for all `P: UriPart` for several built-in\n\n/// types.\n\n///\n\n/// * **i8, i16, i32, i64, i128, isize, u8, u16, u32, u64, u128, usize, f32,\n\n/// f64, bool, IpAddr, Ipv4Addr, Ipv6Addr**\n\n///\n\n/// The implementation of `UriDisplay` for these types is identical to the\n\n/// `Display` implementation.\n\n///\n\n/// * **[`&RawStr`](RawStr), `String`, `&str`, `Cow<str>`**\n\n///\n\n/// The string is percent encoded.\n\n///\n\n/// * **`&T`, `&mut T`** _where_ **`T: UriDisplay`**\n\n///\n\n/// Uses the implementation of `UriDisplay` for `T`.\n\n///\n\n/// Rocket implements `UriDisplay<Path>` (but not `UriDisplay<Query>`) for\n\n/// several built-in types.\n\n///\n\n/// * `T` for **`Option<T>`** _where_ **`T: UriDisplay<Path>`**\n\n///\n\n/// Uses the implementation of `UriDisplay` for `T::Target`.\n\n///\n\n/// When a type of `Option<T>` appears in a route path, use a type of `T` as\n\n/// the parameter in `uri!`. Note that `Option<T>` itself _does not_\n\n/// implement `UriDisplay<Path>`.\n\n///\n\n/// * `T` for **`Result<T, E>`** _where_ **`T: UriDisplay<Path>`**\n\n///\n\n/// Uses the implementation of `UriDisplay` for `T::Target`.\n\n///\n\n/// When a type of `Result<T, E>` appears in a route path, use a type of `T`\n\n/// as the parameter in `uri!`. 
Note that `Result<T, E>` itself _does not_\n\n/// implement `UriDisplay<Path>`.\n\n///\n\n/// Rocket implements `UriDisplay<Query>` (but not `UriDisplay<Path>`) for\n\n/// several built-in types.\n\n///\n\n/// * **`Form<T>`, `LenientForm<T>`** _where_ **`T: FromUriParam + FromForm`**\n\n///\n\n/// Uses the implementation of `UriDisplay` for `T::Target`.\n\n///\n\n/// In general, when a type of `Form<T>` is to be displayed as part of a\n\n/// URI's query, it suffices to derive `UriDisplay` for `T`. Note that any\n\n/// type that can be converted into a `T` using [`FromUriParam`] can be used\n\n/// in place of a `Form<T>` in a `uri!` invocation.\n\n///\n\n/// * **`Option<T>`** _where_ **`T: UriDisplay<Query>`**\n\n///\n\n/// If the `Option` is `Some`, uses the implementation of `UriDisplay` for\n\n/// `T`. Otherwise, nothing is rendered.\n\n///\n\n/// * **`Result<T, E>`** _where_ **`T: UriDisplay<Query>`**\n\n///\n\n/// If the `Result` is `Ok`, uses the implementation of `UriDisplay` for\n\n/// `T`. Otherwise, nothing is rendered.\n\n///\n\n/// [`FromUriParam`]: uri::FromUriParam\n\n///\n\n/// # Deriving\n\n///\n\n/// Manually implementing `UriDisplay` should be done with care. 
For most use\n\n/// cases, deriving `UriDisplay` will suffice:\n\n///\n\n/// ```rust\n\n/// # #[macro_use] extern crate rocket;\n\n/// # use rocket::http::uri::{UriDisplay, Query, Path};\n\n/// // Derives `UriDisplay<Query>`\n\n/// #[derive(UriDisplayQuery)]\n\n/// struct User {\n\n/// name: String,\n\n/// age: usize,\n\n/// }\n\n///\n\n/// let user = User { name: \"Michael Smith\".into(), age: 31 };\n\n/// let uri_string = format!(\"{}\", &user as &UriDisplay<Query>);\n\n/// assert_eq!(uri_string, \"name=Michael%20Smith&age=31\");\n\n///\n\n/// // Derives `UriDisplay<Path>`\n\n/// #[derive(UriDisplayPath)]\n\n/// struct Name(String);\n\n///\n\n/// let name = Name(\"Bob Smith\".into());\n\n/// let uri_string = format!(\"{}\", &name as &UriDisplay<Path>);\n\n/// assert_eq!(uri_string, \"Bob%20Smith\");\n\n/// ```\n\n///\n\n/// As long as every field in the structure (or enum) implements `UriDisplay`,\n\n/// the trait can be derived. The implementation calls\n\n/// [`Formatter::write_named_value()`] for every named field and\n\n/// [`Formatter::write_value()`] for every unnamed field. See the [`UriDisplay`\n\n/// derive] documentation for full details.\n\n///\n\n/// [`Ignorable`]: uri::Ignorable\n\n/// [`UriDisplay` derive]: ../../../rocket_codegen/derive.UriDisplay.html\n\n/// [`Formatter::write_named_value()`]: uri::Formatter::write_named_value()\n\n/// [`Formatter::write_value()`]: uri::Formatter::write_value()\n\n///\n\n/// # Implementing\n\n///\n\n/// Implementing `UriDisplay` is similar to implementing\n\n/// [`Display`](::std::fmt::Display) with the caveat that extra care must be\n\n/// taken to ensure that the written string is URI-safe. 
As mentioned before, in\n\n/// practice, this means that the string must either be percent-encoded or\n\n/// consist only of characters that are alphanumeric, \"-\", \".\", \"_\", or \"~\".\n\n///\n\n/// When manually implementing `UriDisplay` for your types, you should defer to\n\n/// existing implementations of `UriDisplay` as much as possible. In the example\n\n/// below, for instance, `Name`'s implementation defers to `String`'s\n\n/// implementation. To percent-encode a string, use\n\n/// [`Uri::percent_encode()`](uri::Uri::percent_encode()).\n\n///\n\n/// ## Example\n\n///\n\n/// The following snippet consists of a `Name` type that implements both\n\n/// `FromParam` and `UriDisplay<Path>`. The `FromParam` implementation allows\n\n/// `Name` to be used as the target type of a dynamic parameter, while the\n\n/// `UriDisplay` implementation allows URIs to be generated for routes with\n\n/// `Name` as a dynamic path parameter type. Note the custom parsing in the\n\n/// `FromParam` implementation; as a result of this, a custom (reflexive)\n\n/// `UriDisplay` implementation is required.\n\n///\n\n/// ```rust\n\n/// # #![feature(proc_macro_hygiene, decl_macro)]\n\n/// # #[macro_use] extern crate rocket;\n\n/// use rocket::http::RawStr;\n\n/// use rocket::request::FromParam;\n\n///\n\n/// struct Name(String);\n\n///\n\n/// const PREFIX: &str = \"name:\";\n\n///\n\n/// impl<'r> FromParam<'r> for Name {\n\n/// type Error = &'r RawStr;\n\n///\n\n/// /// Validates parameters that start with 'name:', extracting the text\n\n/// /// after 'name:' as long as there is at least one character.\n\n/// fn from_param(param: &'r RawStr) -> Result<Self, Self::Error> {\n\n/// let decoded = param.percent_decode().map_err(|_| param)?;\n\n/// if !decoded.starts_with(PREFIX) || decoded.len() < (PREFIX.len() + 1) {\n\n/// return Err(param);\n\n/// }\n\n///\n\n/// let real_name = decoded[PREFIX.len()..].to_string();\n\n/// Ok(Name(real_name))\n\n/// }\n\n/// }\n\n///\n\n/// use 
std::fmt;\n\n/// use rocket::http::impl_from_uri_param_identity;\n\n/// use rocket::http::uri::{Formatter, FromUriParam, UriDisplay, Path};\n\n/// use rocket::response::Redirect;\n\n///\n\n/// impl UriDisplay<Path> for Name {\n\n/// // Delegates to the `UriDisplay` implementation for `String` via the\n\n/// // call to `write_value` to ensure that the written string is\n\n/// // URI-safe. In this case, the string will be percent encoded.\n\n/// // Prefixes the inner name with `name:`.\n\n/// fn fmt(&self, f: &mut Formatter<Path>) -> fmt::Result {\n\n/// f.write_value(&format!(\"name:{}\", self.0))\n\n/// }\n\n/// }\n\n///\n\n/// impl_from_uri_param_identity!([Path] Name);\n\n///\n\n/// #[get(\"/name/<name>\")]\n\n/// fn redirector(name: Name) -> Redirect {\n\n/// Redirect::to(uri!(real: name))\n\n/// }\n\n///\n\n/// #[get(\"/<name>\")]\n\n/// fn real(name: Name) -> String {\n\n/// format!(\"Hello, {}!\", name.0)\n\n/// }\n\n///\n\n/// let uri = uri!(real: Name(\"Mike Smith\".into()));\n\n/// assert_eq!(uri.path(), \"/name:Mike%20Smith\");\n\n/// ```\n\npub trait UriDisplay<P: UriPart> {\n\n /// Formats `self` in a URI-safe manner using the given formatter.\n\n fn fmt(&self, f: &mut Formatter<P>) -> fmt::Result;\n\n}\n\n\n\nimpl<'a, P: UriPart> fmt::Display for &'a UriDisplay<P> {\n\n #[inline(always)]\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n UriDisplay::fmt(*self, &mut <Formatter<P>>::new(f))\n\n }\n\n}\n\n\n\n// Direct implementations: these are the leaves of a call to `UriDisplay::fmt`.\n\n\n\n/// Percent-encodes the raw string.\n\nimpl<P: UriPart> UriDisplay<P> for str {\n\n #[inline(always)]\n\n fn fmt(&self, f: &mut Formatter<P>) -> fmt::Result {\n\n f.write_raw(&Uri::percent_encode(self))\n\n }\n", "file_path": "core/http/src/uri/uri_display.rs", "rank": 56, "score": 171907.77512544615 }, { "content": "#[post(\"/<used>/<_unused>\")]\n\nfn unused_param(used: i32, _unused: i32) { }\n\n\n", "file_path": "core/codegen/tests/typed-uris.rs", 
"rank": 57, "score": 171173.58271322382 }, { "content": "#[post(\"/a/<id>/then/<path..>\")]\n\nfn param_and_segments(path: PathBuf, id: usize) { }\n\n\n", "file_path": "core/codegen/tests/typed-uris.rs", "rank": 58, "score": 168877.6089236186 }, { "content": "fn prefixed_vec(prefix: &str, input: TokenStream, ty: TokenStream2) -> TokenStream {\n\n let vec = _prefixed_vec(prefix, input, &ty)\n\n .map_err(|diag| diag.emit())\n\n .unwrap_or_else(|_| quote!(vec![]));\n\n\n\n quote!({\n\n let __vector: Vec<#ty> = #vec;\n\n __vector\n\n }).into()\n\n}\n\n\n", "file_path": "core/codegen/src/bang/mod.rs", "rank": 59, "score": 167449.63108976732 }, { "content": "/// Conversion trait for parameters used in [`uri!`] invocations.\n\n///\n\n/// # Overview\n\n///\n\n/// In addition to implementing [`UriDisplay`], to use a custom type in a `uri!`\n\n/// expression, the `FromUriParam` trait must be implemented. The `UriDisplay`\n\n/// derive automatically generates _identity_ implementations of `FromUriParam`,\n\n/// so in the majority of cases, as with `UriDisplay`, this trait is never\n\n/// implemented manually.\n\n///\n\n/// In the rare case that `UriDisplay` is implemented manually, this trait, too,\n\n/// must be implemented explicitly. In the majority of cases, implementation can\n\n/// be automated. Rocket provides the [`impl_from_uri_param_identity`] macro to\n\n/// generate the _identity_ implementations automatically. 
For a type `T`, these\n\n/// are:\n\n///\n\n/// * `impl<P: UriPart> FromUriParam<P, T> for T`\n\n/// * `impl<'x, P: UriPart> FromUriParam<P, &'x T> for T`\n\n/// * `impl<'x, P: UriPart> FromUriParam<P, &'x mut T> for T`\n\n///\n\n/// See [`impl_from_uri_param_identity`] for usage details.\n\n///\n\n/// [`impl_from_uri_param_identity`]: ../macro.impl_from_uri_param_identity.html\n\n///\n\n/// # Code Generation\n\n///\n\n/// This trait is invoked once per expression passed into a [`uri!`] invocation.\n\n/// In particular, for a route URI parameter of type `T` and a user-supplied\n\n/// expression of type `S`, `<T as FromUriParam<S>>::from_uri_param` is\n\n/// invoked. The returned value is used in place of the user's value and\n\n/// rendered using its [`UriDisplay`] implementation.\n\n///\n\n/// This trait allows types that differ from the route URI parameter's types to\n\n/// be used in their place at no cost. For instance, the following\n\n/// implementation, provided by Rocket, allows an `&str` to be used in a `uri!`\n\n/// invocation for route URI parameters declared as `String`:\n\n///\n\n/// ```rust\n\n/// # extern crate rocket;\n\n/// # use rocket::http::uri::{FromUriParam, UriPart};\n\n/// # struct S;\n\n/// # type String = S;\n\n/// impl<'a, P: UriPart> FromUriParam<P, &'a str> for String {\n\n/// type Target = &'a str;\n\n/// # fn from_uri_param(s: &'a str) -> Self::Target { \"hi\" }\n\n/// }\n\n/// ```\n\n///\n\n/// Because the [`FromUriParam::Target`] type is the same as the input type, the\n\n/// conversion is a no-op and free of cost, allowing an `&str` to be used in\n\n/// place of a `String` without penalty. 
A similar no-op conversion exists for\n\n/// [`&RawStr`](RawStr):\n\n///\n\n/// ```rust\n\n/// # extern crate rocket;\n\n/// # use rocket::http::uri::{FromUriParam, UriPart};\n\n/// # struct S;\n\n/// # type RawStr = S;\n\n/// impl<'a, 'b, P: UriPart> FromUriParam<P, &'a str> for &'b RawStr {\n\n/// type Target = &'a str;\n\n/// # fn from_uri_param(s: &'a str) -> Self::Target { \"hi\" }\n\n/// }\n\n/// ```\n\n///\n\n/// # Provided Implementations\n\n///\n\n/// The following types have _identity_ implementations:\n\n///\n\n/// * `String`, `i8`, `i16`, `i32`, `i64`, `i128`, `isize`, `u8`, `u16`,\n\n/// `u32`, `u64`, `u128`, `usize`, `f32`, `f64`, `bool`, `IpAddr`,\n\n/// `Ipv4Addr`, `Ipv6Addr`, `&str`, `&RawStr`, `Cow<str>`\n\n///\n\n/// The following conversions are implemented, allowing a value of the type on\n\n/// the left to be used when a type on the right is expected by a route:\n\n///\n\n/// * `&str` to `String`\n\n/// * `&str` to `RawStr`\n\n/// * `String` to `&str`\n\n/// * `String` to `RawStr`\n\n/// * `T` to `Option<T>`\n\n/// * `T` to `Result<T, E>`\n\n/// * `T` to `Form<T>`\n\n///\n\n/// The following types have _identity_ implementations _only in [`Path`]_:\n\n///\n\n/// * `&Path`, `PathBuf`\n\n///\n\n/// The following conversions are implemented _only in [`Path`]_:\n\n///\n\n/// * `&str` to `&Path`\n\n/// * `&str` to `PathBuf`\n\n/// * `PathBuf` to `&Path`\n\n///\n\n/// See [Foreign Impls](#foreign-impls) for all provided implementations.\n\n///\n\n/// # Implementing\n\n///\n\n/// This trait should only be implemented when you'd like to allow a type\n\n/// different from the route's declared type to be used in its place in a `uri!`\n\n/// invocation. 
For instance, if the route has a type of `T` and you'd like to\n\n/// use a type of `S` in a `uri!` invocation, you'd implement `FromUriParam<P,\n\n/// T> for S` where `P` is `Path` for conversions valid in the path part of a\n\n/// URI, `Uri` for conversions valid in the query part of a URI, or `P: UriPart`\n\n/// when a conversion is valid in either case.\n\n///\n\n/// This is typically only warranted for owned-value types with corresponding\n\n/// reference types: `String` and `&str`, for instance. In this case, it's\n\n/// desirable to allow an `&str` to be used in place of a `String`.\n\n///\n\n/// When implementing `FromUriParam`, be aware that Rocket will use the\n\n/// [`UriDisplay`] implementation of [`FromUriParam::Target`], _not_ of the\n\n/// source type. Incorrect implementations can result in creating unsafe URIs.\n\n///\n\n/// # Example\n\n///\n\n/// The following example implements `FromUriParam<Query, (&str, &str)>` for a\n\n/// `User` type. The implementation allows an `(&str, &str)` type to be used in\n\n/// a `uri!` invocation where a `User` type is expected in the query part of the\n\n/// URI.\n\n///\n\n/// ```rust\n\n/// # #[macro_use] extern crate rocket;\n\n/// use std::fmt;\n\n///\n\n/// use rocket::http::RawStr;\n\n/// use rocket::http::uri::{Formatter, UriDisplay, FromUriParam, Query};\n\n///\n\n/// #[derive(FromForm)]\n\n/// struct User<'a> {\n\n/// name: &'a RawStr,\n\n/// nickname: String,\n\n/// }\n\n///\n\n/// impl<'a> UriDisplay<Query> for User<'a> {\n\n/// fn fmt(&self, f: &mut Formatter<Query>) -> fmt::Result {\n\n/// f.write_named_value(\"name\", &self.name)?;\n\n/// f.write_named_value(\"nickname\", &self.nickname)\n\n/// }\n\n/// }\n\n///\n\n/// impl<'a, 'b> FromUriParam<Query, (&'a str, &'b str)> for User<'a> {\n\n/// type Target = User<'a>;\n\n///\n\n/// fn from_uri_param((name, nickname): (&'a str, &'b str)) -> User<'a> {\n\n/// User { name: name.into(), nickname: nickname.to_string() }\n\n/// }\n\n/// }\n\n/// 
```\n\n///\n\n/// With these implementations, the following typechecks:\n\n///\n\n/// ```rust\n\n/// # #![feature(proc_macro_hygiene, decl_macro)]\n\n/// # #[macro_use] extern crate rocket;\n\n/// # use std::fmt;\n\n/// use rocket::http::RawStr;\n\n/// use rocket::request::Form;\n\n/// # use rocket::http::uri::{Formatter, UriDisplay, FromUriParam, Query};\n\n/// #\n\n/// # #[derive(FromForm)]\n\n/// # struct User<'a> { name: &'a RawStr, nickname: String, }\n\n/// #\n\n/// # impl<'a> UriDisplay<Query> for User<'a> {\n\n/// # fn fmt(&self, f: &mut Formatter<Query>) -> fmt::Result {\n\n/// # f.write_named_value(\"name\", &self.name)?;\n\n/// # f.write_named_value(\"nickname\", &self.nickname)\n\n/// # }\n\n/// # }\n\n/// #\n\n/// # impl<'a, 'b> FromUriParam<Query, (&'a str, &'b str)> for User<'a> {\n\n/// # type Target = User<'a>;\n\n/// # fn from_uri_param((name, nickname): (&'a str, &'b str)) -> User<'a> {\n\n/// # User { name: name.into(), nickname: nickname.to_string() }\n\n/// # }\n\n/// # }\n\n///\n\n/// #[post(\"/<name>?<user..>\")]\n\n/// fn some_route(name: &RawStr, user: Form<User>) { /* .. */ }\n\n///\n\n/// let uri = uri!(some_route: name = \"hey\", user = (\"Robert Mike\", \"Bob\"));\n\n/// assert_eq!(uri.path(), \"/hey\");\n\n/// assert_eq!(uri.query(), Some(\"name=Robert%20Mike&nickname=Bob\"));\n\n/// ```\n\n///\n\n/// [`uri!`]: ::rocket_codegen::uri\n\n/// [`UriDisplay`]: uri::UriDisplay\n\n/// [`FromUriParam::Target`]: uri::FromUriParam::Target\n\n/// [`Path`]: uri::Path\n\npub trait FromUriParam<P: UriPart, T> {\n\n /// The resulting type of this conversion.\n\n type Target: UriDisplay<P>;\n\n\n\n /// Converts a value of type `T` into a value of type `Self::Target`. 
The\n\n /// resulting value of type `Self::Target` will be rendered into a URI using\n\n /// its [`UriDisplay`](uri::UriDisplay) implementation.\n\n fn from_uri_param(param: T) -> Self::Target;\n\n}\n\n\n\nuse std::{borrow::Cow, net::{IpAddr, Ipv4Addr, Ipv6Addr}};\n\n\n\n#[doc(hidden)]\n\n#[macro_export(local_inner_macros)]\n\nmacro_rules! impl_conversion_ref {\n\n ($(($($l:tt)+) $A:ty => $B:ty),*) => ( impl_conversion_ref!(@_ $(($($l)+,) $A => $B),*); );\n\n ($($A:ty => $B:ty),*) => ( impl_conversion_ref!(@_ $(() $A => $B),*); );\n\n\n\n (@_ $(($($l:tt)*) $A:ty => $B:ty),*) => ($(\n\n impl_conversion_ref!([P] ($($l)* P: $crate::uri::UriPart) $A => $B);\n", "file_path": "core/http/src/uri/from_uri_param.rs", "rank": 60, "score": 166808.04246034264 }, { "content": "fn validate_struct(gen: &DeriveGenerator, data: Struct) -> Result<()> {\n\n if data.fields().is_empty() {\n\n return Err(gen.input.span().error(\"at least one field is required\"));\n\n }\n\n\n\n let mut names = ::std::collections::HashMap::new();\n\n for field in data.fields().iter() {\n\n let id = field.ident.as_ref().expect(\"named field\");\n\n let field = match Form::from_attrs(\"form\", &field.attrs) {\n\n Some(result) => result?.field,\n\n None => FormField { span: Spanned::span(&id), name: id.to_string() }\n\n };\n\n\n\n if let Some(span) = names.get(&field.name) {\n\n return Err(field.span.error(\"duplicate field name\")\n\n .span_note(*span, \"previous definition here\"));\n\n }\n\n\n\n names.insert(field.name, field.span);\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "core/codegen/src/derive/from_form.rs", "rank": 61, "score": 166095.13559091528 }, { "content": "fn validate_struct(gen: &DeriveGenerator, data: Struct) -> Result<()> {\n\n validate_fields(data.fields(), gen.input.span())\n\n}\n\n\n", "file_path": "core/codegen/src/derive/uri_display.rs", "rank": 62, "score": 164820.32932951843 }, { "content": "pub fn log_level(conf: &Config,\n\n name: &str,\n\n value: &Value\n\n ) -> 
Result<LoggingLevel> {\n\n str(conf, name, value)\n\n .and_then(|s| s.parse().map_err(|e| conf.bad_type(name, value.type_str(), e)))\n\n}\n\n\n", "file_path": "core/lib/src/config/custom_values.rs", "rank": 63, "score": 163715.69336221874 }, { "content": "#[inline]\n\npub fn is_valid_token(c: char) -> bool {\n\n match c {\n\n '0'...'9' | 'A'...'Z' | '^'...'~' | '#'...'\\''\n\n | '!' | '*' | '+' | '-' | '.' => true,\n\n _ => false\n\n }\n\n}\n", "file_path": "core/http/src/parse/checkers.rs", "rank": 64, "score": 162043.32453692792 }, { "content": "#[inline(always)]\n\npub fn is_whitespace(byte: char) -> bool {\n\n byte == ' ' || byte == '\\t'\n\n}\n\n\n", "file_path": "core/http/src/parse/checkers.rs", "rank": 65, "score": 162043.28499678447 }, { "content": "#[doc(hidden)]\n\npub fn init(level: LoggingLevel) -> bool {\n\n try_init(level, true)\n\n}\n\n\n\n// Expose logging macros as (hidden) funcions for use by core/contrib codegen.\n\nmacro_rules! external_log_function {\n\n ($fn_name:ident: $macro_name:ident) => (\n\n #[doc(hidden)] #[inline(always)]\n\n pub fn $fn_name(msg: &str) { $macro_name!(\"{}\", msg); }\n\n )\n\n}\n\n\n\nexternal_log_function!(error: error);\n\nexternal_log_function!(error_: error_);\n\nexternal_log_function!(warn: warn);\n\nexternal_log_function!(warn_: warn_);\n", "file_path": "core/lib/src/logger.rs", "rank": 66, "score": 162037.4846486558 }, { "content": "#[post(\"/a/<id>/then/<path..>\")]\n\nfn guarded_segments(cookies: Cookies, path: PathBuf, id: usize) { }\n\n\n\nmacro assert_uri_eq($($uri:expr => $expected:expr,)+) {\n\n $(assert_eq!($uri, Origin::parse($expected).expect(\"valid origin URI\"));)+\n\n}\n\n\n", "file_path": "core/codegen/tests/typed-uris.rs", "rank": 67, "score": 162003.90899086822 }, { "content": "#[inline(always)]\n\npub fn is_whitespace(byte: char) -> bool {\n\n byte == ' ' || byte == '\\t'\n\n}\n\n\n", "file_path": "core/lib/src/config/toml_ext.rs", "rank": 68, "score": 160478.19719125627 }, { "content": "pub fn 
tls_config<'v>(conf: &Config,\n\n name: &str,\n\n value: &'v Value,\n\n ) -> Result<(&'v str, &'v str)> {\n\n let (mut certs_path, mut key_path) = (None, None);\n\n let table = value.as_table()\n\n .ok_or_else(|| conf.bad_type(name, value.type_str(), \"a table\"))?;\n\n\n\n let env = conf.environment;\n\n for (key, value) in table {\n\n match key.as_str() {\n\n \"certs\" => certs_path = Some(str(conf, \"tls.certs\", value)?),\n\n \"key\" => key_path = Some(str(conf, \"tls.key\", value)?),\n\n _ => return Err(ConfigError::UnknownKey(format!(\"{}.tls.{}\", env, key)))\n\n }\n\n }\n\n\n\n if let (Some(certs), Some(key)) = (certs_path, key_path) {\n\n Ok((certs, key))\n\n } else {\n\n Err(conf.bad_type(name, \"a table with missing entries\",\n\n \"a table with `certs` and `key` entries\"))\n\n }\n\n}\n\n\n", "file_path": "core/lib/src/config/custom_values.rs", "rank": 69, "score": 160472.39684312762 }, { "content": "#[get(\"/\")]\n\npub fn index() -> io::Result<NamedFile> {\n\n NamedFile::open(\"static/index.html\")\n\n}\n\n\n", "file_path": "examples/form_validation/src/files.rs", "rank": 70, "score": 160472.39684312762 }, { "content": "#[post(\"/logout\")]\n\nfn logout(mut cookies: Cookies) -> Flash<Redirect> {\n\n cookies.remove_private(Cookie::named(\"user_id\"));\n\n Flash::success(Redirect::to(uri!(login_page)), \"Successfully logged out.\")\n\n}\n\n\n", "file_path": "examples/session/src/main.rs", "rank": 71, "score": 159645.20207248605 }, { "content": "fn raw<'f>(string: &mut &'f RawStr, index: &mut usize) -> Option<FormItem<'f>> {\n\n loop {\n\n let start = *index;\n\n let s = &string[start..];\n\n if s.is_empty() {\n\n return None;\n\n }\n\n\n\n let (key, rest, key_consumed) = match memchr2(b'=', b'&', s.as_bytes()) {\n\n Some(i) if s.as_bytes()[i] == b'=' => (&s[..i], &s[(i + 1)..], i + 1),\n\n Some(i) => (&s[..i], &s[i..], i),\n\n None => (s, &s[s.len()..], s.len())\n\n };\n\n\n\n let (value, val_consumed) = match memchr2(b'=', b'&', rest.as_bytes()) {\n\n 
Some(i) if rest.as_bytes()[i] == b'=' => return None,\n\n Some(i) => (&rest[..i], i + 1),\n\n None => (rest, rest.len())\n\n };\n\n\n", "file_path": "core/lib/src/request/form/form_items.rs", "rank": 72, "score": 159169.3894494545 }, { "content": "/// Alias to [`Rocket::custom()`]. Creates a new instance of `Rocket` with a\n\n/// custom configuration.\n\npub fn custom(config: config::Config) -> Rocket {\n\n Rocket::custom(config)\n\n}\n", "file_path": "core/lib/src/lib.rs", "rank": 73, "score": 159082.21237759604 }, { "content": "#[get(\"/hello/<name>/<age>\")]\n\nfn hello(name: String, age: u8) -> String {\n\n format!(\"Hello, {} year old named {}!\", age, name)\n\n}\n\n\n", "file_path": "examples/hello_person/src/main.rs", "rank": 74, "score": 158172.1958722273 }, { "content": "pub fn parse_accept(input: &str) -> Result<Accept> {\n\n parse!(accept: &mut input.into())\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use MediaType;\n\n use super::parse_accept;\n\n\n\n macro_rules! assert_parse {\n\n ($string:expr) => ({\n\n match parse_accept($string) {\n\n Ok(accept) => accept,\n\n Err(e) => panic!(\"{:?} failed to parse: {}\", $string, e)\n\n }\n\n });\n\n }\n\n\n\n macro_rules! 
assert_parse_eq {\n\n ($string:expr, [$($mt:expr),*]) => ({\n", "file_path": "core/http/src/parse/accept.rs", "rank": 75, "score": 156002.5175120173 }, { "content": "pub fn route_attribute<M: Into<Option<::http::Method>>>(\n\n method: M,\n\n args: TokenStream,\n\n input: TokenStream\n\n) -> TokenStream {\n\n let result = match method.into() {\n\n Some(method) => incomplete_route(method, args.into(), input),\n\n None => complete_route(args.into(), input)\n\n };\n\n\n\n result.unwrap_or_else(|diag| { diag.emit(); TokenStream::new() })\n\n}\n", "file_path": "core/codegen/src/attribute/route.rs", "rank": 76, "score": 153298.53325846134 }, { "content": "struct HasContentType;\n\n\n\nimpl<'a, 'r> FromRequest<'a, 'r> for HasContentType {\n\n type Error = ();\n\n\n\n fn from_request(request: &'a Request<'r>) -> request::Outcome<Self, ()> {\n\n if request.content_type().is_some() {\n\n Success(HasContentType)\n\n } else {\n\n Forward(())\n\n }\n\n }\n\n}\n\n\n\nuse rocket::data::{self, FromDataSimple};\n\n\n\nimpl FromDataSimple for HasContentType {\n\n type Error = ();\n\n\n\n fn from_data(request: &Request, data: Data) -> data::Outcome<Self, ()> {\n\n if request.content_type().is_some() {\n\n Success(HasContentType)\n\n } else {\n\n Forward(data)\n\n }\n\n }\n\n}\n\n\n", "file_path": "core/lib/tests/local-request-content-type-issue-505.rs", "rank": 77, "score": 152827.47919857845 }, { "content": "struct S;\n\n\n\nimpl<'a> FromParam<'a> for S {\n\n type Error = ();\n\n fn from_param(param: &'a RawStr) -> Result<Self, Self::Error> { Ok(S) }\n\n}\n\n\n", "file_path": "core/codegen/tests/ui-fail/typed-uri-bad-type.rs", "rank": 78, "score": 152827.47919857845 }, { "content": "#[get(\"/\")]\n\nfn index(msg: Option<FlashMessage>, conn: DbConn) -> Template {\n\n Template::render(\"index\", &match msg {\n\n Some(ref msg) => Context::raw(&conn, Some((msg.name(), msg.msg()))),\n\n None => Context::raw(&conn, None),\n\n })\n\n}\n\n\n", "file_path": "examples/todo/src/main.rs", 
"rank": 79, "score": 152522.98460670514 }, { "content": "#[derive(UriDisplayPath)]\n\nstruct BopP(FooP);\n\n\n", "file_path": "core/codegen/tests/uri_display.rs", "rank": 80, "score": 152441.63937033142 }, { "content": "pub fn syn_to_diag(error: syn::parse::Error) -> Diagnostic {\n\n error.span().unstable().error(error.to_string())\n\n}\n\n\n", "file_path": "core/codegen/src/syn_ext.rs", "rank": 81, "score": 151832.00337804074 }, { "content": "struct BadType;\n\n\n", "file_path": "core/codegen/tests/ui-fail/uri_display_type_errors.rs", "rank": 82, "score": 151425.88595081636 }, { "content": "#[post(\"/submit\", data = \"<message>\")]\n\nfn submit(mut cookies: Cookies, message: Form<Message>) -> Redirect {\n\n cookies.add(Cookie::new(\"message\", message.into_inner().message));\n\n Redirect::to(\"/\")\n\n}\n\n\n", "file_path": "examples/cookies/src/main.rs", "rank": 83, "score": 151339.33574404143 }, { "content": "fn assert_invalid_form(client: &Client, vals: &mut [&str; 6]) {\n\n let s = format!(\"checkbox={}&number={}&type={}&password={}&textarea={}&select={}\",\n\n vals[0], vals[1], vals[2], vals[3], vals[4], vals[5]);\n\n assert_form_eq(client, &s, format!(\"Invalid form input: {}\", s));\n\n *vals = [\"true\", \"1\", \"a\", \"hi\", \"hey\", \"b\"];\n\n}\n\n\n", "file_path": "examples/form_kitchen_sink/src/tests.rs", "rank": 84, "score": 150947.4457575648 }, { "content": "#[parser]\n\nfn accept<'a>(input: &mut Input<'a>) -> Result<'a, Accept> {\n\n Accept(series(false, ',', is_whitespace, weighted_media_type)?)\n\n}\n\n\n", "file_path": "core/http/src/parse/accept.rs", "rank": 85, "score": 150552.40302380268 }, { "content": "#[derive(Debug, Serialize)]\n\nstruct Context<'a, 'b>{ msg: Option<(&'a str, &'b str)>, tasks: Vec<Task> }\n\n\n\nimpl<'a, 'b> Context<'a, 'b> {\n\n pub fn err(conn: &DbConn, msg: &'a str) -> Context<'static, 'a> {\n\n Context{msg: Some((\"error\", msg)), tasks: Task::all(conn)}\n\n }\n\n\n\n pub fn raw(conn: &DbConn, msg: Option<(&'a str, 
&'b str)>) -> Context<'a, 'b> {\n\n Context{msg: msg, tasks: Task::all(conn)}\n\n }\n\n}\n\n\n", "file_path": "examples/todo/src/main.rs", "rank": 86, "score": 149669.67028708698 }, { "content": "#[derive(FromForm)]\n\nstruct Second {\n\n nickname: String,\n\n}\n\n\n", "file_path": "core/codegen/tests/typed-uris.rs", "rank": 87, "score": 148253.19954972016 }, { "content": "#[derive(Debug, Serialize, Deserialize)]\n\nstruct Person {\n\n name: String,\n\n age: u8,\n\n}\n\n\n\n// In a `GET` request and all other non-payload supporting request types, the\n\n// preferred media type in the Accept header is matched against the `format` in\n\n// the route attribute. Note: if this was a real application, we'd use\n\n// `rocket_contrib`'s built-in JSON support and return a `JsonValue` instead.\n", "file_path": "examples/content_types/src/main.rs", "rank": 88, "score": 148253.19954972016 }, { "content": "#[derive(UriDisplayQuery)]\n\nstruct Bar1(BadType);\n\n//~^ ERROR UriDisplay<rocket::http::uri::Query>\n\n\n", "file_path": "core/codegen/tests/ui-fail/uri_display_type_errors.rs", "rank": 89, "score": 147641.53054468578 }, { "content": "#[derive(UriDisplayPath)]\n\nstruct Baz(BadType);\n\n//~^ ERROR UriDisplay<rocket::http::uri::Path>\n\n\n", "file_path": "core/codegen/tests/ui-fail/uri_display_type_errors.rs", "rank": 90, "score": 147641.53054468578 }, { "content": "#[parser]\n\nfn port<'a>(input: &mut RawInput<'a>) -> Result<'a, u16> {\n\n let port_str = take_n_while(5, |c| c >= b'0' && c <= b'9')?;\n\n port_from(&port_str)?\n\n}\n\n\n", "file_path": "core/http/src/parse/uri/parser.rs", "rank": 91, "score": 147566.72502090395 }, { "content": "fn data_expr(ident: &syn::Ident, ty: &syn::Type) -> TokenStream2 {\n\n define_vars_and_mods!(req, data, FromData, Outcome, Transform);\n\n let span = ident.span().unstable().join(ty.span()).unwrap().into();\n\n quote_spanned! 
{ span =>\n\n let __transform = <#ty as #FromData>::transform(#req, #data);\n\n\n\n #[allow(unreachable_patterns, unreachable_code)]\n\n let __outcome = match __transform {\n\n #Transform::Owned(#Outcome::Success(__v)) => {\n\n #Transform::Owned(#Outcome::Success(__v))\n\n },\n\n #Transform::Borrowed(#Outcome::Success(ref __v)) => {\n\n #Transform::Borrowed(#Outcome::Success(::std::borrow::Borrow::borrow(__v)))\n\n },\n\n #Transform::Borrowed(__o) => #Transform::Borrowed(__o.map(|_| {\n\n unreachable!(\"Borrowed(Success(..)) case handled in previous block\")\n\n })),\n\n #Transform::Owned(__o) => #Transform::Owned(__o),\n\n };\n\n\n\n #[allow(non_snake_case, unreachable_patterns, unreachable_code)]\n\n let #ident: #ty = match <#ty as #FromData>::from_data(#req, __outcome) {\n\n #Outcome::Success(__d) => __d,\n\n #Outcome::Forward(__d) => return #Outcome::Forward(__d),\n\n #Outcome::Failure((__c, _)) => return #Outcome::Failure(__c),\n\n };\n\n }\n\n}\n\n\n", "file_path": "core/codegen/src/attribute/route.rs", "rank": 92, "score": 146671.27616121658 }, { "content": "#[derive(Debug, Clone)]\n\nstruct MediaParam {\n\n key: IndexedString,\n\n value: IndexedString,\n\n}\n\n\n\n// FIXME: `Static` is needed for `const` items. 
Need `const SmallVec::new`.\n\n#[derive(Debug, Clone)]\n\npub enum MediaParams {\n\n Static(&'static [(IndexedString, IndexedString)]),\n\n Dynamic(SmallVec<[(IndexedString, IndexedString); 2]>)\n\n}\n\n\n\nimpl ::pear::parsers::Collection for MediaParams {\n\n type Item = (IndexedString, IndexedString);\n\n\n\n fn new() -> Self {\n\n MediaParams::Dynamic(SmallVec::new())\n\n }\n\n\n\n fn add(&mut self, item: Self::Item) {\n", "file_path": "core/http/src/media_type.rs", "rank": 93, "score": 146444.88410745573 }, { "content": "#[get(\"/<_>\")] //~ ERROR must be named\n\nfn k0(_: usize) {} //~^ HELP use a name such as\n\n\n\n// Check that strange dynamic syntax is caught.\n\n\n", "file_path": "core/codegen/tests/ui-fail/route-path-bad-syntax.rs", "rank": 94, "score": 146293.10575861204 }, { "content": "#[derive(FromForm, UriDisplayQuery)]\n\nstruct Third<'r> {\n\n one: String,\n\n two: &'r RawStr,\n\n}\n\n\n", "file_path": "core/codegen/tests/typed-uris.rs", "rank": 95, "score": 145832.56118735147 }, { "content": "#[derive(FromForm, UriDisplayQuery)]\n\nstruct User<'a> {\n\n name: &'a RawStr,\n\n nickname: String,\n\n}\n\n\n\nimpl<'a, 'b> FromUriParam<Query, (&'a str, &'b str)> for User<'a> {\n\n type Target = User<'a>;\n\n fn from_uri_param((name, nickname): (&'a str, &'b str)) -> User<'a> {\n\n User { name: name.into(), nickname: nickname.to_string() }\n\n }\n\n}\n\n\n\n// This one has no `UriDisplay`. It exists to ensure that this file still\n\n// compiles even though it's used a URI parameter's type. 
As long as a user\n\n// doesn't request a URI from that route, things should be okay.\n", "file_path": "core/codegen/tests/typed-uris.rs", "rank": 96, "score": 145832.56118735147 }, { "content": "#[parser]\n\nfn key<'a>(input: &mut &'a str) -> Result<String, &'a str> {\n\n take_some_while(is_ident_char)?.to_string()\n\n}\n\n\n", "file_path": "core/lib/src/config/toml_ext.rs", "rank": 97, "score": 145436.61567724874 }, { "content": "#[parser]\n\nfn array<'a>(input: &mut &'a str) -> Result<Value, &'a str> {\n\n Value::Array(collection('[', value, ',', ']')?)\n\n}\n\n\n", "file_path": "core/lib/src/config/toml_ext.rs", "rank": 98, "score": 145436.61567724874 }, { "content": "#[parser]\n\nfn table<'a>(input: &mut &'a str) -> Result<Value, &'a str> {\n\n Value::Table(collection('{', key_value, ',', '}')?)\n\n}\n\n\n", "file_path": "core/lib/src/config/toml_ext.rs", "rank": 99, "score": 145436.61567724874 } ]
Rust
src/platform/bluez/misc/thermometer.rs
OtaK/niter
e66b301b2469b7048e0ce254110a8e4df04cceba
#[derive(Debug, Clone, zvariant_derive::Type, serde::Serialize, serde::Deserialize)] pub struct ThermometerManager { object_path: String, } impl std::str::FromStr for ThermometerManager { type Err = crate::NiterError; fn from_str(s: &str) -> Result<Self, Self::Err> { Ok(Self { object_path: s.into(), }) } } crate::to_proxy_impl!(ThermometerManager, ThermometerManagerProxy, "org.bluez"); crate::impl_tryfrom_zvariant!(ThermometerManager); #[zbus::dbus_proxy( interface = "org.bluez.ThermometerManager1", default_service = "org.bluez" )] pub trait ThermometerManager { fn register_watcher(&self, agent: ThermometerWatcher) -> zbus::Result<()>; fn unregister_watcher(&self, agent: ThermometerWatcher) -> zbus::Result<()>; fn enable_intermediate_measurement(&self, agent: ThermometerWatcher) -> zbus::Result<()>; fn disable_intermediate_measurement(&self, agent: ThermometerWatcher) -> zbus::Result<()>; } #[derive(Debug, Clone, zvariant_derive::Type, serde::Serialize, serde::Deserialize)] pub struct Thermometer { object_path: String, } impl std::str::FromStr for Thermometer { type Err = crate::NiterError; fn from_str(s: &str) -> Result<Self, Self::Err> { Ok(Self { object_path: s.into(), }) } } crate::to_proxy_impl!(Thermometer, ThermometerProxy, "org.bluez"); crate::impl_tryfrom_zvariant!(Thermometer); #[zbus::dbus_proxy(interface = "org.bluez.Thermometer1", default_service = "org.bluez")] pub trait Thermometer { #[dbus_proxy(property)] fn intermediate(&self) -> zbus::fdo::Result<bool>; #[dbus_proxy(property)] fn interval(&self) -> zbus::fdo::Result<u16>; #[dbus_proxy(property)] fn set_interval(&self, interval_seconds: u16) -> zbus::fdo::Result<()>; #[dbus_proxy(property)] fn maximum(&self) -> zbus::fdo::Result<u16>; #[dbus_proxy(property)] fn minimum(&self) -> zbus::fdo::Result<u16>; } #[derive(Debug, Clone, Default, serde::Serialize, serde::Deserialize)] pub struct ThermometerWatcher { object_path: String, current_measurement: Option<ThermometerMeasurement>, } impl 
zvariant::Type for ThermometerWatcher { fn signature() -> zvariant::Signature<'static> { zvariant::Signature::from_str_unchecked("s") } } #[zbus::dbus_interface(name = "org.bluez.ThermometerWatcher1")] impl ThermometerWatcher { fn measurement_received( &mut self, measurement: ThermometerMeasurement, ) -> zbus::fdo::Result<()> { self.current_measurement = Some(measurement); Ok(()) } } #[derive(Debug, Clone, serde::Serialize, serde::Deserialize)] pub struct ThermometerMeasurement { #[serde(skip)] calculated_value: Option<f64>, exponent: i16, mantissa: i32, unit: ThermometerMeasurementUnit, time: Option<u64>, r#type: Option<ThermometerMeasurementType>, measurement: ThermometerMeasurementKind, } impl zvariant::Type for ThermometerMeasurement { fn signature() -> zvariant::Signature<'static> { zvariant::Signature::from_str_unchecked("a{sv}") } } const TWO_POW_23: i32 = 8388608; const MANTISSA_NAN: i32 = TWO_POW_23 - 1; const MANTISSA_PINF: i32 = TWO_POW_23 - 2; const MANTISSA_NINF: i32 = -(MANTISSA_PINF); const MANTISSA_NRES: i32 = -(TWO_POW_23); impl std::convert::TryFrom<zvariant::Dict<'_, '_>> for ThermometerMeasurement { type Error = crate::NiterError; fn try_from(dict: zvariant::Dict<'_, '_>) -> crate::NiterResult<Self> { use std::str::FromStr as _; let exponent: i16 = *dict .get("Exponent")? .ok_or_else(|| zvariant::Error::IncorrectType)?; let mantissa: i32 = *dict .get("Mantissa")? .ok_or_else(|| zvariant::Error::IncorrectType)?; let unit: &str = dict .get("Unit")? .ok_or_else(|| zvariant::Error::IncorrectType)?; let time = dict.get("Time")?; let measurement_type: Option<&str> = dict.get("Type")?; let measurement_kind: &str = dict .get("Measurement")? 
.ok_or_else(|| zvariant::Error::IncorrectType)?; let calculated_value: Option<f64> = if exponent == 0 { match mantissa { MANTISSA_NAN => Some(f64::NAN), MANTISSA_NRES => None, MANTISSA_PINF => Some(f64::INFINITY), MANTISSA_NINF => Some(-f64::INFINITY), _ => None, } } else { Some(mantissa as f64 * 10.0_f64.powi(exponent.into())) }; Ok(Self { calculated_value, exponent, mantissa, unit: ThermometerMeasurementUnit::from_str(&unit)?, time: time.copied(), r#type: measurement_type.and_then(|s| ThermometerMeasurementType::from_str(s).ok()), measurement: ThermometerMeasurementKind::from_str(measurement_kind)?, }) } } impl std::convert::TryFrom<zvariant::OwnedValue> for ThermometerMeasurement { type Error = crate::NiterError; fn try_from(v: zvariant::OwnedValue) -> Result<Self, Self::Error> { use std::convert::TryInto as _; let dict: zvariant::Dict = v.try_into()?; Self::try_from(dict) } } #[derive( Debug, Clone, Copy, strum::EnumString, strum::Display, zvariant_derive::Type, serde::Serialize, serde::Deserialize, )] #[strum(serialize_all = "lowercase")] pub enum ThermometerMeasurementUnit { Celsius, Farenheit, } crate::impl_tryfrom_zvariant!(ThermometerMeasurementUnit); #[derive( Debug, Clone, Copy, strum::EnumString, strum::Display, zvariant_derive::Type, serde::Serialize, serde::Deserialize, )] #[strum(serialize_all = "lowercase")] pub enum ThermometerMeasurementType { Armpit, Body, Ear, Finger, Intestines, Mouth, Rectum, Toe, Tympanum, } crate::impl_tryfrom_zvariant!(ThermometerMeasurementType); #[derive( Debug, Clone, Copy, strum::EnumString, strum::Display, zvariant_derive::Type, serde::Serialize, serde::Deserialize, )] #[strum(serialize_all = "lowercase")] pub enum ThermometerMeasurementKind { Final, Intermediate, } crate::impl_tryfrom_zvariant!(ThermometerMeasurementKind);
#[derive(Debug, Clone, zvariant_derive::Type, serde::Serialize, serde::Deserialize)] pub struct ThermometerManager { object_path: String, } impl std::str::FromStr for ThermometerManager { type Err = crate::NiterError; fn from_str(s: &str) -> Result<Self, Self::Err> { Ok(Self { object_path: s.into(), }) } } crate::to_proxy_impl!(ThermometerManager, ThermometerManagerProxy, "org.bluez"); crate::impl_tryfrom_zvariant!(ThermometerManager); #[zbus::dbus_proxy( interface = "org.bluez.ThermometerManager1", default_service = "org.bluez" )] pub trait ThermometerManager { fn register_watcher(&self, agent: ThermometerWatcher) -> zbus::Result<()>; fn unregister_watcher(&self, agent: ThermometerWatcher) -> zbus::Result<()>; fn enable_intermediate_measurement(&self, agent: ThermometerWatcher) -> zbus::Result<()>; fn disable_intermediate_measurement(&self, agent: ThermometerWatcher) -> zbus::Result<()>; } #[derive(Debug, Clone, zvariant_derive::Type, serde::Serialize, serde::Deserialize)] pub struct Thermometer { object_path: String, } impl std::str::FromStr for Thermometer { type Err = crate::NiterError; fn from_str(s: &str) -> Result<Self, Self::Err> { Ok(Self { object_path: s.into(), }) } } crate::to_proxy_impl!(Thermometer, ThermometerProxy, "org.bluez"); crate::impl_tryfrom_zvariant!(Thermometer); #[zbus::dbus_proxy(interface = "org.bluez.Thermometer1", default_service = "org.bluez")] pub trait Thermometer { #[dbus_proxy(property)] fn intermediate(&self) -> zbus::fdo::Result<bool>; #[dbus_proxy(property)] fn interval(&self) -> zbus::fdo::Result<u16>; #[dbus_proxy(property)] fn set_interval(&self, interval_seconds: u16) -> zbus::fdo::Result<()>; #[dbus_proxy(property)] fn maximum(&self) -> zbus::fdo::Result<u16>; #[dbus_proxy(property)] fn minimum(&self) -> zbus::fdo::Result<u16>; } #[derive(Debug, Clone, Default, serde::Serialize, serde::Deserialize)] pub struct ThermometerWatcher { object_path: String, current_measurement: Option<ThermometerMeasurement>, } impl 
zvariant::Type for ThermometerWatcher { fn signature() -> zvariant::Signature<'static> { zvariant::Signature::from_str_unchecked("s") } } #[zbus::dbus_interface(name = "org.bluez.ThermometerWatcher1")] impl ThermometerWatcher { fn measurement_received( &mut self, measurement: ThermometerMeasurement, ) -> zbus::fdo::Result<()> { self.current_measurement = Some(measurement); Ok(()) } } #[derive(Debug, Clone, serde::Serialize, serde::Deserialize)] pub struct ThermometerMeasurement { #[serde(skip)] calculated_value: Option<f64>, exponent: i16, mantissa: i32, unit: ThermometerMeasurementUnit, time: Option<u64>, r#type: Option<ThermometerMeasurementType>, measurement: ThermometerMeasurementKind, } impl zvariant::Type for ThermometerMeasurement { fn signature() -> zvariant::Signature<'static> { zvariant::Signature::from_str_unchecked("a{sv}") } } const TWO_POW_23: i32 = 8388608; const MANTISSA_NAN: i32 = TWO_POW_23 - 1; const MANTISSA_PINF: i32 = TWO_POW_23 - 2; const MANTISSA_NINF: i32 = -(MANTISSA_PINF); const MANTISSA_NRES: i32 = -(TWO_POW_23); impl std::convert::TryFrom<zvariant::Dict<'_, '_>> for ThermometerMeasurement { type Error = crate::NiterError; fn try_from(dict: zvariant::Dict<'_, '_>) -> crate::NiterResult<Self> { use std::str::FromStr as _; let exponent: i16 = *dict .get("Exponent")? .ok_or_else(|| zvariant::Error::IncorrectType)?; let mantissa: i32 = *dict .get("Mantissa")? .ok_or_else(|| zvariant::Error::IncorrectType)?; let unit: &str = dict .get("Unit")? .ok_or_else(|| zvariant::Error::IncorrectType)?; let time = dict.get("Time")?; let measurement_type: Option<&str> = dict.get("Type")?; let measurement_kind: &str = dict .get("Measurement")? 
.ok_or_else(|| zvariant::Error::IncorrectType)?; let calculated_value: Option<f64> = if exponent == 0 { match mantissa { MANTISSA_NAN => Some(f64::NAN), MANTISSA_NRES => None, MANTISSA_PINF => Some(f64::INFINITY), MANTISSA_NINF => Some(-f64::INFINITY), _ => None, } } else { Some(mantissa as f64 * 10.0_f64.powi(exponent.into())) };
} } impl std::convert::TryFrom<zvariant::OwnedValue> for ThermometerMeasurement { type Error = crate::NiterError; fn try_from(v: zvariant::OwnedValue) -> Result<Self, Self::Error> { use std::convert::TryInto as _; let dict: zvariant::Dict = v.try_into()?; Self::try_from(dict) } } #[derive( Debug, Clone, Copy, strum::EnumString, strum::Display, zvariant_derive::Type, serde::Serialize, serde::Deserialize, )] #[strum(serialize_all = "lowercase")] pub enum ThermometerMeasurementUnit { Celsius, Farenheit, } crate::impl_tryfrom_zvariant!(ThermometerMeasurementUnit); #[derive( Debug, Clone, Copy, strum::EnumString, strum::Display, zvariant_derive::Type, serde::Serialize, serde::Deserialize, )] #[strum(serialize_all = "lowercase")] pub enum ThermometerMeasurementType { Armpit, Body, Ear, Finger, Intestines, Mouth, Rectum, Toe, Tympanum, } crate::impl_tryfrom_zvariant!(ThermometerMeasurementType); #[derive( Debug, Clone, Copy, strum::EnumString, strum::Display, zvariant_derive::Type, serde::Serialize, serde::Deserialize, )] #[strum(serialize_all = "lowercase")] pub enum ThermometerMeasurementKind { Final, Intermediate, } crate::impl_tryfrom_zvariant!(ThermometerMeasurementKind);
Ok(Self { calculated_value, exponent, mantissa, unit: ThermometerMeasurementUnit::from_str(&unit)?, time: time.copied(), r#type: measurement_type.and_then(|s| ThermometerMeasurementType::from_str(s).ok()), measurement: ThermometerMeasurementKind::from_str(measurement_kind)?, })
call_expression
[ { "content": "pub trait MediaEndpointDelegate<E: std::error::Error>: zvariant::Type + 'static {\n\n fn set_configuration(&mut self, transport: MediaTransport, properties: MediaEndpointProperties) -> Result<(), E>;\n\n fn select_configuration(&mut self, capabilities: MediaEndpointCapabilities) -> Result<MediaTransportConfiguration, E>;\n\n fn clear_configuration(&mut self, transport: MediaTransport) -> Result<(), E>;\n\n fn release(&mut self) -> Result<(), E>;\n\n\n\n fn uuid(&self) -> &str;\n\n fn codec(&self) -> u8;\n\n fn capabilities(&self) -> MediaEndpointCapabilities;\n\n fn device(&self) -> zvariant::Value;\n\n}\n\n\n\n#[derive(Debug, Clone, zvariant_derive::Type, serde::Serialize, serde::Deserialize)]\n\npub struct MediaEndpointServer<T: MediaEndpointDelegate<zbus::fdo::Error>> {\n\n object_path: String,\n\n delegate: T,\n\n}\n\n\n\n#[zbus::dbus_interface(name = \"org.bluez.MediaEndpoint1\")]\n\nimpl<T: MediaEndpointDelegate<zbus::fdo::Error>> MediaEndpointServer<T> {\n", "file_path": "src/platform/bluez/media/endpoint.rs", "rank": 0, "score": 151386.62634439862 }, { "content": "pub trait MediaItemDelegate<E: std::error::Error>: zvariant::Type + 'static {\n\n fn play(&mut self) -> Result<(), E>;\n\n fn add_to_now_playing(&mut self) -> Result<(), E>;\n\n\n\n //fn player(&self) -> &MediaPlayer;\n\n fn name(&self) -> &str;\n\n fn item_type(&self) -> MediaItemType;\n\n fn folder_type(&self) -> MediaFolderType;\n\n fn playable(&self) -> bool;\n\n fn metadata(&self) -> &MediaItemMetadata;\n\n}\n\n\n\n#[derive(Debug, Clone, zvariant_derive::Type, serde::Serialize, serde::Deserialize)]\n\npub struct MediaItemTarget<T: MediaItemDelegate<zbus::fdo::Error>> {\n\n object_path: String,\n\n delegate: T,\n\n}\n\n\n\n#[zbus::dbus_interface(name = \"org.bluez.MediaItem1\")]\n\nimpl<T: MediaItemDelegate<zbus::fdo::Error>> MediaItemTarget<T> {\n", "file_path": "src/platform/bluez/media/item.rs", "rank": 1, "score": 151386.62634439865 }, { "content": "pub trait 
ProvisioningAgentDelegate: zvariant::Type + 'static {\n\n fn private_key(&self) -> Vec<u8>;\n\n fn public_key(&self) -> Vec<u8>;\n\n fn display_string(&self, value: String);\n\n fn display_numeric(&self, display_kind: DisplayNumericKind, number: u32);\n\n fn prompt_numeric(&self, display_kind: PromptNumericKind) -> u32;\n\n fn prompt_static(&self, display_kind: PromptStaticKind) -> [u8; 16];\n\n fn cancel(&mut self);\n\n}\n\n\n\n#[derive(Debug, Clone, zvariant_derive::Type, serde::Serialize, serde::Deserialize)]\n\npub struct ProvisioningAgent<T: ProvisioningAgentDelegate> {\n\n capabilities: Vec<DisplayCapability>,\n\n oob_info: Vec<OutOfBandInfoAvailability>,\n\n uri: String,\n\n service_name: String,\n\n object_path: String,\n\n delegate: T,\n\n}\n\n\n", "file_path": "src/platform/bluez/mesh/provision/provisioning_agent.rs", "rank": 2, "score": 140499.11291085748 }, { "content": "#[zbus::dbus_proxy(interface = \"org.bluez.Agent1\")]\n\n#[derive(Debug, Clone, zvariant_derive::Type, serde::Serialize, serde::Deserialize)]\n\npub trait Agent {\n\n fn release(&self) -> zbus::Result<()>;\n\n fn request_pin_code(&self, device: Device) -> zbus::Result<String>;\n\n fn display_pin_code(&self, device: Device, pincode: String) -> zbus::Result<()>;\n\n fn request_passkey(&self, device: Device) -> zbus::Result<u32>;\n\n fn display_passkey(&self, device: Device, passkey: u32, entered: u16) -> zbus::Result<()>;\n\n fn request_confirmation(&self, device: Device, passkey: u32) -> zbus::Result<()>;\n\n fn request_authorization(&self, device: Device) -> zbus::Result<()>;\n\n fn authorize_service(&self, uuid: crate::Uuid) -> zbus::Result<()>;\n\n fn cancel(&self) -> zbus::Result<()>;\n\n}\n", "file_path": "src/platform/bluez/agent.rs", "rank": 3, "score": 139617.91045768355 }, { "content": "pub trait ObexAgentDelegate: zvariant::Type + serde::Serialize + 'static {\n\n fn release(&mut self);\n\n fn authorize_push(&self, transfer: ()) -> String;\n\n fn cancel(&mut 
self);\n\n}\n\n\n\n#[derive(Debug, Clone, zvariant_derive::Type, serde::Serialize, serde::Deserialize)]\n\npub struct ObexAgent<T: ObexAgentDelegate> {\n\n object_path: String,\n\n service_name: String,\n\n delegate: T,\n\n}\n\n\n\n#[zbus::dbus_interface(name = \"org.bluez.obex.Agent1\")]\n\nimpl<T: ObexAgentDelegate> ObexAgent<T> {\n\n fn release(&mut self) {\n\n self.delegate.release()\n\n }\n\n fn authorize_push(&self, transfer: ()) -> String {\n\n #[allow(clippy::unit_arg)]\n", "file_path": "src/platform/bluez/obex/agent.rs", "rank": 4, "score": 137179.20423363903 }, { "content": "#[derive(Debug, Clone, zvariant_derive::Type, serde::Serialize, serde::Deserialize)]\n\npub trait AgentManager {\n\n fn register_agent(&self, agent: Agent, capability: AgentCapability) -> zbus::Result<()>;\n\n fn unregister_agent(&self, agent: Agent) -> zbus::Result<()>;\n\n fn request_default_agent(&self, agent: Agent) -> zbus::Result<()>;\n\n}\n\n\n", "file_path": "src/platform/bluez/agent.rs", "rank": 6, "score": 113939.09313071468 }, { "content": "pub trait ObexAgentManager {\n\n fn register_agent(&self, agent: ObexAgent<impl ObexAgentDelegate>) -> zbus::Result<()>;\n\n fn unregister_agent(&self, agent: ObexAgent<impl ObexAgentDelegate>) -> zbus::Result<()>;\n\n}\n", "file_path": "src/platform/bluez/obex/agent.rs", "rank": 8, "score": 109212.95263445609 }, { "content": "pub trait GattProfileDelegate: zvariant::Type + 'static {\n\n fn release(&mut self);\n\n}\n\n\n\n#[derive(Debug, Clone, zvariant_derive::Type, serde::Serialize, serde::Deserialize)]\n\npub struct GattProfile<T: GattProfileDelegate> {\n\n uuids: crate::UuidArray,\n\n profile_impl: T,\n\n}\n\n\n\n#[zbus::dbus_interface(name = \"org.bluez.GattProfile1\")]\n\nimpl<T: GattProfileDelegate> GattProfile<T> {\n\n fn release(&mut self) -> zbus::fdo::Result<()> {\n\n // Letting the profile impl know that cleanup has started and do its thing\n\n self.profile_impl.release();\n\n // We're dropping self since that's a profile 
release signal\n\n // Side effect is that it'll drop the inner profile_impl\n\n Ok(())\n\n }\n\n\n\n #[dbus_interface(property, name = \"UUIDs\")]\n\n fn uuids(&self) -> crate::UuidArray {\n\n self.uuids.clone()\n\n }\n\n}\n", "file_path": "src/platform/bluez/gatt/profile.rs", "rank": 9, "score": 108724.45045702589 }, { "content": "pub trait MeshAttentionDelegate: zvariant::Type + 'static {\n\n fn set_timer(&self, element_index: u8, time: u16);\n\n fn get_timer(&self, element_index: u8) -> u16;\n\n}\n\n\n\n#[allow(dead_code)]\n\npub struct MeshAttention<T: MeshAttentionDelegate> {\n\n object_path: String,\n\n service_name: String,\n\n delegate: T,\n\n}\n\n\n\n#[zbus::dbus_interface(name = \"org.bluez.mesh.Attention1\")]\n\nimpl<T: MeshAttentionDelegate> MeshAttention<T> {\n\n fn set_timer(&self, element_index: u8, time: u16) {\n\n self.delegate.set_timer(element_index, time)\n\n }\n\n fn get_timer(&self, element_index: u8) -> u16 {\n\n self.delegate.get_timer(element_index)\n\n }\n\n}\n", "file_path": "src/platform/bluez/mesh/attention.rs", "rank": 10, "score": 108724.45045702589 }, { "content": "pub trait MeshElementDelegate: zvariant::Type + 'static {\n\n fn message_received(\n\n &mut self,\n\n source: u16,\n\n key_index: u16,\n\n destination: MeshMessageDestination,\n\n data: Vec<u8>,\n\n );\n\n fn dev_key_message_received(\n\n &mut self,\n\n source: u16,\n\n remote: bool,\n\n net_index: u16,\n\n data: Vec<u8>,\n\n );\n\n fn update_model_configuration(\n\n &mut self,\n\n model_id: u16,\n\n config: std::collections::HashMap<String, zvariant::Value<'_>>,\n\n );\n", "file_path": "src/platform/bluez/mesh/element.rs", "rank": 11, "score": 108724.45045702589 }, { "content": "pub trait ProvisionerDelegate: zvariant::Type + 'static {\n\n fn scan_result(\n\n &self,\n\n rssi: i16,\n\n data: Vec<u8>,\n\n options: std::collections::HashMap<String, String>,\n\n );\n\n fn request_prov_data(&self, count: u8) -> (u16, u16);\n\n fn add_node_complete(&self, uuid: crate::Uuid, 
unicast: u16, count: u8);\n\n fn add_node_failed(&self, uuid: crate::Uuid, reason: ProvisionerAddNodeFailedReason);\n\n}\n\n\n\n#[derive(Debug, Clone, zvariant_derive::Type, serde::Serialize, serde::Deserialize)]\n\npub struct Provisioner<T: ProvisionerDelegate> {\n\n service_name: String,\n\n object_path: String,\n\n delegate: T,\n\n}\n\n\n\n#[zbus::dbus_interface(name = \"org.bluez.mesh.Provisioner1\")]\n", "file_path": "src/platform/bluez/mesh/provision/provisioner.rs", "rank": 12, "score": 108724.45045702589 }, { "content": "pub trait MediaFolderDelegate: zvariant::Type + Sized + 'static {\n\n fn search(&self, value: String, filter: MediaPlayerFilter) -> zbus::fdo::Result<MediaFolder<Self>>;\n\n fn list_items(&self, filter: MediaPlayerFilter) -> zbus::fdo::Result<Vec<(String, MediaItemMetadata)>>;\n\n fn change_folder(&self, folder: String) -> zbus::fdo::Result<()>;\n\n\n\n fn number_of_items(&self) -> u32;\n\n fn folder_name(&self) -> &str;\n\n}\n\n\n\n#[derive(Debug, Clone, zvariant_derive::Type, serde::Serialize, serde::Deserialize)]\n\npub struct MediaFolder<T: MediaFolderDelegate> {\n\n object_path: String,\n\n delegate: T,\n\n}\n\n\n\n#[zbus::dbus_interface(name = \"org.bluez.MediaFolder1\")]\n\nimpl<T: MediaFolderDelegate + serde::Serialize> MediaFolder<T> {\n\n fn search(&self, value: String, filter: MediaPlayerFilter) -> zbus::fdo::Result<Self> {\n\n self.delegate.search(value, filter)\n\n }\n", "file_path": "src/platform/bluez/media/folder.rs", "rank": 13, "score": 104974.17216871402 }, { "content": "pub trait MeshApplicationDelegate: zvariant::Type + serde::Serialize + 'static {\n\n fn join_complete(&mut self, token: u64);\n\n fn join_failed(&mut self, reason: MeshApplicationJoinFailureReason);\n\n}\n\n\n\n#[derive(Debug, Clone, zvariant_derive::Type, serde::Serialize, serde::Deserialize)]\n\npub struct MeshApplication<T: MeshApplicationDelegate> {\n\n company_id: u16,\n\n product_id: u16,\n\n version_id: u16,\n\n crpl: u16,\n\n service_name: 
String,\n\n object_path: String,\n\n delegate: T,\n\n}\n\n\n\n#[zbus::dbus_interface(name = \"org.bluez.mesh.Application1\")]\n\nimpl<T: MeshApplicationDelegate> MeshApplication<T> {\n\n fn join_complete(&mut self, token: u64) {\n\n self.delegate.join_complete(token)\n", "file_path": "src/platform/bluez/mesh/application.rs", "rank": 14, "score": 101532.30091343532 }, { "content": "#[zbus::dbus_proxy(interface = \"org.bluez.Profile1\")]\n\n#[derive(Debug, Clone, zvariant_derive::Type, serde::Serialize, serde::Deserialize)]\n\npub trait Profile {\n\n fn release(&self) -> zbus::Result<()>;\n\n fn new_connection(\n\n &self,\n\n device: Device,\n\n fd: std::os::unix::io::RawFd,\n\n fd_properties: FdProperties,\n\n ) -> zbus::Result<()>;\n\n fn request_disconnection(&self, device: Device) -> zbus::Result<()>;\n\n}\n", "file_path": "src/platform/bluez/profile.rs", "rank": 15, "score": 75558.12005930072 }, { "content": "#[derive(Debug, Clone, zvariant_derive::Type, serde::Serialize, serde::Deserialize)]\n\npub trait Device {\n\n fn connect(&self) -> zbus::Result<()>;\n\n fn disconnect(&self) -> zbus::Result<()>;\n\n fn connect_profile(&self, uuid: crate::Uuid) -> zbus::Result<()>;\n\n fn disconnect_profile(&self, uuid: crate::Uuid) -> zbus::Result<()>;\n\n fn pair(&self) -> zbus::Result<()>;\n\n fn cancel_pairing(&self) -> zbus::Result<()>;\n\n\n\n #[dbus_proxy(property)]\n\n fn address(&self) -> zbus::fdo::Result<String>;\n\n #[dbus_proxy(property)]\n\n fn address_type(&self) -> zbus::fdo::Result<bluez::AddressType>;\n\n #[dbus_proxy(property)]\n\n fn name(&self) -> zbus::fdo::Result<String>;\n\n #[dbus_proxy(property)]\n\n fn icon(&self) -> zbus::fdo::Result<String>;\n\n #[dbus_proxy(property)]\n\n fn class(&self) -> zbus::fdo::Result<u32>;\n\n #[dbus_proxy(property)]\n\n fn appearance(&self) -> zbus::fdo::Result<crate::spec::ble_appearance::BLEAppearance>;\n", "file_path": "src/platform/bluez/device.rs", "rank": 16, "score": 75555.25894316011 }, { "content": 
"#[derive(Debug, Clone, zvariant_derive::Type, serde::Serialize, serde::Deserialize)]\n\npub trait Adapter {\n\n fn start_discovery(&self) -> zbus::Result<()>;\n\n fn stop_discovery(&self) -> zbus::Result<()>;\n\n fn remove_device(&self, device: bluez::device::Device) -> zbus::Result<()>;\n\n fn set_discovery_filter(&self, filter: zvariant::Value) -> zbus::Result<()>;\n\n fn get_discovery_filters(&self) -> zbus::Result<Vec<String>>;\n\n fn connect_device(&self, device: zvariant::Value) -> zbus::Result<()>;\n\n\n\n #[dbus_proxy(property)]\n\n fn address(&self) -> zbus::fdo::Result<String>;\n\n #[dbus_proxy(property)]\n\n fn address_type(&self) -> zbus::fdo::Result<bluez::AddressType>;\n\n #[dbus_proxy(property)]\n\n fn name(&self) -> zbus::fdo::Result<String>;\n\n #[dbus_proxy(property)]\n\n fn alias(&self) -> zbus::fdo::Result<String>;\n\n #[dbus_proxy(property)]\n\n fn set_alias(&self, alias: &str) -> zbus::fdo::Result<()>;\n\n #[dbus_proxy(property)]\n\n fn class(&self) -> zbus::fdo::Result<u32>;\n", "file_path": "src/platform/bluez/adapter.rs", "rank": 17, "score": 75555.25894316011 }, { "content": "#[zbus::dbus_proxy(interface = \"org.bluez.Input1\", default_service = \"org.bluez\")]\n\npub trait Input {\n\n #[dbus_proxy(property)]\n\n fn reconnect_mode(&self) -> zbus::fdo::Result<InputReconnectMode>;\n\n}\n", "file_path": "src/platform/bluez/misc/input.rs", "rank": 18, "score": 73755.2279757922 }, { "content": "#[zbus::dbus_proxy(interface = \"org.bluez.Battery1\", default_service = \"org.bluez\")]\n\npub trait Battery {\n\n #[dbus_proxy(property)]\n\n fn percentage(&self) -> zbus::fdo::Result<u8>;\n\n}\n", "file_path": "src/platform/bluez/misc/battery.rs", "rank": 19, "score": 73755.2279757922 }, { "content": "#[zbus::dbus_proxy(interface = \"org.bluez.Network1\", default_service = \"org.bluez\")]\n\npub trait Network {\n\n fn connect(&self, uuid: NetworkUuid) -> zbus::Result<String>;\n\n fn disconnect(&self) -> zbus::Result<()>;\n\n\n\n 
#[dbus_proxy(property)]\n\n fn connected(&self) -> zbus::fdo::Result<bool>;\n\n #[dbus_proxy(property)]\n\n fn interface(&self) -> zbus::fdo::Result<String>;\n\n #[dbus_proxy(property, name = \"UUID\")]\n\n fn uuid(&self) -> zbus::fdo::Result<NetworkUuid>;\n\n}\n\n\n\n#[derive(Debug, Clone, zvariant_derive::Type, serde::Serialize, serde::Deserialize)]\n\npub struct NetworkServer {\n\n object_path: String,\n\n}\n\n\n\nimpl std::str::FromStr for NetworkServer {\n\n type Err = crate::NiterError;\n\n fn from_str(s: &str) -> Result<Self, Self::Err> {\n", "file_path": "src/platform/bluez/misc/network.rs", "rank": 20, "score": 73755.2279757922 }, { "content": "#[derive(Debug, Clone, zvariant_derive::Type, serde::Serialize, serde::Deserialize)]\n\npub trait ProfileManager {\n\n fn register_profile(\n\n &self,\n\n profile: Profile,\n\n uuid: crate::Uuid,\n\n options: ProfileManagerRegisterOptions,\n\n ) -> zbus::Result<()>;\n\n fn unregister_profile(&self, profile: Profile) -> zbus::Result<()>;\n\n}\n\n\n\n#[derive(Debug, Clone, zvariant_derive::Type, serde::Serialize, serde::Deserialize)]\n\npub struct FdProperties {\n\n version: u16,\n\n features: u16,\n\n}\n\n\n", "file_path": "src/platform/bluez/profile.rs", "rank": 21, "score": 73754.52153888541 }, { "content": "#[derive(Debug, Clone, zvariant_derive::Type, serde::Serialize, serde::Deserialize)]\n\npub trait AdvertisingManager {\n\n fn register_advertisement(\n\n &self,\n\n advertisement: Advertisement,\n\n options: std::collections::HashMap<String, String>,\n\n ) -> zbus::Result<()>;\n\n fn unregister_advertisement(&self, advertisement: Advertisement) -> zbus::Result<()>;\n\n\n\n #[dbus_proxy(property)]\n\n fn active_instances(&self) -> zbus::fdo::Result<u8>;\n\n #[dbus_proxy(property)]\n\n fn supported_instances(&self) -> zbus::fdo::Result<u8>;\n\n #[dbus_proxy(property)]\n\n fn supported_includes(&self) -> zbus::fdo::Result<bluez::ZvariantableArray<SystemInclude>>;\n\n #[dbus_proxy(property)]\n\n fn 
supported_secondary_channels(\n\n &self,\n\n ) -> zbus::fdo::Result<bluez::ZvariantableArray<SecondaryChannel>>;\n\n}\n", "file_path": "src/platform/bluez/advertising.rs", "rank": 22, "score": 73754.52153888541 }, { "content": "pub trait Media {\n\n fn register_endpoint(\n\n &self,\n\n endpoint: endpoint::MediaEndpointServer<impl endpoint::MediaEndpointDelegate<zbus::fdo::Error> + serde::Serialize>,\n\n properties: MediaEndpointRegisterProperties,\n\n ) -> zbus::Result<()>;\n\n\n\n fn unregister_endpoint(&self, endpoint: endpoint::MediaEndpointServer<impl endpoint::MediaEndpointDelegate<zbus::fdo::Error> + serde::Serialize>) -> zbus::Result<()>;\n\n\n\n fn register_player<\n\n K: serde::Serialize + Eq + std::hash::Hash + zvariant::Type,\n\n V: serde::Serialize + zvariant::Type\n\n >(&self, player: player::MediaPlayer, properties: std::collections::HashMap<K, V>) -> zbus::Result<()>;\n\n\n\n fn unregister_player(&self, player: player::MediaPlayer) -> zbus::Result<()>;\n\n\n\n fn register_application<\n\n K: serde::Serialize + Eq + std::hash::Hash + zvariant::Type,\n\n V: serde::Serialize + zvariant::Type\n\n >(&self, root: zvariant::ObjectPath, options: std::collections::HashMap<K, V>) -> zbus::Result<()>;\n\n\n\n fn unregister_application(&self, application: zvariant::ObjectPath) -> zbus::Result<()>;\n\n}\n", "file_path": "src/platform/bluez/media/mod.rs", "rank": 23, "score": 73748.02344873264 }, { "content": "#[zbus::dbus_proxy(interface = \"org.bluez.GattManager1\", default_service = \"org.bluez\")]\n\n#[derive(Debug, Clone, zvariant_derive::Type, serde::Serialize, serde::Deserialize)]\n\npub trait GattManager {\n\n fn register_application(\n\n &self,\n\n application: Application,\n\n options: RegisterApplicationOptions,\n\n ) -> zbus::Result<Vec<u8>>;\n\n fn unregister_application(&self, application: Application) -> zbus::Result<bool>;\n\n}\n", "file_path": "src/platform/bluez/gatt/manager.rs", "rank": 24, "score": 72059.44905377062 }, { "content": 
"#[zbus::dbus_proxy(interface = \"org.bluez.SimAccess1\", default_service = \"org.bluez\")]\n\npub trait SimAccess {\n\n fn disconnect(&self) -> zbus::Result<()>;\n\n\n\n #[dbus_proxy(property)]\n\n fn connected(&self) -> zbus::fdo::Result<bool>;\n\n}\n", "file_path": "src/platform/bluez/misc/sap.rs", "rank": 25, "score": 72054.46520139082 }, { "content": "#[zbus::dbus_proxy(interface = \"org.bluez.HealthDevice1\", default_service = \"org.bluez\")]\n\npub trait HealthDevice {\n\n fn echo(&self) -> zbus::Result<bool>;\n\n fn create_channel(&self) -> zbus::Result<HealthChannel>;\n\n fn destroy_channel(&self, channel: HealthChannel) -> zbus::Result<()>;\n\n\n\n #[dbus_proxy(signal)]\n\n fn channel_connected(&self, channel: HealthChannel) -> zbus::fdo::Result<()>;\n\n #[dbus_proxy(signal)]\n\n fn channel_deleted(&self, channel: HealthChannel) -> zbus::fdo::Result<()>;\n\n #[dbus_proxy(property)]\n\n fn main_channel(&self) -> zbus::fdo::Result<HealthChannel>;\n\n}\n\n\n\n#[derive(\n\n Debug,\n\n Clone,\n\n Copy,\n\n strum::EnumString,\n\n strum::Display,\n\n zvariant_derive::Type,\n", "file_path": "src/platform/bluez/misc/health.rs", "rank": 26, "score": 72054.46520139082 }, { "content": "#[zbus::dbus_proxy(interface = \"org.bluez.HealthChannel\", default_service = \"org.bluez\")]\n\npub trait HealthChannel {\n\n fn acquire(&self) -> zbus::Result<std::os::unix::io::RawFd>;\n\n fn release(&self) -> zbus::Result<()>;\n\n\n\n #[dbus_proxy(property, name = \"Type\")]\n\n fn r#type(&self) -> zbus::fdo::Result<HealthChannelType>;\n\n #[dbus_proxy(property)]\n\n fn device(&self) -> zbus::fdo::Result<HealthDevice>;\n\n #[dbus_proxy(property)]\n\n fn application(&self) -> zbus::fdo::Result<HealthApplication>;\n\n}\n", "file_path": "src/platform/bluez/misc/health.rs", "rank": 27, "score": 72054.46520139082 }, { "content": "#[zbus::dbus_proxy(interface = \"org.bluez.mesh.Node1\", default_service = \"org.bluez.mesh\")]\n\npub trait MeshNode {\n\n fn send(\n\n &self,\n\n 
element_path: zvariant::ObjectPath,\n\n destination: u16,\n\n key_index: u16,\n\n data: Vec<u8>,\n\n ) -> zbus::Result<()>;\n\n fn dev_key_send(\n\n &self,\n\n element_path: zvariant::ObjectPath,\n\n destination: u16,\n\n remote: bool,\n\n net_index: u16,\n\n data: Vec<u8>,\n\n ) -> zbus::Result<()>;\n\n fn add_net_key(\n\n &self,\n\n element_path: zvariant::ObjectPath,\n\n destination: u16,\n", "file_path": "src/platform/bluez/mesh/node.rs", "rank": 28, "score": 72054.3856460686 }, { "content": "#[derive(Debug, Clone, zvariant_derive::Type, serde::Serialize, serde::Deserialize)]\n\npub trait AdvertisementMonitor {\n\n fn release(&self) -> zbus::Result<()>;\n\n fn activate(&self) -> zbus::Result<()>;\n\n fn device_found(&self, device: Device) -> zbus::Result<()>;\n\n fn device_lost(&self, device: Device) -> zbus::Result<()>;\n\n\n\n #[dbus_proxy(property, name = \"Type\")]\n\n fn r#type(&self) -> zbus::fdo::Result<String>;\n\n #[dbus_proxy(property)]\n\n fn rssi_thresholds_and_timers(&self)\n\n -> zbus::fdo::Result<AdvertisementRssiThresholdsAndTimers>;\n\n #[dbus_proxy(property)]\n\n fn patterns(&self) -> zbus::fdo::Result<crate::ZvariantableArray<AdvertisementPattern>>;\n\n}\n\n\n\n#[derive(\n\n Debug,\n\n Clone,\n\n Copy,\n\n strum::Display,\n", "file_path": "src/platform/bluez/advertisement_monitor.rs", "rank": 29, "score": 72053.8401374426 }, { "content": "#[derive(Debug, Clone, zvariant_derive::Type, serde::Serialize, serde::Deserialize)]\n\npub trait GattDescriptor {\n\n fn read_value(&self, flags: GattDescriptorReadOptions) -> zbus::Result<Vec<u8>>;\n\n fn write_value(&self, value: Vec<u8>, flags: GattDescriptorWriteOptions) -> zbus::Result<()>;\n\n\n\n #[dbus_proxy(property, name = \"UUID\")]\n\n fn uuid(&self) -> zbus::fdo::Result<crate::UuidArray>;\n\n #[dbus_proxy(property)]\n\n fn characteristic(&self) -> zbus::fdo::Result<super::GattCharacteristic>;\n\n #[dbus_proxy(property)]\n\n fn value(&self) -> zbus::fdo::Result<Vec<u8>>;\n\n 
#[dbus_proxy(property)]\n\n fn flags(&self) -> zbus::fdo::Result<crate::platform::bluez::ZvariantableArray<GattDescriptorFlags>>;\n\n #[dbus_proxy(property)]\n\n fn handle(&self) -> zbus::fdo::Result<u16>;\n\n}\n\n\n\n#[derive(Debug, Clone, zvariant_derive::Type, serde::Serialize, serde::Deserialize)]\n\npub struct GattDescriptor<D: GattDescriptorDelegate> {\n\n object_path: String,\n\n characteristic_object_path: String,\n", "file_path": "src/platform/bluez/gatt/descriptor.rs", "rank": 30, "score": 72053.8401374426 }, { "content": "#[derive(Debug, Clone, zvariant_derive::Type, serde::Serialize, serde::Deserialize)]\n\npub trait ObexMessage {\n\n fn get(&self, targetfile: String, attachment: bool) -> zbus::Result<(String, )>;\n\n\n\n #[dbus_proxy(property)]\n\n fn folder(&self) -> zbus::fdo::Result<String>;\n\n #[dbus_proxy(property)]\n\n fn subject(&self) -> zbus::fdo::Result<String>;\n\n #[dbus_proxy(property)]\n\n fn timestamp(&self) -> zbus::fdo::Result<String>;\n\n #[dbus_proxy(property)]\n\n fn sender(&self) -> zbus::fdo::Result<String>;\n\n #[dbus_proxy(property)]\n\n fn sender_address(&self) -> zbus::fdo::Result<String>;\n\n #[dbus_proxy(property)]\n\n fn reply_to(&self) -> zbus::fdo::Result<String>;\n\n #[dbus_proxy(property)]\n\n fn recipient(&self) -> zbus::fdo::Result<String>;\n\n #[dbus_proxy(property)]\n\n fn recipient_address(&self) -> zbus::fdo::Result<String>;\n\n #[dbus_proxy(property, name = \"Type\")]\n", "file_path": "src/platform/bluez/obex/message.rs", "rank": 31, "score": 72053.8401374426 }, { "content": "#[derive(Debug, Clone, zvariant_derive::Type, serde::Serialize, serde::Deserialize)]\n\npub trait GattService {\n\n #[dbus_proxy(property, name = \"UUID\")]\n\n fn uuid(&self) -> zbus::fdo::Result<crate::Uuid>;\n\n #[dbus_proxy(property)]\n\n fn primary(&self) -> zbus::fdo::Result<bool>;\n\n #[dbus_proxy(property)]\n\n fn device(&self) -> zbus::fdo::Result<bluez::device::Device>;\n\n #[dbus_proxy(property)]\n\n fn includes(\n\n &self,\n\n ) 
-> zbus::fdo::Result<bluez::ZvariantableArray<bluez::advertising::SystemInclude>>;\n\n #[dbus_proxy(property)]\n\n fn handle(&self) -> zbus::fdo::Result<u16>;\n\n}\n\n\n\n#[derive(Debug, Clone, zvariant_derive::Type, serde::Serialize, serde::Deserialize)]\n\npub struct GattService {\n\n object_path: String,\n\n}\n\n\n", "file_path": "src/platform/bluez/gatt/service.rs", "rank": 32, "score": 72053.8401374426 }, { "content": "#[derive(Debug, Clone, zvariant_derive::Type, serde::Serialize, serde::Deserialize)]\n\npub trait GattCharacteristic {\n\n fn read_value(&self, options: GattCharacteristicReadOptions) -> zbus::Result<Vec<u8>>;\n\n fn write_value(\n\n &self,\n\n value: Vec<u8>,\n\n options: GattCharacteristicWriteOptions,\n\n ) -> zbus::Result<bool>;\n\n fn acquire_write(\n\n &self,\n\n options: GattCharacteristicAcquireOptions,\n\n ) -> zbus::Result<(std::os::unix::io::RawFd, u16)>;\n\n fn acquire_notify(\n\n &self,\n\n options: GattCharacteristicAcquireOptions,\n\n ) -> zbus::Result<(std::os::unix::io::RawFd, u16)>;\n\n fn start_notify(&self) -> zbus::Result<()>;\n\n fn stop_notify(&self) -> zbus::Result<()>;\n\n fn confirm(&self) -> zbus::Result<()>;\n\n\n\n #[dbus_proxy(property, name = \"UUID\")]\n", "file_path": "src/platform/bluez/gatt/characteristic.rs", "rank": 33, "score": 72053.8401374426 }, { "content": "pub trait ObexClient {\n\n fn create_session(\n\n &self,\n\n destination: String,\n\n args: ObexSessionArgs,\n\n ) -> zbus::Result<ObexSession>;\n\n fn remove_session(&self, session: ObexSession) -> zbus::Result<()>;\n\n}\n", "file_path": "src/platform/bluez/obex/client.rs", "rank": 34, "score": 72047.34204728983 }, { "content": "pub trait HealthManager {\n\n fn create_application(\n\n &self,\n\n config: HealthApplicationConfiguration,\n\n ) -> zbus::Result<HealthApplication>;\n\n fn destroy_application(&self, application: HealthApplication) -> zbus::Result<()>;\n\n}\n\n\n\n#[derive(Debug, Clone, zvariant_derive::Type, serde::Serialize, 
serde::Deserialize)]\n\npub struct HealthDevice {\n\n object_path: String,\n\n}\n\n\n\nimpl std::str::FromStr for HealthDevice {\n\n type Err = crate::NiterError;\n\n fn from_str(s: &str) -> Result<Self, Self::Err> {\n\n Ok(Self {\n\n object_path: s.into(),\n\n })\n\n }\n\n}\n\n\n\ncrate::to_proxy_impl!(HealthDevice, HealthDeviceProxy, \"org.bluez\");\n\ncrate::impl_tryfrom_zvariant!(HealthDevice);\n\n\n", "file_path": "src/platform/bluez/misc/health.rs", "rank": 35, "score": 72047.34204728983 }, { "content": "pub trait NetworkServer {\n\n fn register(&self, uuid: NetworkUuid, bridge: String) -> zbus::Result<()>;\n\n fn unregsiter(&self, uuid: NetworkUuid) -> zbus::Result<()>;\n\n}\n", "file_path": "src/platform/bluez/misc/network.rs", "rank": 36, "score": 72047.34204728983 }, { "content": "pub trait MediaControl {\n\n #[deprecated]\n\n fn play(&self) -> zbus::Result<()>;\n\n #[deprecated]\n\n fn pause(&self) -> zbus::Result<()>;\n\n #[deprecated]\n\n fn stop(&self) -> zbus::Result<()>;\n\n #[deprecated]\n\n fn next(&self) -> zbus::Result<()>;\n\n #[deprecated]\n\n fn previous(&self) -> zbus::Result<()>;\n\n #[deprecated]\n\n fn volume_up(&self) -> zbus::Result<()>;\n\n #[deprecated]\n\n fn volume_down(&self) -> zbus::Result<()>;\n\n #[deprecated]\n\n fn fast_forward(&self) -> zbus::Result<()>;\n\n #[deprecated]\n\n fn rewind(&self) -> zbus::Result<()>;\n\n\n\n #[dbus_proxy(property)]\n\n fn connected(&self) -> zbus::fdo::Result<bool>;\n\n #[dbus_proxy(property)]\n\n fn player(&self) -> zbus::fdo::Result<super::player::MediaPlayer>;\n\n}\n", "file_path": "src/platform/bluez/media/control.rs", "rank": 37, "score": 72047.34204728983 }, { "content": "pub trait MediaTransport {\n\n fn acquire(&self) -> zbus::Result<(std::os::unix::io::RawFd, u16, u16)>;\n\n fn try_acquire(&self) -> zbus::Result<(std::os::unix::io::RawFd, u16, u16)>;\n\n fn release(&self) -> zbus::Result<()>;\n\n\n\n #[dbus_proxy(property)]\n\n fn device(&self) -> 
zbus::fdo::Result<crate::platform::bluez::device::Device>;\n\n #[dbus_proxy(property, name = \"UUID\")]\n\n fn uuid(&self) -> zbus::fdo::Result<crate::Uuid>;\n\n #[dbus_proxy(property)]\n\n fn codec(&self) -> zbus::fdo::Result<u8>;\n\n #[dbus_proxy(property)]\n\n fn configuration(&self) -> zbus::fdo::Result<MediaTransportConfiguration>;\n\n #[dbus_proxy(property)]\n\n fn state(&self) -> zbus::fdo::Result<MediaTransportState>;\n\n #[dbus_proxy(property)]\n\n fn delay(&self) -> zbus::fdo::Result<u16>;\n\n #[dbus_proxy(property)]\n\n fn set_delay(&self, delay: u16) -> zbus::fdo::Result<()>;\n\n #[dbus_proxy(property)]\n\n fn volume(&self) -> zbus::fdo::Result<u16>;\n\n #[dbus_proxy(property)]\n\n fn set_volume(&self, volume: u16) -> zbus::fdo::Result<()>;\n\n #[dbus_proxy(property)]\n\n fn endpoint(&self) -> zbus::fdo::Result<MediaEndpoint>;\n\n}\n", "file_path": "src/platform/bluez/media/transport.rs", "rank": 38, "score": 72047.34204728983 }, { "content": "pub trait MeshNetwork {\n\n fn join(\n\n &self,\n\n app_root: MeshApplication<impl MeshApplicationDelegate>,\n\n uuid: crate::RawUuid,\n\n ) -> zbus::Result<()>;\n\n fn cancel(&self) -> zbus::Result<()>;\n\n fn attach(\n\n &self,\n\n app_root: MeshApplication<impl MeshApplicationDelegate>,\n\n token: u64,\n\n ) -> zbus::Result<(MeshNode, NodeConfiguration)>;\n\n fn leave(&self, token: u64) -> zbus::Result<()>;\n\n fn create_network(\n\n &self,\n\n app_root: MeshApplication<impl MeshApplicationDelegate>,\n\n uuid: crate::RawUuid,\n\n ) -> zbus::Result<u64>;\n\n #[allow(clippy::too_many_arguments)]\n\n fn import(\n", "file_path": "src/platform/bluez/mesh/network.rs", "rank": 39, "score": 72047.34204728983 }, { "content": "pub trait MediaPlayer {\n\n fn play(&self) -> zbus::Result<()>;\n\n fn pause(&self) -> zbus::Result<()>;\n\n fn stop(&self) -> zbus::Result<()>;\n\n fn next(&self) -> zbus::Result<()>;\n\n fn previous(&self) -> zbus::Result<()>;\n\n fn fast_forward(&self) -> zbus::Result<()>;\n\n fn rewind(&self) 
-> zbus::Result<()>;\n\n\n\n #[dbus_proxy(property)]\n\n fn equalizer(&self) -> zbus::fdo::Result<EqualizerStatus>;\n\n #[dbus_proxy(property)]\n\n fn set_equalizer(&self, equalizer: EqualizerStatus) -> zbus::fdo::Result<()>;\n\n #[dbus_proxy(property)]\n\n fn repeat(&self) -> zbus::fdo::Result<RepeatStatus>;\n\n #[dbus_proxy(property)]\n\n fn set_repeat(&self, repeat: RepeatStatus) -> zbus::fdo::Result<()>;\n\n #[dbus_proxy(property)]\n\n fn shuffle(&self) -> zbus::fdo::Result<ShuffleScanStatus>;\n\n #[dbus_proxy(property)]\n", "file_path": "src/platform/bluez/media/player.rs", "rank": 40, "score": 72047.34204728983 }, { "content": "#[derive(Debug, Clone, zvariant_derive::Type, serde::Serialize, serde::Deserialize)]\n\npub trait AdvertisementMonitorManager {\n\n fn register_monitor(&self, application: AdvertisementMonitor) -> zbus::Result<()>;\n\n fn unregister_monitor(&self, application: AdvertisementMonitor) -> zbus::Result<()>;\n\n\n\n #[dbus_proxy(property)]\n\n fn supported_monitor_types(&self) -> zbus::fdo::Result<crate::ZvariantableArray<MonitorType>>;\n\n #[dbus_proxy(property)]\n\n fn supported_features(&self) -> zbus::fdo::Result<crate::ZvariantableArray<MonitorFeature>>;\n\n}\n", "file_path": "src/platform/bluez/advertisement_monitor.rs", "rank": 41, "score": 70445.10090360013 }, { "content": "pub trait MediaFolderController {\n\n fn search(&self, value: String, filter: MediaPlayerFilter) -> zbus::Result<String>;\n\n fn list_items(&self, filter: MediaPlayerFilter) -> zbus::Result<Vec<(String, MediaItemMetadata)>>;\n\n fn change_folder(&self, folder: String) -> zbus::Result<()>;\n\n\n\n #[dbus_proxy(property)]\n\n fn number_of_items(&self) -> zbus::fdo::Result<u32>;\n\n #[dbus_proxy(property, name = \"Name\")]\n\n fn folder_name(&self) -> zbus::fdo::Result<String>;\n\n}\n", "file_path": "src/platform/bluez/media/folder.rs", "rank": 42, "score": 70438.60281344736 }, { "content": "pub trait MediaEndpointClient {\n\n fn set_configuration(&self, transport: 
MediaTransport, properties: MediaEndpointProperties) -> zbus::Result<()>;\n\n fn select_configuration(&self, capabilities: MediaEndpointCapabilities) -> zbus::Result<MediaTransportConfiguration>;\n\n fn clear_configuration(&self, transport: MediaTransport) -> zbus::Result<()>;\n\n fn release(&self) -> zbus::Result<()>;\n\n\n\n #[dbus_proxy(property, name = \"UUID\")]\n\n fn uuid(&self) -> zbus::fdo::Result<String>;\n\n #[dbus_proxy(property)]\n\n fn codec(&self) -> zbus::fdo::Result<u8>;\n\n #[dbus_proxy(property)]\n\n fn capabilities(&self) -> zbus::fdo::Result<MediaEndpointCapabilities>;\n\n #[dbus_proxy(property)]\n\n fn device(&self) -> zbus::fdo::Result<crate::platform::bluez::device::Device>;\n\n}\n", "file_path": "src/platform/bluez/media/endpoint.rs", "rank": 43, "score": 70438.60281344736 }, { "content": "#[cfg(target_os = \"linux\")]\n\npub trait GattDescriptorDelegate:\n\n GattDescriptorDelegateInner +\n\n GattDescriptorDelegateInnerDbusExt +\n\n zvariant::Type\n\n{}\n\n\n", "file_path": "src/peripheral/descriptor/descriptor_delegate.rs", "rank": 44, "score": 70438.60281344736 }, { "content": "pub trait MediaItemController {\n\n fn play(&self) -> zbus::Result<()>;\n\n fn add_to_now_playing(&self) -> zbus::Result<()>;\n\n\n\n // #[dbus_proxy(property)]\n\n // fn player(&self) -> zbus::fdo::Result<MediaPlayer>;\n\n #[dbus_proxy(property)]\n\n fn name(&self) -> zbus::fdo::Result<String>;\n\n #[dbus_proxy(property, name = \"Type\")]\n\n fn item_type(&self) -> zbus::fdo::Result<MediaItemType>;\n\n #[dbus_proxy(property)]\n\n fn folder_type(&self) -> zbus::fdo::Result<MediaFolderType>;\n\n #[dbus_proxy(property)]\n\n fn playable(&self) -> zbus::fdo::Result<bool>;\n\n #[dbus_proxy(property)]\n\n fn metadata(&self) -> zbus::fdo::Result<MediaItemMetadata>;\n\n}\n", "file_path": "src/platform/bluez/media/item.rs", "rank": 45, "score": 70438.60281344736 }, { "content": "pub trait GattServiceDelegate {\n\n const UUID: u16;\n\n const IS_PRIMARY: bool;\n\n fn create() 
-> Self;\n\n fn characteristics(&mut self) -> HashSet<GattCharacteristic>;\n\n}\n", "file_path": "src/peripheral/service/service_delegate.rs", "rank": 46, "score": 70438.60281344736 }, { "content": "pub trait SdpShortUUID<T: Into<u32>> {\n\n #[inline(always)]\n\n fn from_sdp_short_uuid(uuid: T) -> NiterResult<uuid::Uuid> {\n\n uuid::Uuid::from_fields(\n\n uuid.into(),\n\n BASE_UUID.1,\n\n BASE_UUID.2,\n\n &BASE_UUID.3.to_be_bytes(),\n\n )\n\n .map_err(Into::into)\n\n }\n\n}\n\n\n\nimpl SdpShortUUID<u16> for uuid::Uuid {}\n\nimpl SdpShortUUID<u32> for uuid::Uuid {}\n\n\n\n#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]\n\npub struct Uuid(uuid::Uuid);\n\nimpl Default for Uuid {\n\n fn default() -> Self {\n", "file_path": "src/uuid.rs", "rank": 47, "score": 69457.45051219636 }, { "content": "pub trait Peripheral: std::fmt::Debug {\n\n fn find_adapter() -> crate::NiterResult<Self> where Self: Sized;\n\n fn is_powered(&self) -> crate::NiterResult<bool>;\n\n fn is_advertising(&self) -> crate::NiterResult<bool>;\n\n fn start_advertising(&self, name: &str) -> crate::NiterResult<()>;\n\n fn stop_advertising(&self) -> crate::NiterResult<()>;\n\n fn register_gatt(&self) -> crate::NiterResult<()>;\n\n fn unregister_gatt(&self) -> crate::NiterResult<()>;\n\n\n\n fn add_service<S: GattServiceDelegate + 'static>(&mut self, service: GattService<S>) -> crate::NiterResult<()>;\n\n}\n", "file_path": "src/peripheral/mod.rs", "rank": 48, "score": 69457.45051219636 }, { "content": "pub trait Device: std::fmt::Debug {\n\n type Service; // FIXME: Actual traits\n\n type Characteristic; // FIXME: Actual traits\n\n type Descriptor; // FIXME: Actual traits\n\n\n\n fn address(&self) -> String;\n\n fn is_connected(&self) -> bool;\n\n fn rssi(&self) -> i16;\n\n fn connect(&self) -> crate::NiterResult<()>;\n\n fn disconnect(&self) -> crate::NiterResult<()>;\n\n fn characteristics(&self) -> crate::NiterResult<Vec<Self::Characteristic>>;\n\n fn services(&self) -> 
crate::NiterResult<Vec<Self::Service>>;\n\n fn descriptors(&self) -> crate::NiterResult<Vec<Self::Descriptor>>;\n\n}\n\n\n", "file_path": "src/central/mod.rs", "rank": 49, "score": 69457.45051219636 }, { "content": "pub trait MeshProvisioningManager {\n\n fn unprovisioned_scan(&self, options: UnprovisionedScanOptions) -> zbus::Result<()>;\n\n fn unprovisioned_scan_cancel(&self) -> zbus::Result<()>;\n\n fn add_node(&self, uuid: crate::Uuid, options: bluez::BlueZDummy) -> zbus::Result<()>;\n\n fn create_subnet(&self, net_index: u16) -> zbus::Result<()>;\n\n fn import_subnet(&self, net_index: u16, net_key: &[u8; 16]) -> zbus::Result<()>;\n\n fn update_subnet(&self, net_index: u16) -> zbus::Result<()>;\n\n fn delete_subnet(&self, net_index: u16) -> zbus::Result<()>;\n\n fn set_key_phase(&self, index: u16, phase: KeyPhase) -> zbus::Result<()>;\n\n fn create_app_key(&self, net_index: u16, app_index: u16) -> zbus::Result<()>;\n\n fn import_app_key(\n\n &self,\n\n net_index: u16,\n\n app_index: u16,\n\n app_key: &[u8; 16],\n\n ) -> zbus::Result<()>;\n\n fn update_app_key(&self, app_index: u16) -> zbus::Result<()>;\n\n fn delete_app_key(&self, app_index: u16) -> zbus::Result<()>;\n\n fn import_remote_node(\n\n &self,\n\n primary: u16,\n\n count: u8,\n\n device_key: &[u8; 16],\n\n ) -> zbus::Result<()>;\n\n fn delete_remote_node(&self, primary: u16, count: u8) -> zbus::Result<()>;\n\n}\n", "file_path": "src/platform/bluez/mesh/provision/manager.rs", "rank": 50, "score": 68914.54612150307 }, { "content": "pub trait Central<D: Device>: std::fmt::Debug {\n\n fn start_scan(&self) -> crate::NiterResult<()>;\n\n fn stop_scan(&self) -> crate::NiterResult<()>;\n\n fn filter_duplicates(&mut self, filter: bool) -> crate::NiterResult<()>;\n\n fn devices(&self) -> crate::NiterResult<Vec<D>>;\n\n fn get_device(&self, uuid: crate::Uuid) -> crate::NiterResult<Option<D>>;\n\n}\n", "file_path": "src/central/mod.rs", "rank": 51, "score": 65120.724492155976 }, { "content": "#[cfg(not(target_os 
= \"linux\"))]\n\npub trait GattDescriptorDelegate: GattDescriptorDelegateInner {}\n", "file_path": "src/peripheral/descriptor/descriptor_delegate.rs", "rank": 52, "score": 63787.26581594662 }, { "content": "pub trait GattDescriptorDelegateInner: Send + Sync + 'static {\n\n const UUID: u16;\n\n const METHODS_ENABLED: GattDescriptorMethodsEnabled;\n\n const ENCRYPT_READ: bool = false;\n\n const ENCRYPT_AUTH_READ: bool = false;\n\n const ENCRYPT_WRITE: bool = false;\n\n const ENCRYPT_AUTH_WRITE: bool = false;\n\n const AUTHORIZE: bool = false;\n\n\n\n fn needs_loop(&self) -> bool {\n\n !Self::METHODS_ENABLED.is_empty()\n\n }\n\n\n\n fn value(&self) -> &[u8];\n\n\n\n fn read(&self) -> NiterGattResult<Vec<u8>> {\n\n if Self::METHODS_ENABLED.contains(GattDescriptorMethodsEnabled::READ) {\n\n return Err(NiterGattError::DelegateNotImplemented(Self::METHODS_ENABLED.bits));\n\n }\n\n\n", "file_path": "src/peripheral/descriptor/descriptor_delegate.rs", "rank": 53, "score": 62198.60874942463 }, { "content": "pub trait GattCharacteristicDelegate: Send + Sync + Sized + 'static {\n\n const UUID: u16;\n\n const METHODS_ENABLED: GattCharacteristicMethodsEnabled;\n\n const SECURE_READ: bool = true;\n\n const WRITE_WITH_RESPONSE: bool = false;\n\n const WRITE_WITH_RESPONSE_SECURE: bool = true;\n\n const NOTIFY_POLL_INTERVAL_MS: u64 = 1000;\n\n\n\n // fn descriptors(&mut self) -> std::collections::HashSet<Descriptor> {\n\n // std::collections::HashSet::new()\n\n // }\n\n\n\n fn needs_loop(&self) -> bool {\n\n !Self::METHODS_ENABLED.is_empty()\n\n }\n\n\n\n fn initial_value(&self) -> Option<Vec<u8>> {\n\n None\n\n }\n\n\n", "file_path": "src/peripheral/characteristic/characteristic_delegate.rs", "rank": 54, "score": 61557.850399260766 }, { "content": "#[cfg(target_os = \"linux\")]\n\npub trait GattDescriptorDelegateInnerDbusExt: GattDescriptorDelegateInner {\n\n #[cfg(target_os = \"linux\")]\n\n fn dbus_descriptor_flags(&self) -> Vec<super::GattDescriptorFlags> {\n\n let mut ret = 
vec![];\n\n if Self::METHODS_ENABLED.contains(GattDescriptorMethodsEnabled::READ) {\n\n ret.push(super::GattDescriptorFlags::Read);\n\n }\n\n if Self::METHODS_ENABLED.contains(GattDescriptorMethodsEnabled::WRITE) {\n\n ret.push(super::GattDescriptorFlags::Write);\n\n }\n\n if Self::ENCRYPT_READ {\n\n ret.push(super::GattDescriptorFlags::EncryptRead);\n\n }\n\n if Self::ENCRYPT_WRITE {\n\n ret.push(super::GattDescriptorFlags::EncryptWrite);\n\n }\n\n if Self::ENCRYPT_AUTH_READ {\n\n ret.push(super::GattDescriptorFlags::EncryptAuthenticatedRead);\n\n }\n\n if Self::ENCRYPT_AUTH_WRITE {\n\n ret.push(super::GattDescriptorFlags::EncryptAuthenticatedWrite);\n\n }\n\n if Self::AUTHORIZE {\n\n ret.push(super::GattDescriptorFlags::Authorize);\n\n }\n\n ret\n\n }\n\n}\n\n\n", "file_path": "src/peripheral/descriptor/descriptor_delegate.rs", "rank": 55, "score": 60223.931779845596 }, { "content": "type MediaEndpoint = bool;\n\n\n\n#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, zvariant_derive::Type)]\n\npub struct MediaTransport {\n\n object_path: String\n\n}\n\n\n\nimpl MediaTransport {\n\n pub fn configuration(&self, connection: &zbus::Connection) -> crate::NiterResult<MediaTransportConfiguration> {\n\n Ok(self.get_proxy(connection)?.configuration()?)\n\n }\n\n}\n\n\n\nimpl std::str::FromStr for MediaTransport {\n\n type Err = crate::NiterError;\n\n fn from_str(s: &str) -> Result<Self, Self::Err> {\n\n Ok(Self {\n\n object_path: s.into(),\n\n })\n\n }\n\n}\n\n\n\ncrate::impl_tryfrom_zvariant!(MediaTransport);\n\ncrate::to_proxy_impl!(MediaTransport, MediaTransportProxy, \"org.bluez\");\n\n\n\n#[zbus::dbus_proxy(\n\n interface = \"org.bluez.MediaTransport1\",\n\n default_service = \"org.bluez\",\n\n // default_path = \"[variable prefix]/{hci0,hci1,...}/dev_XX_XX_XX_XX_XX_XX/fdX\"\n\n)]\n", "file_path": "src/platform/bluez/media/transport.rs", "rank": 56, "score": 52854.31068879807 }, { "content": "pub enum BlueZError {\n\n ZBus(zbus::Error),\n\n 
InvalidArguments(String),\n\n Failed(String),\n\n InProgress,\n\n NotPermitted,\n\n NotAuthorized,\n\n InvalidOffset,\n\n NotSupported,\n\n}\n\n\n\npub type NiterResult<T> = Result<T, NiterError>;\n\npub type NiterGattResult<T> = Result<T, NiterGattError>;\n", "file_path": "src/error.rs", "rank": 57, "score": 37160.25962527438 }, { "content": " #[error(transparent)]\n\n Other(#[from] anyhow::Error),\n\n}\n\n\n\n#[derive(Debug, thiserror::Error)]\n\npub enum NiterGattError {\n\n #[error(\"The delegated call (flag: {0:b}) has not been implemented on this delegate\")]\n\n DelegateNotImplemented(u8),\n\n}\n\n\n\n#[cfg(target_os = \"linux\")]\n\nimpl Into<zbus::fdo::Error> for NiterGattError {\n\n fn into(self) -> zbus::fdo::Error {\n\n zbus::fdo::Error::NotSupported(format!(\"{}\", self))\n\n }\n\n}\n\n\n\n#[cfg(target_os = \"linux\")]\n\n#[derive(Debug, zbus::DBusError)]\n\n#[dbus_error(prefix = \"org.bluez.Error\")]\n", "file_path": "src/error.rs", "rank": 58, "score": 37158.08434346795 }, { "content": "#[derive(Debug, thiserror::Error)]\n\npub enum NiterError {\n\n #[cfg(target_os = \"linux\")]\n\n #[error(transparent)]\n\n DbusError(#[from] zbus::Error),\n\n #[cfg(target_os = \"linux\")]\n\n #[error(transparent)]\n\n DbusFdoError(#[from] zbus::fdo::Error),\n\n #[cfg(target_os = \"linux\")]\n\n #[error(transparent)]\n\n BlueZError(#[from] BlueZError),\n\n #[cfg(target_os = \"linux\")]\n\n #[error(transparent)]\n\n ZvariantError(#[from] zvariant::Error),\n\n #[error(transparent)]\n\n StrumParseError(#[from] strum::ParseError),\n\n #[error(transparent)]\n\n UuidError(#[from] uuid::Error),\n\n #[error(transparent)]\n\n GattError(#[from] NiterGattError),\n", "file_path": "src/error.rs", "rank": 59, "score": 37155.81303751233 }, { "content": "#[derive(\n\n Debug,\n\n Clone,\n\n Copy,\n\n Eq,\n\n PartialEq,\n\n num_enum::TryFromPrimitive,\n\n num_enum::IntoPrimitive,\n\n serde_repr::Serialize_repr,\n\n serde_repr::Deserialize_repr,\n\n)]\n\n#[cfg_attr(target_os = 
\"linux\", derive(zvariant_derive::Type))]\n\n#[repr(u16)]\n\npub enum Unit {\n\n Unitless = 0x2700,\n\n Metre = 0x2701,\n\n Ngstrm = 0x2782,\n\n NauticalMile = 0x2783,\n\n Yard = 0x27A0,\n\n Parsec = 0x27A1,\n", "file_path": "src/spec/unit.rs", "rank": 60, "score": 36012.18507784832 }, { "content": " Inch = 0x27A2,\n\n Foot = 0x27A3,\n\n Mile = 0x27A4,\n\n Kilogram = 0x2702,\n\n Tonne = 0x2768,\n\n Pound = 0x27B8,\n\n GramPerSecond = 0x27C1,\n\n KilogramPerCubicMetre = 0x271B,\n\n MilligramPerDecilitre = 0x27B1,\n\n MillimolePerLitre = 0x27B2,\n\n Second = 0x2703,\n\n Minute = 0x2760,\n\n Hour = 0x2761,\n\n Day = 0x2762,\n\n Year = 0x27B3,\n\n Month = 0x27B4,\n\n Ampere = 0x2704,\n\n Coulomb = 0x2727,\n\n AmpereHour = 0x27B0,\n\n Volt = 0x2728,\n", "file_path": "src/spec/unit.rs", "rank": 61, "score": 36000.52552687628 }, { "content": " Ohm = 0x272A,\n\n Siemens = 0x272B,\n\n VoltPerMetre = 0x274B,\n\n CoulombPerCubicMetre = 0x274C,\n\n CoulombPerSquareMetre = 0x274E,\n\n Kelvin = 0x2705,\n\n Celsius = 0x272F,\n\n Fahrenheit = 0x27AC,\n\n Mole = 0x2706,\n\n MolePerCubicMetre = 0x271A,\n\n Candela = 0x2707,\n\n Lumen = 0x2730,\n\n LumenPerWatt = 0x27BE,\n\n LumenHour = 0x27BF,\n\n LuxHour = 0x27C0,\n\n SquareMetres = 0x2710,\n\n CubicMetres = 0x2711,\n\n MetresPerSecond = 0x2712,\n\n MetresPerSecondSquared = 0x2713,\n\n ReciprocalMetre = 0x2714,\n\n CubicMetrePerKilogram = 0x2717,\n\n Percentage = 0x27AD\n\n}\n", "file_path": "src/spec/unit.rs", "rank": 62, "score": 36000.52552687628 }, { "content": "}\n\n\n\nimpl Default for AgentCapability {\n\n fn default() -> Self {\n\n Self::KeyboardDisplay\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, zvariant_derive::Type, serde::Serialize, serde::Deserialize)]\n\npub struct Agent(String);\n\n\n\n#[zbus::dbus_proxy(\n\n interface = \"org.bluez.AgentManager1\",\n\n default_service = \"org.bluez\",\n\n default_path = \"/org/bluez/AgentManager1\"\n\n)]\n\n#[derive(Debug, Clone, zvariant_derive::Type, serde::Serialize, 
serde::Deserialize)]\n", "file_path": "src/platform/bluez/agent.rs", "rank": 63, "score": 34556.50951378313 }, { "content": "use crate::platform::bluez;\n\nuse bluez::device::Device;\n\n\n\n#[derive(\n\n Debug,\n\n Clone,\n\n Copy,\n\n strum::Display,\n\n strum::EnumString,\n\n zvariant_derive::Type,\n\n serde::Serialize,\n\n serde::Deserialize,\n\n)]\n\n#[strum(serialize_all = \"PascalCase\")]\n\npub enum AgentCapability {\n\n DisplayOnly,\n\n DisplayYesNo,\n\n KeyboardOnly,\n\n NoInputNoOutput,\n\n KeyboardDisplay,\n", "file_path": "src/platform/bluez/agent.rs", "rank": 64, "score": 34549.55344011849 }, { "content": " self.delegate.authorize_push(transfer)\n\n }\n\n fn cancel(&mut self) {\n\n self.delegate.cancel()\n\n }\n\n}\n\n\n\n#[zbus::dbus_proxy(\n\n interface = \"org.bluez.obex.AgentManager1\",\n\n default_service = \"org.bluez.obex\",\n\n default_path = \"/org/bluez/obex\"\n\n)]\n", "file_path": "src/platform/bluez/obex/agent.rs", "rank": 74, "score": 33492.96742900932 }, { "content": " fn cancel(&mut self) {\n\n self.delegate.cancel()\n\n }\n\n\n\n #[dbus_interface(property)]\n\n fn capabilities(&self) -> zvariant::Value<'_> {\n\n use std::string::ToString as _;\n\n use zvariant::Type as _;\n\n let mut arr: zvariant::Array = zvariant::Array::new(String::signature());\n\n for item in &self.capabilities {\n\n let _ = arr.append(zvariant::Value::Str(item.to_string().into()));\n\n }\n\n zvariant::Value::Array(arr)\n\n }\n\n\n\n #[dbus_interface(property)]\n\n fn out_of_band_info(&self) -> zvariant::Value<'_> {\n\n use std::string::ToString as _;\n\n use zvariant::Type as _;\n\n let mut arr: zvariant::Array = zvariant::Array::new(String::signature());\n", "file_path": "src/platform/bluez/mesh/provision/provisioning_agent.rs", "rank": 75, "score": 31596.044059792814 }, { "content": "}\n\n\n\ncrate::impl_tryfrom_zvariant!(PromptNumericKind);\n\n\n\n#[derive(\n\n Debug,\n\n Clone,\n\n Copy,\n\n strum::Display,\n\n strum::EnumString,\n\n 
zvariant_derive::Type,\n\n serde::Serialize,\n\n serde::Deserialize,\n\n)]\n\n#[strum(serialize_all = \"kebab-case\")]\n\npub enum PromptStaticKind {\n\n StaticOob,\n\n InAlpha,\n\n}\n\n\n", "file_path": "src/platform/bluez/mesh/provision/provisioning_agent.rs", "rank": 76, "score": 31585.86889443486 }, { "content": "crate::impl_tryfrom_zvariant!(PromptStaticKind);\n\n\n\n#[derive(\n\n Debug,\n\n Clone,\n\n Copy,\n\n strum::Display,\n\n strum::EnumString,\n\n zvariant_derive::Type,\n\n serde::Serialize,\n\n serde::Deserialize,\n\n)]\n\n#[strum(serialize_all = \"kebab-case\")]\n\npub enum DisplayCapability {\n\n Blink,\n\n Beep,\n\n Vibrate,\n\n OutNumeric,\n\n OutAlpha,\n\n Push,\n", "file_path": "src/platform/bluez/mesh/provision/provisioning_agent.rs", "rank": 77, "score": 31585.639790882808 }, { "content": "crate::impl_tryfrom_zvariant!(DisplayNumericKind);\n\n\n\n#[derive(\n\n Debug,\n\n Clone,\n\n Copy,\n\n strum::Display,\n\n strum::EnumString,\n\n zvariant_derive::Type,\n\n serde::Serialize,\n\n serde::Deserialize,\n\n)]\n\n#[strum(serialize_all = \"kebab-case\")]\n\npub enum PromptNumericKind {\n\n Blink,\n\n Beep,\n\n Vibrate,\n\n InNumeric,\n\n Push,\n\n Twist,\n", "file_path": "src/platform/bluez/mesh/provision/provisioning_agent.rs", "rank": 78, "score": 31585.528265917223 }, { "content": "#[derive(\n\n Debug,\n\n Clone,\n\n Copy,\n\n strum::Display,\n\n strum::EnumString,\n\n zvariant_derive::Type,\n\n serde::Serialize,\n\n serde::Deserialize,\n\n)]\n\n#[strum(serialize_all = \"kebab-case\")]\n\npub enum DisplayNumericKind {\n\n Blink,\n\n Beep,\n\n Vibrate,\n\n OutNumeric,\n\n Push,\n\n Twist,\n\n}\n\n\n", "file_path": "src/platform/bluez/mesh/provision/provisioning_agent.rs", "rank": 79, "score": 31583.62998020837 }, { "content": " Twist,\n\n InNumeric,\n\n InAlpha,\n\n StaticOob,\n\n PublicOob,\n\n}\n\n\n\ncrate::impl_tryfrom_zvariant!(DisplayCapability);\n\n\n\n#[derive(\n\n Debug,\n\n Clone,\n\n Copy,\n\n strum::Display,\n\n strum::EnumString,\n\n 
zvariant_derive::Type,\n\n serde::Serialize,\n\n serde::Deserialize,\n\n)]\n\n#[strum(serialize_all = \"kebab-case\")]\n", "file_path": "src/platform/bluez/mesh/provision/provisioning_agent.rs", "rank": 80, "score": 31583.400080901647 }, { "content": "#[zbus::dbus_interface(name = \"org.bluez.mesh.ProvisionAgent1\")]\n\nimpl<'a, T: ProvisioningAgentDelegate> ProvisioningAgent<T> {\n\n fn private_key(&self) -> Vec<u8> {\n\n self.delegate.private_key()\n\n }\n\n fn public_key(&self) -> Vec<u8> {\n\n self.delegate.public_key()\n\n }\n\n fn display_string(&self, value: String) {\n\n self.delegate.display_string(value)\n\n }\n\n fn display_numeric(&self, display_kind: DisplayNumericKind, number: u32) {\n\n self.delegate.display_numeric(display_kind, number)\n\n }\n\n fn prompt_numeric(&self, display_kind: PromptNumericKind) -> u32 {\n\n self.delegate.prompt_numeric(display_kind)\n\n }\n\n fn prompt_static(&self, display_kind: PromptStaticKind) -> [u8; 16] {\n\n self.delegate.prompt_static(display_kind)\n\n }\n", "file_path": "src/platform/bluez/mesh/provision/provisioning_agent.rs", "rank": 81, "score": 31583.168877133616 }, { "content": " for item in &self.oob_info {\n\n let _ = arr.append(zvariant::Value::Str(item.to_string().into()));\n\n }\n\n zvariant::Value::Array(arr)\n\n }\n\n\n\n #[dbus_interface(property, name = \"URI\")]\n\n fn uri(&self) -> &str {\n\n &self.uri\n\n }\n\n}\n", "file_path": "src/platform/bluez/mesh/provision/provisioning_agent.rs", "rank": 82, "score": 31583.023225532994 }, { "content": "pub enum OutOfBandInfoAvailability {\n\n Other,\n\n Uri,\n\n MachineCode2D,\n\n BarCode,\n\n Nfc,\n\n Number,\n\n String,\n\n OnBox,\n\n InBox,\n\n OnPaper,\n\n InManual,\n\n OnDevice,\n\n}\n\n\n\ncrate::impl_tryfrom_zvariant!(OutOfBandInfoAvailability);\n\n\n", "file_path": "src/platform/bluez/mesh/provision/provisioning_agent.rs", "rank": 83, "score": 31581.473417586818 }, { "content": "use crate::platform::bluez;\n\n\n\n#[derive(Debug, Clone, 
zvariant_derive::Type, serde::Serialize, serde::Deserialize)]\n\npub struct Device {\n\n pub object_path: String,\n\n}\n\n\n\nimpl From<String> for Device {\n\n fn from(object_path: String) -> Self {\n\n Self { object_path }\n\n }\n\n}\n\n\n\nimpl std::str::FromStr for Device {\n\n type Err = crate::NiterError;\n\n fn from_str(s: &str) -> Result<Self, Self::Err> {\n\n Ok(Self {\n\n object_path: s.into(),\n\n })\n\n }\n", "file_path": "src/platform/bluez/device.rs", "rank": 84, "score": 34.19065355005139 }, { "content": " data_type: u16,\n\n role: HealthApplicationRole,\n\n description: Option<String>,\n\n channel_type: Option<HealthChannelType>,\n\n}\n\n\n\nimpl zvariant::Type for HealthApplicationConfiguration {\n\n fn signature() -> zvariant::Signature<'static> {\n\n zvariant::Signature::from_str_unchecked(\"a{sv}\")\n\n }\n\n}\n\n\n\nimpl std::convert::TryInto<zvariant::Dict<'_, '_>> for HealthApplicationConfiguration {\n\n type Error = crate::NiterError;\n\n fn try_into(mut self) -> crate::NiterResult<zvariant::Dict<'static, 'static>> {\n\n use std::string::ToString as _;\n\n use zvariant::Type as _;\n\n let mut dict = zvariant::Dict::new(String::signature(), zvariant::Value::signature());\n\n dict.add(\"DataType\", self.data_type)?;\n\n dict.add(\"Role\", zvariant::Value::Str(self.role.to_string().into()))?;\n", "file_path": "src/platform/bluez/misc/health.rs", "rank": 85, "score": 31.555247301238463 }, { "content": "#[derive(Debug, Clone, zvariant_derive::Type, serde::Serialize, serde::Deserialize)]\n\npub struct Input {\n\n object_path: String,\n\n}\n\n\n\nimpl std::str::FromStr for Input {\n\n type Err = crate::NiterError;\n\n fn from_str(s: &str) -> Result<Self, Self::Err> {\n\n Ok(Self {\n\n object_path: s.into(),\n\n })\n\n }\n\n}\n\n\n\ncrate::to_proxy_impl!(Input, InputProxy, \"org.bluez\");\n\ncrate::impl_tryfrom_zvariant!(Input);\n\n\n\n#[derive(\n\n Debug,\n\n Clone,\n", "file_path": "src/platform/bluez/misc/input.rs", "rank": 86, "score": 
29.86505303551871 }, { "content": "#[derive(Debug, Clone, zvariant_derive::Type, serde::Serialize, serde::Deserialize)]\n\npub struct Network {\n\n object_path: String,\n\n}\n\n\n\nimpl std::str::FromStr for Network {\n\n type Err = crate::NiterError;\n\n fn from_str(s: &str) -> Result<Self, Self::Err> {\n\n Ok(Self {\n\n object_path: s.into(),\n\n })\n\n }\n\n}\n\n\n\ncrate::to_proxy_impl!(Network, NetworkProxy, \"org.bluez\");\n\ncrate::impl_tryfrom_zvariant!(Network);\n\n\n\n#[derive(Debug, Clone, strum::EnumString, strum::Display, serde::Serialize, serde::Deserialize)]\n\npub enum NetworkUuid {\n\n Uuid(crate::Uuid),\n", "file_path": "src/platform/bluez/misc/network.rs", "rank": 87, "score": 29.143074580996203 }, { "content": "#[derive(Debug, Clone, zvariant_derive::Type, serde::Serialize, serde::Deserialize)]\n\npub struct MediaPlayer {\n\n object_path: String\n\n}\n\n\n\nimpl std::str::FromStr for MediaPlayer {\n\n type Err = crate::NiterError;\n\n fn from_str(s: &str) -> Result<Self, Self::Err> {\n\n Ok(Self {\n\n object_path: s.into(),\n\n })\n\n }\n\n}\n\n\n\ncrate::impl_tryfrom_zvariant!(MediaPlayer);\n\ncrate::to_proxy_impl!(MediaPlayer, MediaPlayerProxy, \"org.bluez\");\n\n\n\n#[derive(\n\n Debug,\n\n Clone,\n", "file_path": "src/platform/bluez/media/player.rs", "rank": 88, "score": 29.01902213203649 }, { "content": " serde::Serialize,\n\n serde::Deserialize,\n\n)]\n\n#[strum(serialize_all = \"lowercase\")]\n\npub enum HealthChannelType {\n\n Reliable,\n\n Streaming,\n\n}\n\n\n\ncrate::impl_tryfrom_zvariant!(HealthChannelType);\n\n\n\n#[derive(Debug, Clone, zvariant_derive::Type, serde::Serialize, serde::Deserialize)]\n\npub struct HealthChannel {\n\n object_path: String,\n\n}\n\n\n\nimpl std::str::FromStr for HealthChannel {\n\n type Err = crate::NiterError;\n\n fn from_str(s: &str) -> Result<Self, Self::Err> {\n\n Ok(Self {\n\n object_path: s.into(),\n\n })\n\n }\n\n}\n\n\n\ncrate::to_proxy_impl!(HealthChannel, HealthChannelProxy, 
\"org.bluez\");\n\ncrate::impl_tryfrom_zvariant!(HealthChannel);\n\n\n", "file_path": "src/platform/bluez/misc/health.rs", "rank": 89, "score": 28.036110761121762 }, { "content": "#[derive(Debug, Clone, zvariant_derive::Type, serde::Serialize, serde::Deserialize)]\n\npub struct Battery {\n\n object_path: String,\n\n}\n\n\n\nimpl std::str::FromStr for Battery {\n\n type Err = crate::NiterError;\n\n fn from_str(s: &str) -> Result<Self, Self::Err> {\n\n Ok(Self {\n\n object_path: s.into(),\n\n })\n\n }\n\n}\n\n\n\ncrate::to_proxy_impl!(Battery, BatteryProxy, \"org.bluez\");\n\ncrate::impl_tryfrom_zvariant!(Battery);\n\n\n\n#[zbus::dbus_proxy(interface = \"org.bluez.Battery1\", default_service = \"org.bluez\")]\n", "file_path": "src/platform/bluez/misc/battery.rs", "rank": 90, "score": 27.865070210900512 }, { "content": " use std::convert::TryInto as _;\n\n let dict: zvariant::Dict = value.try_into()?;\n\n let mut ret = Self::default();\n\n ret.title = (dict.get(\"Title\")? as Option<&str>).unwrap_or_default().into();\n\n ret.artist = (dict.get(\"Artist\")? as Option<&str>).unwrap_or_default().into();\n\n ret.album = (dict.get(\"Album\")? as Option<&str>).unwrap_or_default().into();\n\n ret.genre = (dict.get(\"Genre\")? 
as Option<&str>).unwrap_or_default().into();\n\n ret.number_of_tracks = (dict.get(\"NumberOfTracks\")?.copied() as Option<u32>).unwrap_or_default();\n\n ret.number = (dict.get(\"Number\")?.copied() as Option<u32>).unwrap_or_default();\n\n ret.duration = (dict.get(\"Duration\")?.copied() as Option<u32>).unwrap_or_default();\n\n\n\n Ok(ret)\n\n }\n\n}\n\n\n\nimpl<'a> Into<zvariant::Value<'a>> for MediaItemMetadata {\n\n fn into(self) -> zvariant::Value<'a> {\n\n use zvariant::Type as _;\n\n let mut dict = zvariant::Dict::new(String::signature(), zvariant::Value::signature());\n\n let _ = dict.add(\"Title\", zvariant::Value::Str(self.title.into()));\n", "file_path": "src/platform/bluez/media/item.rs", "rank": 91, "score": 27.863334517537 }, { "content": " Gn,\n\n Panu,\n\n Nap,\n\n}\n\n\n\nimpl std::convert::TryFrom<zvariant::OwnedValue> for NetworkUuid {\n\n type Error = crate::NiterError;\n\n fn try_from(v: zvariant::OwnedValue) -> crate::NiterResult<Self> {\n\n use std::convert::TryInto as _;\n\n use std::str::FromStr as _;\n\n let s: String = v.try_into()?;\n\n if let Ok(uuid) = crate::Uuid::from_str(&s) {\n\n Ok(Self::Uuid(uuid))\n\n } else {\n\n Ok(Self::from_str(&s)?)\n\n }\n\n }\n\n}\n\n\n\nimpl zvariant::Type for NetworkUuid {\n\n fn signature() -> zvariant::Signature<'static> {\n\n zvariant::Signature::from_str_unchecked(\"s\")\n\n }\n\n}\n\n\n\n#[zbus::dbus_proxy(interface = \"org.bluez.Network1\", default_service = \"org.bluez\")]\n", "file_path": "src/platform/bluez/misc/network.rs", "rank": 92, "score": 27.684969234172883 }, { "content": " type Error = crate::NiterError;\n\n fn try_from(v: zvariant::Value<'a>) -> crate::NiterResult<Self> {\n\n use std::str::FromStr as _;\n\n let s: String = v.downcast().ok_or_else(|| zvariant::Error::IncorrectType)?;\n\n Ok(Self::from_str(&s)?)\n\n }\n\n }\n\n\n\n impl std::convert::TryFrom<zvariant::OwnedValue> for $thing {\n\n type Error = crate::NiterError;\n\n fn try_from(v: zvariant::OwnedValue) -> 
crate::NiterResult<Self> {\n\n use std::convert::TryInto as _;\n\n use std::str::FromStr as _;\n\n let s: String = v.try_into()?;\n\n Ok(Self::from_str(&s)?)\n\n }\n\n }\n\n };\n\n}\n\n\n", "file_path": "src/platform/bluez/macros.rs", "rank": 93, "score": 27.20889680555473 }, { "content": "#[derive(Debug, Clone, zvariant_derive::Type, serde::Serialize, serde::Deserialize)]\n\npub struct SimAccess {\n\n object_path: String,\n\n}\n\n\n\nimpl std::str::FromStr for SimAccess {\n\n type Err = crate::NiterError;\n\n fn from_str(s: &str) -> Result<Self, Self::Err> {\n\n Ok(Self {\n\n object_path: s.into(),\n\n })\n\n }\n\n}\n\n\n\ncrate::to_proxy_impl!(SimAccess, SimAccessProxy, \"org.bluez\");\n\ncrate::impl_tryfrom_zvariant!(SimAccess);\n\n\n\n#[zbus::dbus_proxy(interface = \"org.bluez.SimAccess1\", default_service = \"org.bluez\")]\n", "file_path": "src/platform/bluez/misc/sap.rs", "rank": 94, "score": 26.936109125538675 }, { "content": " Self {\n\n object_path: s.to_string(),\n\n }\n\n }\n\n}\n\n\n\nimpl std::str::FromStr for Adapter {\n\n type Err = crate::NiterError;\n\n fn from_str(s: &str) -> Result<Self, Self::Err> {\n\n Ok(Self {\n\n object_path: s.into(),\n\n })\n\n }\n\n}\n\n\n\ncrate::impl_tryfrom_zvariant!(Adapter);\n\ncrate::to_proxy_impl!(Adapter, AdapterProxy, \"org.bluez\");\n\n\n\nimpl Adapter {\n\n pub fn advertising_manager<'a>(\n", "file_path": "src/platform/bluez/adapter.rs", "rank": 95, "score": 26.359097923202945 }, { "content": " }\n\n}\n\n\n\n#[derive(Debug, Clone, zvariant_derive::Type, serde::Serialize, serde::Deserialize)]\n\npub struct HealthApplication {\n\n object_path: String,\n\n}\n\n\n\nimpl std::str::FromStr for HealthApplication {\n\n type Err = crate::NiterError;\n\n fn from_str(s: &str) -> Result<Self, Self::Err> {\n\n Ok(Self {\n\n object_path: s.into(),\n\n })\n\n }\n\n}\n\n\n\ncrate::to_proxy_impl!(HealthApplication, HealthManagerProxy, 
\"org.bluez\");\n\ncrate::impl_tryfrom_zvariant!(HealthApplication);\n\n\n\n#[zbus::dbus_proxy(\n\n interface = \"org.bluez.HealthManager\",\n\n default_service = \"org.bluez\",\n\n default_path = \"/org/bluez\"\n\n)]\n", "file_path": "src/platform/bluez/misc/health.rs", "rank": 96, "score": 26.353532133122993 }, { "content": " let _ = dict.append(zvariant::Value::U16(*k), zvariant::Value::Array(v.into()));\n\n }\n\n dict\n\n }\n\n fn solicit_uuids(&self) -> &Vec<crate::Uuid> {\n\n &self.solicit_uuids\n\n }\n\n fn service_data(&self) -> zvariant::Dict<'_, '_> {\n\n use zvariant::Type as _;\n\n let mut dict = zvariant::Dict::new(String::signature(), <Vec<u8>>::signature());\n\n for (k, v) in self.service_data.iter() {\n\n let _ = dict.append(\n\n zvariant::Value::Str(k.clone().into()),\n\n zvariant::Value::Array(v.into()),\n\n );\n\n }\n\n dict\n\n }\n\n fn data(&self) -> zvariant::Dict<'_, '_> {\n\n use zvariant::Type as _;\n", "file_path": "src/platform/bluez/advertising.rs", "rank": 97, "score": 26.010274522100804 }, { "content": "\n\n#[derive(Debug, Clone, Copy, zvariant_derive::Type, serde::Serialize, serde::Deserialize)]\n\npub struct ModelCaps {\n\n publish: bool,\n\n subscribe: bool,\n\n}\n\n\n\nimpl<'a> Into<zvariant::Value<'a>> for ModelCaps {\n\n fn into(self) -> zvariant::Value<'a> {\n\n use zvariant::Type as _;\n\n let mut zvdict = zvariant::Dict::new(String::signature(), bool::signature());\n\n let _ = zvdict.add(\"Publish\", self.publish);\n\n let _ = zvdict.add(\"Subscribe\", self.subscribe);\n\n zvariant::Value::Dict(zvdict)\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, zvariant_derive::Type, serde::Serialize, serde::Deserialize)]\n\npub struct ModelConfiguration {\n\n bindings: Vec<u16>,\n", "file_path": "src/platform/bluez/mesh/element.rs", "rank": 98, "score": 25.85126141080726 }, { "content": " if let Some(description) = self.description.take() {\n\n dict.add(\"Description\", zvariant::Value::Str(description.into()))?;\n\n }\n\n\n\n if let 
Some(channel_type) = self.channel_type.take() {\n\n dict.add(\n\n \"ChannelType\",\n\n zvariant::Value::Str(channel_type.to_string().into()),\n\n )?;\n\n }\n\n\n\n Ok(dict)\n\n }\n\n}\n\n\n\nimpl std::convert::TryInto<zvariant::OwnedValue> for HealthApplicationConfiguration {\n\n type Error = crate::NiterError;\n\n fn try_into(self) -> Result<zvariant::OwnedValue, Self::Error> {\n\n let dict: zvariant::Dict = self.try_into()?;\n\n Ok(zvariant::Value::Dict(dict).into())\n", "file_path": "src/platform/bluez/misc/health.rs", "rank": 99, "score": 25.136017151462198 } ]
Rust
server/src/records/users/mod.rs
Deploy-Software/Blog
495868019becb4953ff8ba2a39a8fa936095c772
use async_graphql::{Error, Result, SimpleObject}; use bcrypt::{hash, verify, DEFAULT_COST}; use chrono::DateTime; use regex::Regex; use serde::{Deserialize, Serialize}; use sqlx::PgPool; pub mod session; #[derive(sqlx::FromRow, SimpleObject, Debug, Deserialize, Serialize, Clone)] pub struct SimpleUser { pub id: i32, pub email: String, pub name: String, pub password: String, pub date: DateTime<chrono::Utc>, } impl<'a> SimpleUser { pub fn unwrap_user_session(maybe_user: Option<SimpleUser>) -> Result<SimpleUser> { match maybe_user { Some(user) => Ok(user), None => Err(Error::from("The user session doesn't exist.")), } } pub async fn from_email(pg_pool: &PgPool, email: &'a str) -> Result<Self> { match sqlx::query_as!( Self, r#" SELECT users.id, users.email, users.name, users.password, users.date FROM users WHERE email = $1 "#, email ) .fetch_optional(pg_pool) .await { Ok(maybe_user) => match maybe_user { Some(user) => Ok(user), None => Err(Error::from("The email and password combination failed.")), }, Err(error) => { println!("{}", error.to_string()); Err(Error::from( "An error occured while retrieving the user from the database.", )) } } } pub async fn from_session_token( pg_pool: &PgPool, session_token: &'a str, ) -> Result<Option<Self>> { match sqlx::query_as!( Self, r#" SELECT users.id, users.email, users.name, users.password, users.date FROM users INNER JOIN user_sessions ON users.id = user_sessions.user_id WHERE user_sessions.token = $1 "#, session_token ) .fetch_optional(pg_pool) .await { Ok(maybe_user) => Ok(maybe_user), Err(error) => { println!("{}", error.to_string()); Err(Error::from( "An error occured while retrieving the user from the database.", )) } } } pub async fn password_matches(&self, password_to_test: &'a str) -> Result<bool> { match verify(password_to_test, &self.password) { Ok(matches) => Ok(matches), Err(error) => { println!("{}", error.to_string()); Err(Error::from( "We were unable compare the password with our saved password.", )) } } } } 
#[derive(sqlx::FromRow, Debug, Deserialize, Serialize)] pub struct NewUser<'a> { pub email: &'a str, pub name: &'a str, pub password: String, } impl<'a> NewUser<'a> { pub fn new(email: &'a str, name: &'a str, password: &'a str) -> Result<Self> { let re = match Regex::new(r"(^[a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\.[a-zA-Z0-9-.]+$)") { Ok(re) => re, Err(error) => { println!("{}", error.to_string()); return Err(Error::from("Email regex could not be compiled.")); } }; if !re.is_match(email) { return Err(Error::from("Email is not valid.")); } let re = match Regex::new(r"(^[a-zA-Z0-9]{8,}$)") { Ok(re) => re, Err(error) => { println!("{}", error.to_string()); return Err(Error::from("Password regex could not be compiled.")); } }; if !re.is_match(password) { return Err(Error::from("Password is not secure enough.")); } let hashed_password = match hash(&password, DEFAULT_COST) { Ok(hashed) => hashed, Err(error) => { println!("{}", error.to_string()); return Err(Error::from("Could not hash password.")); } }; Ok(NewUser { email, name, password: hashed_password, }) } pub async fn insert(&self, pg_pool: &PgPool) -> Result<SimpleUser> { match sqlx::query_as!( SimpleUser, r#" INSERT INTO users (email, name, password) VALUES ($1, $2, $3) RETURNING id, email, name, password, date "#, &self.email, &self.name, &self.password ) .fetch_one(pg_pool) .await { Ok(user) => Ok(user), Err(error) => { println!("{}", error.to_string()); Err(Error::from("Unable to insert user in database.")) } } } }
use async_graphql::{Error, Result, SimpleObject}; use bcrypt::{hash, verify, DEFAULT_COST}; use chrono::DateTime; use regex::Regex; use serde::{Deserialize, Serialize}; use sqlx::PgPool; pub mod session; #[derive(sqlx::FromRow, SimpleObject, Debug, Deserialize, Serialize, Clone)] pub struct SimpleUser { pub id: i32, pub email: String, pub name: String, pub password: String, pub date: DateTime<chrono::Utc>, } impl<'a> SimpleUser { pub fn unwrap_user_session(maybe_user: Option<SimpleUser>) -> Result<SimpleUser> { match maybe_user { Some(user) => Ok(user), None => Err(Error::from("The user session doesn't exist.")), } } pub async fn from_email(pg_pool: &PgPool, email: &'a str) -> Result<Self> { match sqlx::query_as!( Self, r#" SELECT users.id, users.email, users.name, users.password, users.date FROM users WHERE email = $1 "#, email ) .fetch_optional(pg_pool) .await { Ok(maybe_user) => match maybe_user { Some(user) => Ok(user), None => Err(Error::from("The email and password combination failed.")), }, Err(error) => { println!("{}", error.to_string()); Err(Error::from( "An error occured while retrieving the user from the database.", )) } } }
pub async fn password_matches(&self, password_to_test: &'a str) -> Result<bool> { match verify(password_to_test, &self.password) { Ok(matches) => Ok(matches), Err(error) => { println!("{}", error.to_string()); Err(Error::from( "We were unable compare the password with our saved password.", )) } } } } #[derive(sqlx::FromRow, Debug, Deserialize, Serialize)] pub struct NewUser<'a> { pub email: &'a str, pub name: &'a str, pub password: String, } impl<'a> NewUser<'a> { pub fn new(email: &'a str, name: &'a str, password: &'a str) -> Result<Self> { let re = match Regex::new(r"(^[a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\.[a-zA-Z0-9-.]+$)") { Ok(re) => re, Err(error) => { println!("{}", error.to_string()); return Err(Error::from("Email regex could not be compiled.")); } }; if !re.is_match(email) { return Err(Error::from("Email is not valid.")); } let re = match Regex::new(r"(^[a-zA-Z0-9]{8,}$)") { Ok(re) => re, Err(error) => { println!("{}", error.to_string()); return Err(Error::from("Password regex could not be compiled.")); } }; if !re.is_match(password) { return Err(Error::from("Password is not secure enough.")); } let hashed_password = match hash(&password, DEFAULT_COST) { Ok(hashed) => hashed, Err(error) => { println!("{}", error.to_string()); return Err(Error::from("Could not hash password.")); } }; Ok(NewUser { email, name, password: hashed_password, }) } pub async fn insert(&self, pg_pool: &PgPool) -> Result<SimpleUser> { match sqlx::query_as!( SimpleUser, r#" INSERT INTO users (email, name, password) VALUES ($1, $2, $3) RETURNING id, email, name, password, date "#, &self.email, &self.name, &self.password ) .fetch_one(pg_pool) .await { Ok(user) => Ok(user), Err(error) => { println!("{}", error.to_string()); Err(Error::from("Unable to insert user in database.")) } } } }
pub async fn from_session_token( pg_pool: &PgPool, session_token: &'a str, ) -> Result<Option<Self>> { match sqlx::query_as!( Self, r#" SELECT users.id, users.email, users.name, users.password, users.date FROM users INNER JOIN user_sessions ON users.id = user_sessions.user_id WHERE user_sessions.token = $1 "#, session_token ) .fetch_optional(pg_pool) .await { Ok(maybe_user) => Ok(maybe_user), Err(error) => { println!("{}", error.to_string()); Err(Error::from( "An error occured while retrieving the user from the database.", )) } } }
function_block-full_function
[ { "content": "type Result<T> = std::result::Result<T, Box<dyn std::error::Error + Send + Sync>>;\n\n\n\npub struct AuthToken(String);\n", "file_path": "server/src/main.rs", "rank": 0, "score": 78968.49216649115 }, { "content": "#[wasm_bindgen(start)]\n\npub fn run_app() {\n\n App::<Model>::new().mount_to_body();\n\n}\n", "file_path": "app/sample.rs", "rank": 1, "score": 78486.01781871542 }, { "content": "#[wasm_bindgen(start)]\n\npub fn run_app() {\n\n App::<RootModel>::new().mount_to_body();\n\n}\n", "file_path": "app/src/lib.rs", "rank": 2, "score": 76115.08072833662 }, { "content": "CREATE UNIQUE INDEX user_sessions_idx ON user_sessions (token);", "file_path": "server/migrations/20210207103126_user_sessions.sql", "rank": 3, "score": 67685.1272122176 }, { "content": "CREATE TABLE user_sessions (\n\n id serial PRIMARY KEY,\n\n user_id integer NOT NULL REFERENCES users (id),\n\n token varchar(255) NOT NULL,\n\n date timestamp with time zone NOT NULL DEFAULT now()\n\n);\n", "file_path": "server/migrations/20210207103126_user_sessions.sql", "rank": 4, "score": 66142.25513503126 }, { "content": "use async_graphql::{Error, Result};\n\nuse rand::{distributions::Alphanumeric, thread_rng, Rng};\n\nuse serde::{Deserialize, Serialize};\n\nuse sqlx::{postgres::PgQueryResult, PgPool};\n\n\n\n#[derive(sqlx::FromRow, Debug, Deserialize, Serialize)]\n\npub struct NewSession {\n\n pub token: String,\n\n}\n\n\n\nimpl<'a> NewSession {\n\n pub fn get_token(&self) -> String {\n\n self.token.clone()\n\n }\n\n\n\n fn generate_token() -> String {\n\n thread_rng()\n\n .sample_iter(&Alphanumeric)\n\n .take(50)\n\n .map(char::from)\n", "file_path": "server/src/records/users/session.rs", "rank": 5, "score": 53386.50240767461 }, { "content": " .collect()\n\n }\n\n\n\n pub fn make() -> Self {\n\n NewSession {\n\n token: Self::generate_token(),\n\n }\n\n }\n\n\n\n pub async fn insert(&self, pg_pool: &PgPool, user_id: i32) -> Result<PgQueryResult> {\n\n match sqlx::query!(\n\n r#\"\n\n INSERT 
INTO user_sessions\n\n (user_id, token)\n\n VALUES\n\n ($1, $2)\n\n \"#,\n\n user_id,\n\n &self.token\n\n )\n", "file_path": "server/src/records/users/session.rs", "rank": 6, "score": 53384.36773454108 }, { "content": " .execute(pg_pool)\n\n .await\n\n {\n\n Ok(done) => Ok(done),\n\n Err(error) => {\n\n println!(\"{}\", error.to_string());\n\n Err(Error::from(\"Unable to insert user session in database.\"))\n\n }\n\n }\n\n }\n\n}\n", "file_path": "server/src/records/users/session.rs", "rank": 7, "score": 53374.2177286399 }, { "content": "struct Model {\n\n link: ComponentLink<Self>,\n\n value: i64,\n\n}\n\n\n", "file_path": "app/sample.rs", "rank": 17, "score": 43112.19817728059 }, { "content": "struct QueryRoot;\n", "file_path": "server/src/main.rs", "rank": 18, "score": 40383.68483553973 }, { "content": "struct MutationRoot;\n\n\n\npub async fn db_connection() -> Result<PgPool> {\n\n let database_url = env::var(\"DATABASE_URL\").expect(\"DATABASE_URL NOT FOUND\");\n\n Ok(PgPool::connect(&database_url).await?)\n\n}\n\n\n\n#[tokio::main]\n\nasync fn main() {\n\n let pg_pool: PgPool = db_connection().await.expect(\"Database connection failed.\");\n\n sqlx::migrate!()\n\n .run(&pg_pool)\n\n .await\n\n .expect(\"Database migrations failed\");\n\n\n\n let schema = Schema::build(QueryRoot, MutationRoot, EmptySubscription)\n\n .data(pg_pool)\n\n .finish();\n\n\n\n let graphql_post = warp::path(\"graphql\")\n", "file_path": "server/src/main.rs", "rank": 19, "score": 40383.68483553973 }, { "content": "CREATE TABLE users (\n\n id serial PRIMARY KEY,\n\n email varchar(255) NOT NULL,\n\n name varchar(255) NOT NULL,\n\n password varchar(255) NOT NULL,\n\n date timestamp with time zone NOT NULL DEFAULT now()\n\n);", "file_path": "server/migrations/20210207103115_users.sql", "rank": 20, "score": 34866.96054657046 }, { "content": " ) -> Result<&'a str> {\n\n settings::add(ctx, key, value).await\n\n }\n\n\n\n async fn sign_up<'a>(\n\n &self,\n\n ctx: &'a Context<'_>,\n\n email: 
String,\n\n name: String,\n\n password: String,\n\n ) -> Result<&'a str> {\n\n authorization::sign_up(ctx, email, name, password).await\n\n }\n\n\n\n async fn sign_in<'a>(\n\n &self,\n\n ctx: &'a Context<'_>,\n\n email: String,\n\n password: String,\n\n ) -> Result<String> {\n", "file_path": "server/src/schema/mod.rs", "rank": 21, "score": 27829.629353933335 }, { "content": " authorization::sign_in(ctx, email, password).await\n\n }\n\n\n\n async fn new_post<'a>(\n\n &self,\n\n ctx: &'a Context<'_>,\n\n slug: String,\n\n title: String,\n\n text: String,\n\n summary: String,\n\n ) -> Result<Post> {\n\n posts::new(ctx, &slug, &title, &text, &summary).await\n\n }\n\n\n\n async fn update_post<'a>(\n\n &self,\n\n ctx: &'a Context<'_>,\n\n post_id: i32,\n\n title: Option<String>,\n\n text: Option<String>,\n\n ) -> Result<&'a str> {\n\n posts::update(ctx, post_id, title, text).await\n\n }\n\n}\n", "file_path": "server/src/schema/mod.rs", "rank": 22, "score": 27826.290193663397 }, { "content": " async fn post<'a>(&self, ctx: &'a Context<'_>, post_id: i32) -> Result<Option<Post>> {\n\n posts::get(ctx, post_id).await\n\n }\n\n\n\n async fn settings<'a>(&self, ctx: &'a Context<'_>) -> Result<HashMap<String, String>> {\n\n settings::get_all(ctx).await\n\n }\n\n\n\n async fn ping<'a>(&self, _ctx: &'a Context<'_>) -> &'a str {\n\n \"Pong\"\n\n }\n\n}\n\n\n\n#[Object]\n\nimpl MutationRoot {\n\n async fn add_setting<'a>(\n\n &self,\n\n ctx: &'a Context<'_>,\n\n key: String,\n\n value: String,\n", "file_path": "server/src/schema/mod.rs", "rank": 23, "score": 27822.491825605288 }, { "content": "use crate::records::authorities::Authorities;\n\nuse crate::records::posts::Post;\n\nuse crate::{MutationRoot, QueryRoot};\n\nuse async_graphql::{Context, Object, Result};\n\nuse std::collections::HashMap;\n\n\n\nmod authorization;\n\nmod posts;\n\nmod settings;\n\n\n\n#[Object]\n\nimpl QueryRoot {\n\n async fn authorization<'a>(&self, ctx: &'a Context<'_>) -> Result<Option<Authorities>> {\n\n 
authorization::get(ctx).await\n\n }\n\n\n\n async fn posts<'a>(&self, ctx: &'a Context<'_>) -> Result<Vec<Post>> {\n\n posts::get_all(ctx).await\n\n }\n\n\n", "file_path": "server/src/schema/mod.rs", "rank": 24, "score": 27818.29790907064 }, { "content": "pub mod authorities;\n\npub mod posts;\n\npub mod settings;\n\npub mod users;\n", "file_path": "server/src/records/mod.rs", "rank": 25, "score": 27813.219000402194 }, { "content": "pub mod toolbar;\n", "file_path": "app/src/components/mod.rs", "rank": 26, "score": 27808.499822102694 }, { "content": "use async_graphql::{Error, Result, SimpleObject};\n\nuse chrono::DateTime;\n\nuse serde::{Deserialize, Serialize};\n\nuse sqlx::PgPool;\n\n\n\n#[derive(sqlx::FromRow, SimpleObject, Debug, Deserialize, Serialize, Clone)]\n\npub struct Settings {\n\n pub id: i32,\n\n pub key: String,\n\n pub value: String,\n\n pub created_at: DateTime<chrono::Utc>,\n\n}\n\n\n\nimpl<'a> Settings {\n\n pub async fn all(pg_pool: &PgPool) -> Result<Vec<Self>> {\n\n match sqlx::query_as!(\n\n Self,\n\n r#\"\n\n SELECT\n\n settings.id,\n", "file_path": "server/src/records/settings.rs", "rank": 27, "score": 42.30473403166777 }, { "content": ") -> Result<&'a str> {\n\n let pg_pool = ctx.data::<PgPool>()?;\n\n let new_user = NewUser::new(&email, &name, &password)?;\n\n new_user.insert(&pg_pool).await?;\n\n Ok(\"OK\")\n\n}\n\n\n\npub async fn sign_in<'a>(ctx: &'a Context<'_>, email: String, password: String) -> Result<String> {\n\n let pg_pool = ctx.data::<PgPool>()?;\n\n let user = SimpleUser::from_email(&pg_pool, &email).await?;\n\n if !user.password_matches(&password).await? 
{\n\n return Err(Error::from(\"The email and password combination failed.\"));\n\n }\n\n let user_session = NewSession::make();\n\n user_session.insert(&pg_pool, user.id).await?;\n\n Ok(user_session.get_token())\n\n}\n", "file_path": "server/src/schema/authorization.rs", "rank": 28, "score": 40.45093758823065 }, { "content": "use async_graphql::{Error, Result, SimpleObject};\n\nuse chrono::DateTime;\n\nuse serde::{Deserialize, Serialize};\n\nuse sqlx::PgPool;\n\n\n\n#[derive(sqlx::FromRow, SimpleObject, Debug, Deserialize, Serialize, Clone)]\n\npub struct Post {\n\n pub id: i32,\n\n pub slug: String,\n\n pub title: String,\n\n pub text: String,\n\n pub summary: String,\n\n pub created_at: DateTime<chrono::Utc>,\n\n}\n\n\n\nimpl<'a> Post {\n\n pub async fn all(pg_pool: &PgPool) -> Result<Vec<Self>> {\n\n match sqlx::query_as!(\n\n Self,\n\n r#\"\n", "file_path": "server/src/records/posts.rs", "rank": 29, "score": 39.66785024412935 }, { "content": "use crate::records::users::SimpleUser;\n\nuse async_graphql::{Result, SimpleObject};\n\nuse serde::{Deserialize, Serialize};\n\nuse sqlx::PgPool;\n\n\n\n#[derive(SimpleObject, Debug, Deserialize, Serialize, Clone)]\n\npub struct Authorities {\n\n valid_token: bool,\n\n}\n\n\n\nimpl Authorities {\n\n pub async fn get(pg_pool: &PgPool, session_token: &str) -> Result<Option<Self>> {\n\n let user = SimpleUser::from_session_token(pg_pool, session_token).await?;\n\n Ok(Some(Self {\n\n valid_token: user.is_some(),\n\n }))\n\n }\n\n}\n", "file_path": "server/src/records/authorities.rs", "rank": 30, "score": 36.44482063473821 }, { "content": "use crate::records::authorities::Authorities;\n\nuse crate::records::users::{session::NewSession, NewUser, SimpleUser};\n\nuse crate::AuthToken;\n\nuse async_graphql::{Context, Error, Result};\n\nuse sqlx::PgPool;\n\n\n\npub async fn get<'a>(ctx: &'a Context<'_>) -> Result<Option<Authorities>> {\n\n let pg_pool = ctx.data::<PgPool>()?;\n\n let token = ctx.data_opt::<AuthToken>();\n\n match 
token {\n\n Some(token) => Authorities::get(&pg_pool, &token.0).await,\n\n None => Ok(None),\n\n }\n\n}\n\n\n\npub async fn sign_up<'a>(\n\n ctx: &'a Context<'_>,\n\n email: String,\n\n name: String,\n\n password: String,\n", "file_path": "server/src/schema/authorization.rs", "rank": 31, "score": 34.84069404267168 }, { "content": " post_id\n\n )\n\n .fetch_optional(pg_pool)\n\n .await\n\n {\n\n Ok(post) => match post {\n\n Some(exists) => Ok(Some(exists)),\n\n None => Ok(None),\n\n },\n\n Err(error) => {\n\n println!(\"{}\", error.to_string());\n\n Err(Error::from(\n\n \"An error occured while retrieving the posts from the database.\",\n\n ))\n\n }\n\n }\n\n }\n\n\n\n pub async fn update(\n\n pg_pool: &PgPool,\n", "file_path": "server/src/records/posts.rs", "rank": 32, "score": 32.699774214491995 }, { "content": "}\n\n\n\npub async fn update<'a>(\n\n ctx: &'a Context<'_>,\n\n post_id: i32,\n\n title: Option<String>,\n\n text: Option<String>,\n\n) -> Result<&'a str> {\n\n let pg_pool = ctx.data::<PgPool>()?;\n\n let token = match ctx.data_opt::<AuthToken>() {\n\n Some(token) => token,\n\n None => {\n\n return Err(Error::from(\"No session token found.\"));\n\n }\n\n };\n\n let maybe_user = SimpleUser::from_session_token(&pg_pool, &token.0).await?;\n\n let user = SimpleUser::unwrap_user_session(maybe_user)?;\n\n Post::update(&pg_pool, post_id, user.id, title, text).await?;\n\n Ok(\"Updated!\")\n\n}\n", "file_path": "server/src/schema/posts.rs", "rank": 33, "score": 31.21478868623603 }, { "content": "}\n\n\n\npub async fn new<'a>(\n\n ctx: &'a Context<'_>,\n\n slug: &'a str,\n\n title: &'a str,\n\n text: &'a str,\n\n summary: &'a str,\n\n) -> Result<Post> {\n\n let pg_pool = ctx.data::<PgPool>()?;\n\n let token = match ctx.data_opt::<AuthToken>() {\n\n Some(token) => token,\n\n None => {\n\n return Err(Error::from(\"No session token found.\"));\n\n }\n\n };\n\n let maybe_user = SimpleUser::from_session_token(&pg_pool, &token.0).await?;\n\n let user = 
SimpleUser::unwrap_user_session(maybe_user)?;\n\n let new_post = NewPost::new(slug, title, text, summary, user.id)?;\n\n new_post.insert(&pg_pool).await\n", "file_path": "server/src/schema/posts.rs", "rank": 34, "score": 26.565040356448016 }, { "content": " iss: None,\n\n link,\n\n error: None,\n\n success: None,\n\n email: String::from(\"\"),\n\n email_error: None,\n\n name: String::from(\"\"),\n\n name_error: None,\n\n password: String::from(\"\"),\n\n password_error: None,\n\n blog: String::from(\"\"),\n\n blog_error: None,\n\n }\n\n }\n\n fn change(&mut self, _props: Self::Properties) -> bool {\n\n false\n\n }\n\n fn update(&mut self, msg: Self::Message) -> bool {\n\n use Msg::*;\n\n\n", "file_path": "app/src/initial.rs", "rank": 35, "score": 26.375016082020682 }, { "content": "#[cynic(\n\n schema_path = \"schema.graphql\",\n\n query_module = \"query_dsl\",\n\n graphql_type = \"MutationRoot\",\n\n argument_struct = \"InitialArguments\"\n\n)]\n\n#[serde(rename_all = \"camelCase\")]\n\npub struct SignUpConnection {\n\n #[arguments(email = args.email.clone(), name = args.name.clone(), password = args.password.clone())]\n\n sign_up: String,\n\n #[arguments(key = args.key.clone(), value = args.title.clone())]\n\n add_setting: String,\n\n}\n\n\n\n#[derive(Debug)]\n\npub enum Msg {\n\n SignUp,\n\n ReceiveResponse(Result<GraphQLResponse<SignUpConnection>, anyhow::Error>),\n\n BlogInputReceived(String),\n\n EmailInputReceived(String),\n", "file_path": "app/src/initial.rs", "rank": 36, "score": 25.507469686519652 }, { "content": "\n\n#[derive(sqlx::FromRow, Debug, Deserialize, Serialize)]\n\npub struct NewSetting<'a> {\n\n pub key: &'a str,\n\n pub value: &'a str,\n\n}\n\n\n\nimpl<'a> NewSetting<'a> {\n\n pub fn new(key: &'a str, value: &'a str) -> Result<Self> {\n\n Ok(Self { key, value })\n\n }\n\n\n\n pub async fn insert(&self, pg_pool: &PgPool) -> Result<Settings> {\n\n match sqlx::query_as!(\n\n Settings,\n\n r#\"\n\n INSERT INTO settings\n\n (key, value)\n\n 
VALUES\n\n ($1, $2)\n", "file_path": "server/src/records/settings.rs", "rank": 37, "score": 25.36686323497902 }, { "content": " success: None,\n\n email: String::from(\"\"),\n\n email_error: None,\n\n password: String::from(\"\"),\n\n password_error: None,\n\n router_agent: RouteAgent::bridge(link.callback(|_| Msg::Ignore)),\n\n link,\n\n }\n\n }\n\n fn change(&mut self, _props: Self::Properties) -> bool {\n\n false\n\n }\n\n fn update(&mut self, msg: Self::Message) -> bool {\n\n use Msg::*;\n\n\n\n match msg {\n\n SignIn => {\n\n if self.email.is_empty() {\n\n self.email_error = Some(\"Your email is not valid\".into());\n\n return true;\n", "file_path": "app/src/sign_in.rs", "rank": 38, "score": 24.077570512365128 }, { "content": "use {\n\n crate::{\n\n records::{\n\n posts::{NewPost, Post},\n\n users::SimpleUser,\n\n },\n\n AuthToken,\n\n },\n\n async_graphql::{Context, Error, Result},\n\n sqlx::PgPool,\n\n};\n\n\n\npub async fn get_all<'a>(ctx: &'a Context<'_>) -> Result<Vec<Post>> {\n\n let pg_pool = ctx.data::<PgPool>()?;\n\n Post::all(&pg_pool).await\n\n}\n\n\n\npub async fn get<'a>(ctx: &'a Context<'_>, post_id: i32) -> Result<Option<Post>> {\n\n let pg_pool = ctx.data::<PgPool>()?;\n\n Post::get(&pg_pool, post_id).await\n", "file_path": "server/src/schema/posts.rs", "rank": 39, "score": 23.977279074414344 }, { "content": "#[cynic(\n\n schema_path = \"schema.graphql\",\n\n query_module = \"query_dsl\",\n\n graphql_type = \"MutationRoot\",\n\n argument_struct = \"SignInArguments\"\n\n)]\n\n#[serde(rename_all = \"camelCase\")]\n\npub struct SignInConnection {\n\n #[arguments(email = args.email.clone(), password = args.password.clone())]\n\n sign_in: String,\n\n}\n\n\n\n#[derive(PartialEq, Properties, Clone)]\n\npub struct Props {}\n\n\n\n#[derive(Debug)]\n\npub enum Msg {\n\n SignIn,\n\n ReceiveResponse(Result<GraphQLResponse<SignInConnection>, anyhow::Error>),\n\n EmailInputReceived(String),\n", "file_path": "app/src/sign_in.rs", "rank": 40, "score": 
23.569827060358886 }, { "content": " NameInputReceived(String),\n\n PasswordInputReceived(String),\n\n ClearNotifications,\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct InitialModel {\n\n fetch_task: Option<FetchTask>,\n\n iss: Option<SignUpConnection>,\n\n link: ComponentLink<Self>,\n\n email: String,\n\n email_error: Option<String>,\n\n name: String,\n\n name_error: Option<String>,\n\n password: String,\n\n password_error: Option<String>,\n\n blog: String,\n\n blog_error: Option<String>,\n\n error: Option<String>,\n\n success: Option<String>,\n", "file_path": "app/src/initial.rs", "rank": 41, "score": 23.476977976060148 }, { "content": " .execute(pg_pool)\n\n .await\n\n {\n\n Ok(_post) => {}\n\n Err(error) => {\n\n println!(\"{}\", error.to_string());\n\n return Err(Error::from(\n\n \"An error occured while updating the post title in the database.\",\n\n ));\n\n }\n\n }\n\n }\n\n None => {}\n\n }\n\n\n\n match text {\n\n Some(updated_text) => {\n\n match sqlx::query_as!(\n\n Self,\n\n r#\"\n", "file_path": "server/src/records/posts.rs", "rank": 42, "score": 23.151624516914886 }, { "content": " }\n\n }\n\n }\n\n\n\n pub async fn get(pg_pool: &PgPool, post_id: i32) -> Result<Option<Self>> {\n\n match sqlx::query_as!(\n\n Self,\n\n r#\"\n\n SELECT\n\n posts.id,\n\n posts.slug,\n\n posts.title,\n\n posts.text,\n\n posts.summary,\n\n posts.created_at\n\n FROM\n\n posts\n\n WHERE\n\n posts.id = $1\n\n \"#,\n", "file_path": "server/src/records/posts.rs", "rank": 43, "score": 22.633544184248038 }, { "content": " post_id: i32,\n\n _user_id: i32,\n\n title: Option<String>,\n\n text: Option<String>,\n\n ) -> Result<()> {\n\n match title {\n\n Some(updated_title) => {\n\n match sqlx::query_as!(\n\n Self,\n\n r#\"\n\n UPDATE\n\n posts\n\n SET\n\n title = $1\n\n WHERE\n\n id = $2\n\n \"#,\n\n updated_title,\n\n post_id\n\n )\n", "file_path": "server/src/records/posts.rs", "rank": 44, "score": 22.104395731457004 }, { "content": "use crate::query_dsl;\n\nuse 
cynic::GraphQLResponse;\n\nuse cynic::MutationBuilder;\n\nuse serde::Deserialize;\n\nuse yew::{\n\n format::Json,\n\n prelude::*,\n\n services::fetch::{FetchService, FetchTask, Request, Response},\n\n};\n\n\n\n#[derive(cynic::FragmentArguments)]\n\npub struct InitialArguments {\n\n email: String,\n\n name: String,\n\n password: String,\n\n key: String,\n\n title: String,\n\n}\n\n\n\n#[derive(cynic::QueryFragment, Debug, Deserialize)]\n", "file_path": "app/src/initial.rs", "rank": 45, "score": 21.684725977735255 }, { "content": " SELECT\n\n posts.id,\n\n posts.slug,\n\n posts.title,\n\n posts.text,\n\n posts.summary,\n\n posts.created_at\n\n FROM\n\n posts\n\n \"#,\n\n )\n\n .fetch_all(pg_pool)\n\n .await\n\n {\n\n Ok(posts) => Ok(posts),\n\n Err(error) => {\n\n println!(\"{}\", error.to_string());\n\n Err(Error::from(\n\n \"An error occured while retrieving the posts from the database.\",\n\n ))\n", "file_path": "server/src/records/posts.rs", "rank": 46, "score": 21.622338539131068 }, { "content": " }\n\n }\n\n None => {}\n\n }\n\n Ok(())\n\n }\n\n}\n\n\n\n#[derive(sqlx::FromRow, Debug, Deserialize, Serialize)]\n\npub struct NewPost<'a> {\n\n pub slug: &'a str,\n\n pub title: &'a str,\n\n pub text: &'a str,\n\n pub summary: &'a str,\n\n pub created_by: i32,\n\n}\n\n\n\nimpl<'a> NewPost<'a> {\n\n pub fn new(\n\n slug: &'a str,\n", "file_path": "server/src/records/posts.rs", "rank": 47, "score": 21.50978045677668 }, { "content": " }\n\n\n\n let operation = SignUpConnection::build(InitialArguments {\n\n key: String::from(\"title\"),\n\n title: self.blog.clone(),\n\n email: self.email.clone(),\n\n name: self.name.clone(),\n\n password: self.password.clone(),\n\n });\n\n\n\n let query = serde_json::to_string(&operation).unwrap();\n\n\n\n let request = Request::post(\"/graphql\")\n\n .header(\"Content-Type\", \"application/json\")\n\n .body(Ok(query))\n\n .expect(\"Failed to build request.\");\n\n // 2. 
construct a callback\n\n let callback = self.link.callback(\n\n |response: Response<\n\n Json<Result<GraphQLResponse<SignUpConnection>, anyhow::Error>>,\n", "file_path": "app/src/initial.rs", "rank": 48, "score": 21.163089065559625 }, { "content": " title: &'a str,\n\n text: &'a str,\n\n summary: &'a str,\n\n created_by: i32,\n\n ) -> Result<Self> {\n\n Ok(Self {\n\n slug,\n\n title,\n\n text,\n\n summary,\n\n created_by,\n\n })\n\n }\n\n\n\n pub async fn insert(&self, pg_pool: &PgPool) -> Result<Post> {\n\n match sqlx::query_as!(\n\n Post,\n\n r#\"\n\n INSERT INTO posts\n\n (slug, title, text, summary, created_by)\n", "file_path": "server/src/records/posts.rs", "rank": 49, "score": 20.276926454754236 }, { "content": "pub struct Post {\n\n pub id: i32,\n\n pub slug: String,\n\n pub title: String,\n\n pub text: String,\n\n pub summary: String,\n\n pub created_at: crate::DateTime,\n\n}\n\n\n\n#[derive(cynic::FragmentArguments)]\n\npub struct PostArguments {\n\n pub post_id: i32,\n\n}\n\n\n\n#[derive(cynic::QueryFragment, Deserialize)]\n\n#[cynic(\n\n schema_path = \"schema.graphql\",\n\n query_module = \"query_dsl\",\n\n graphql_type = \"QueryRoot\",\n\n argument_struct = \"PostArguments\"\n", "file_path": "app/src/post.rs", "rank": 50, "score": 20.223534109934317 }, { "content": "use {\n\n crate::records::settings::{NewSetting, Settings},\n\n async_graphql::{Context, Result},\n\n sqlx::PgPool,\n\n std::collections::HashMap,\n\n};\n\n\n\npub async fn get_all<'a>(ctx: &'a Context<'_>) -> Result<HashMap<String, String>> {\n\n let pg_pool = ctx.data::<PgPool>()?;\n\n let settings = Settings::all(&pg_pool).await?;\n\n let mut settings_map = HashMap::new();\n\n\n\n for setting in settings {\n\n settings_map.insert(setting.key.clone(), setting.value.clone());\n\n }\n\n\n\n Ok(settings_map)\n\n}\n\n\n\npub async fn add<'a>(ctx: &'a Context<'_>, key: String, value: String) -> Result<&'a str> {\n\n let pg_pool = ctx.data::<PgPool>()?;\n\n let new_setting = 
NewSetting::new(&key, &value)?;\n\n let _setting = new_setting.insert(&pg_pool).await?;\n\n Ok(\"OK\")\n\n}\n", "file_path": "server/src/schema/settings.rs", "rank": 51, "score": 19.795991497610995 }, { "content": "}\n\n\n\n#[derive(cynic::QueryFragment, Deserialize)]\n\n#[cynic(\n\n schema_path = \"schema.graphql\",\n\n query_module = \"query_dsl\",\n\n graphql_type = \"MutationRoot\",\n\n argument_struct = \"UpdatePostArguments\"\n\n)]\n\n#[serde(rename_all = \"camelCase\")]\n\npub struct UpdatePostConnection {\n\n #[arguments(post_id = args.post_id, title = args.title.clone(), text = args.text.clone())]\n\n update_post: String,\n\n}\n\n\n\n#[derive(Clone, PartialEq, Properties)]\n\npub struct UpdatePostModelProps {\n\n pub id: i32,\n\n}\n\n\n", "file_path": "app/src/update_post.rs", "rank": 52, "score": 18.10795453689877 }, { "content": " UPDATE\n\n posts\n\n SET\n\n text = $1\n\n WHERE\n\n id = $2\n\n \"#,\n\n updated_text,\n\n post_id\n\n )\n\n .execute(pg_pool)\n\n .await\n\n {\n\n Ok(_post) => {}\n\n Err(error) => {\n\n println!(\"{}\", error.to_string());\n\n return Err(Error::from(\n\n \"An error occured while updating the post text in the database.\",\n\n ));\n\n }\n", "file_path": "server/src/records/posts.rs", "rank": 53, "score": 17.904717778007516 }, { "content": " RETURNING\n\n id,\n\n key,\n\n value,\n\n created_at\n\n \"#,\n\n &self.key,\n\n &self.value\n\n )\n\n .fetch_one(pg_pool)\n\n .await\n\n {\n\n Ok(user) => Ok(user),\n\n Err(error) => {\n\n println!(\"{}\", error.to_string());\n\n Err(Error::from(\"Unable to insert setting in database.\"))\n\n }\n\n }\n\n }\n\n}\n", "file_path": "server/src/records/settings.rs", "rank": 54, "score": 17.833261154542164 }, { "content": " match msg {\n\n SignUp => {\n\n if self.blog.is_empty() {\n\n self.blog_error = Some(\"Your blog name is not valid\".into());\n\n return true;\n\n }\n\n\n\n if self.email.is_empty() {\n\n self.email_error = Some(\"Your email is not valid\".into());\n\n return true;\n\n 
}\n\n\n\n if self.name.is_empty() {\n\n self.name_error = Some(\"Your name is not valid\".into());\n\n return true;\n\n }\n\n\n\n if self.password.is_empty() {\n\n self.password_error = Some(\"Your password is not valid\".into());\n\n return true;\n", "file_path": "app/src/initial.rs", "rank": 55, "score": 17.726909870628464 }, { "content": "use crate::query_dsl;\n\nuse crate::AppRoute;\n\nuse cynic::GraphQLResponse;\n\nuse cynic::MutationBuilder;\n\nuse serde::Deserialize;\n\nuse yew::{\n\n format::Json,\n\n prelude::*,\n\n services::fetch::{FetchService, FetchTask, Request, Response},\n\n services::storage::{Area, StorageService},\n\n};\n\nuse yew_router::{agent::RouteRequest::ChangeRoute, prelude::*};\n\n\n\n#[derive(cynic::FragmentArguments)]\n\npub struct SignInArguments {\n\n email: String,\n\n password: String,\n\n}\n\n\n\n#[derive(cynic::QueryFragment, Debug, Deserialize)]\n", "file_path": "app/src/sign_in.rs", "rank": 56, "score": 17.624320953210272 }, { "content": " PasswordInputReceived(String),\n\n RememberMeInputReceived,\n\n ClearNotifications,\n\n Ignore,\n\n}\n\n\n\npub struct SignInModel {\n\n fetch_task: Option<FetchTask>,\n\n sign_in_action: Option<SignInConnection>,\n\n link: ComponentLink<Self>,\n\n email: String,\n\n email_error: Option<String>,\n\n password: String,\n\n password_error: Option<String>,\n\n router_agent: Box<dyn Bridge<RouteAgent>>,\n\n error: Option<String>,\n\n success: Option<String>,\n\n}\n\n\n\nimpl SignInModel {\n", "file_path": "app/src/sign_in.rs", "rank": 57, "score": 17.47295178118697 }, { "content": " settings.key,\n\n settings.value,\n\n settings.created_at\n\n FROM\n\n settings\n\n \"#,\n\n )\n\n .fetch_all(pg_pool)\n\n .await\n\n {\n\n Ok(settings) => Ok(settings),\n\n Err(error) => {\n\n println!(\"{}\", error.to_string());\n\n Err(Error::from(\n\n \"An error occured while retrieving the settings from the database.\",\n\n ))\n\n }\n\n }\n\n }\n\n}\n", "file_path": "server/src/records/settings.rs", "rank": 58, 
"score": 17.44452200691807 }, { "content": " .unwrap()\n\n .into_iter()\n\n .map(|error| error.message)\n\n .collect(),\n\n );\n\n }\n\n if graphql_response.data.is_some() {\n\n self.success = Some(\"OK\".into());\n\n self.email = String::from(\"\");\n\n self.name = String::from(\"\");\n\n self.password = String::from(\"\");\n\n self.blog = String::from(\"\");\n\n }\n\n }\n\n Err(error) => self.error = Some(error.to_string()),\n\n }\n\n self.fetch_task = None;\n\n // we want to redraw so that the page displays the location of the ISS instead of\n\n // 'fetching...'\n\n true\n", "file_path": "app/src/initial.rs", "rank": 59, "score": 17.271195587595322 }, { "content": "\n\n html! {{vnode}}\n\n }\n\n None => html! {{\"\"}},\n\n }\n\n }\n\n}\n\n\n\npub enum Msg {\n\n ReceiveResponse(Result<GraphQLResponse<PostConnection>, anyhow::Error>),\n\n}\n\n\n\nimpl Component for PostModel {\n\n type Message = Msg;\n\n type Properties = PostModelProps;\n\n fn create(props: Self::Properties, link: ComponentLink<Self>) -> Self {\n\n let operation = PostConnection::build(PostArguments { post_id: props.id });\n\n\n\n let query = serde_json::to_string(&operation).unwrap();\n\n\n", "file_path": "app/src/post.rs", "rank": 60, "score": 16.85021152258965 }, { "content": " }\n\n\n\n if self.password.is_empty() {\n\n self.password_error = Some(\"Your password is not valid\".into());\n\n return true;\n\n }\n\n\n\n let operation = SignInConnection::build(SignInArguments {\n\n email: self.email.clone(),\n\n password: self.password.clone(),\n\n });\n\n\n\n let query = serde_json::to_string(&operation).unwrap();\n\n\n\n let request = Request::post(\"/graphql\")\n\n .header(\"Content-Type\", \"application/json\")\n\n .body(Ok(query))\n\n .expect(\"Failed to build request.\");\n\n // 2. 
construct a callback\n\n let callback = self.link.callback(\n", "file_path": "app/src/sign_in.rs", "rank": 61, "score": 16.807190433355263 }, { "content": "pub struct DateTime(chrono::DateTime<chrono::Utc>);\n\n\n\n#[derive(cynic::Scalar, Deserialize)]\n\npub struct Jsonobject(serde_json::value::Value);\n\n\n\nmod authorization;\n\nmod components;\n\nmod index;\n\nmod initial;\n\nmod new_post;\n\nmod post;\n\nmod sign_in;\n\nmod update_post;\n\n\n\npub mod query_dsl {\n\n use crate::{DateTime, Jsonobject};\n\n\n\n cynic::query_dsl!(\"schema.graphql\");\n\n}\n\n\n", "file_path": "app/src/lib.rs", "rank": 62, "score": 16.02222759061791 }, { "content": " html! {}\n\n }\n\n }\n\n}\n\n\n\npub enum Msg {\n\n SubmitNewPost,\n\n Change(String),\n\n ReceiveResponse(Result<GraphQLResponse<PostConnection>, anyhow::Error>),\n\n UpdateReceiveResponse(Result<GraphQLResponse<UpdatePostConnection>, anyhow::Error>),\n\n ClearNotifications,\n\n}\n\n\n\nimpl Component for UpdatePostModel {\n\n type Message = Msg;\n\n type Properties = UpdatePostModelProps;\n\n fn create(props: Self::Properties, link: ComponentLink<Self>) -> Self {\n\n let operation = PostConnection::build(PostArguments { post_id: props.id });\n\n\n\n let query = serde_json::to_string(&operation).unwrap();\n", "file_path": "app/src/update_post.rs", "rank": 63, "score": 15.551040152768486 }, { "content": "#[derive(Switch, Clone, Debug)]\n\npub enum AppRoute {\n\n #[to = \"/posts/new\"]\n\n NewPost,\n\n #[to = \"/posts/update/{id}\"]\n\n UpdatePost(i32),\n\n #[to = \"/post/{id}\"]\n\n Post(i32),\n\n #[to = \"/sign/in\"]\n\n SignIn,\n\n #[to = \"/\"]\n\n Index,\n\n}\n\n\n\n#[wasm_bindgen(start)]\n", "file_path": "app/src/lib.rs", "rank": 64, "score": 15.304539865275121 }, { "content": " },\n\n }\n\n }\n\n}\n\n\n\npub enum Msg {\n\n ReceiveResponse(Result<GraphQLResponse<ToolbarConnection>, anyhow::Error>),\n\n}\n\n\n\nimpl Component for ToolbarModel {\n\n type Message = Msg;\n\n type Properties = ();\n\n fn create(_: 
Self::Properties, link: ComponentLink<Self>) -> Self {\n\n let storage = StorageService::new(Area::Local).unwrap();\n\n\n\n let token: String = match storage.restore(\"auth_token\") {\n\n Ok(token) => token,\n\n Err(_err) => String::from(\"\"),\n\n };\n\n\n", "file_path": "app/src/components/toolbar.rs", "rank": 65, "score": 15.161753063387343 }, { "content": "\n\n let request = Request::post(\"/graphql\")\n\n .header(\"Content-Type\", \"application/json\")\n\n .body(Ok(query))\n\n .expect(\"Failed to build request.\");\n\n let callback = link.callback(\n\n |response: Response<Json<Result<GraphQLResponse<PostConnection>, anyhow::Error>>>| {\n\n let Json(data) = response.into_body();\n\n Msg::ReceiveResponse(data)\n\n },\n\n );\n\n let target = FetchService::fetch(request, callback).expect(\"failed to start request\");\n\n Self {\n\n props,\n\n fetch_task: Some(target),\n\n post: None,\n\n text: String::from(\"\"),\n\n text_error: None,\n\n error: None,\n\n success: None,\n", "file_path": "app/src/update_post.rs", "rank": 66, "score": 14.995367100690624 }, { "content": " }\n\n\n\n let operation = UpdatePostConnection::build(UpdatePostArguments {\n\n post_id: self.props.id,\n\n title: None,\n\n text: Some(self.text.clone()),\n\n });\n\n\n\n let query = serde_json::to_string(&operation).unwrap();\n\n\n\n let request = Request::post(\"/graphql\")\n\n .header(\"Content-Type\", \"application/json\")\n\n .header(\"token\", token)\n\n .body(Ok(query))\n\n .expect(\"Failed to build request.\");\n\n // 2. 
construct a callback\n\n let callback = self.link.callback(\n\n |response: Response<\n\n Json<Result<GraphQLResponse<UpdatePostConnection>, anyhow::Error>>,\n\n >| {\n", "file_path": "app/src/update_post.rs", "rank": 67, "score": 14.837597261548236 }, { "content": " type Properties = ();\n\n fn create(_: Self::Properties, link: ComponentLink<Self>) -> Self {\n\n Self {\n\n fetch_task: None,\n\n slug: String::from(\"\"),\n\n slug_error: None,\n\n summary: String::from(\"\"),\n\n summary_error: None,\n\n title: String::from(\"\"),\n\n title_error: None,\n\n text: String::from(\"\"),\n\n text_error: None,\n\n error: None,\n\n success: None,\n\n router_agent: RouteAgent::bridge(link.callback(|_| Msg::Ignore)),\n\n link,\n\n }\n\n }\n\n\n\n fn update(&mut self, msg: Self::Message) -> ShouldRender {\n", "file_path": "app/src/new_post.rs", "rank": 68, "score": 14.697445686029871 }, { "content": " id=\"email\"\n\n name=\"email\"\n\n type=\"email\"\n\n autoComplete=\"email\"\n\n class=\"appearance-none block w-full px-3 py-2 border border-gray-300 rounded-md shadow-sm placeholder-gray-400 focus:outline-none focus:ring-indigo-500 focus:border-indigo-500 sm:text-sm\"\n\n placeholder=\"[email protected]\"\n\n oninput=self.link.callback(|input_data: InputData| Msg::EmailInputReceived(input_data.value))\n\n value=&self.email\n\n />\n\n </div>\n\n { self.view_email_error() }\n\n </div>\n\n\n\n <div class=\"space-y-1\">\n\n <label\n\n htmlFor=\"password\"\n\n class=\"block text-sm font-medium text-gray-700\"\n\n >\n\n {\"Password\"}\n\n </label>\n", "file_path": "app/src/sign_in.rs", "rank": 69, "score": 14.599061961367353 }, { "content": " NameInputReceived(value) => {\n\n self.error = None;\n\n self.success = None;\n\n self.name_error = None;\n\n self.name = value;\n\n true\n\n }\n\n PasswordInputReceived(value) => {\n\n self.error = None;\n\n self.success = None;\n\n self.password_error = None;\n\n self.password = value;\n\n true\n\n }\n\n }\n\n }\n\n fn view(&self) -> Html 
{\n\n html! {\n\n <>\n\n <div class=\"min-h-screen bg-gray-50 flex flex-col justify-center py-12 sm:px-6 lg:px-8\">\n", "file_path": "app/src/initial.rs", "rank": 70, "score": 14.356301485049634 }, { "content": ")]\n\n#[serde(rename_all = \"camelCase\")]\n\npub struct PostConnection {\n\n #[arguments(post_id = args.post_id)]\n\n pub post: Option<Post>,\n\n}\n\n\n\n#[derive(Clone, PartialEq, Properties)]\n\npub struct PostModelProps {\n\n pub id: i32,\n\n}\n\n\n\npub struct PostModel {\n\n fetch_target: Option<FetchTask>,\n\n post: Option<Post>,\n\n}\n\n\n\nimpl PostModel {\n\n pub fn title(&self) -> Html {\n\n match &self.post {\n", "file_path": "app/src/post.rs", "rank": 71, "score": 14.293888245660842 }, { "content": " let target = FetchService::fetch(request, callback).expect(\"failed to start request\");\n\n\n\n Self {\n\n settings: Map::new(),\n\n fetch_target: Some(target),\n\n }\n\n }\n\n\n\n fn update(&mut self, msg: Self::Message) -> ShouldRender {\n\n match msg {\n\n Msg::ReceiveResponse(response) => {\n\n match response {\n\n Ok(graphql_response) => match graphql_response.data {\n\n Some(data) => {\n\n self.settings = match data.settings.0.as_object() {\n\n Some(map) => map.clone(),\n\n None => Map::new(),\n\n };\n\n }\n\n None => {}\n", "file_path": "app/src/lib.rs", "rank": 72, "score": 14.05982891839916 }, { "content": "use crate::components::toolbar::ToolbarModel;\n\nuse crate::post::{Post, PostArguments, PostConnection};\n\nuse crate::query_dsl;\n\nuse cynic::GraphQLResponse;\n\nuse cynic::{MutationBuilder, QueryBuilder};\n\nuse serde::Deserialize;\n\nuse web_sys::Node;\n\nuse yew::virtual_dom::VNode;\n\nuse yew::{\n\n format::Json,\n\n prelude::*,\n\n services::fetch::{FetchService, FetchTask, Request, Response},\n\n services::storage::{Area, StorageService},\n\n};\n\n\n\n#[derive(cynic::FragmentArguments)]\n\npub struct UpdatePostArguments {\n\n post_id: i32,\n\n title: Option<String>,\n\n text: Option<String>,\n", "file_path": 
"app/src/update_post.rs", "rank": 73, "score": 13.943008841227712 }, { "content": "pub enum Msg {\n\n ReceiveResponse(Result<GraphQLResponse<PostsConnection>, anyhow::Error>),\n\n}\n\n\n\nimpl Component for IndexModel {\n\n type Message = Msg;\n\n type Properties = ();\n\n fn create(_: Self::Properties, link: ComponentLink<Self>) -> Self {\n\n let operation = PostsConnection::build(());\n\n\n\n let query = serde_json::to_string(&operation).unwrap();\n\n\n\n let request = Request::post(\"/graphql\")\n\n .header(\"Content-Type\", \"application/json\")\n\n .body(Ok(query))\n\n .expect(\"Failed to build request.\");\n\n let callback = link.callback(\n\n |response: Response<Json<Result<GraphQLResponse<PostsConnection>, anyhow::Error>>>| {\n\n let Json(data) = response.into_body();\n\n Msg::ReceiveResponse(data)\n", "file_path": "app/src/index.rs", "rank": 74, "score": 13.562635696831597 }, { "content": " VALUES\n\n ($1, $2, $3, $4, $5)\n\n RETURNING\n\n id,\n\n slug,\n\n title,\n\n text,\n\n summary,\n\n created_at\n\n \"#,\n\n &self.slug,\n\n &self.title,\n\n &self.text,\n\n &self.summary,\n\n &self.created_by\n\n )\n\n .fetch_one(pg_pool)\n\n .await\n\n {\n\n Ok(post) => Ok(post),\n\n Err(error) => {\n\n println!(\"{}\", error.to_string());\n\n Err(Error::from(\"Unable to insert post in database.\"))\n\n }\n\n }\n\n }\n\n}\n", "file_path": "server/src/records/posts.rs", "rank": 75, "score": 13.56123444443198 }, { "content": " self.fetch_task = None;\n\n // we want to redraw so that the page displays the location of the ISS instead of\n\n // 'fetching...'\n\n true\n\n }\n\n ClearNotifications => {\n\n self.error = None;\n\n self.success = None;\n\n true\n\n }\n\n EmailInputReceived(value) => {\n\n self.error = None;\n\n self.success = None;\n\n self.email_error = None;\n\n self.email = value;\n\n true\n\n }\n\n PasswordInputReceived(value) => {\n\n self.error = None;\n\n self.success = None;\n", "file_path": "app/src/sign_in.rs", "rank": 76, "score": 13.509602519096852 
}, { "content": " text: String,\n\n slug: String,\n\n summary: String,\n\n}\n\n\n\n#[derive(cynic::QueryFragment, Deserialize)]\n\n#[cynic(\n\n schema_path = \"schema.graphql\",\n\n query_module = \"query_dsl\",\n\n graphql_type = \"MutationRoot\",\n\n argument_struct = \"NewPostArguments\"\n\n)]\n\n#[serde(rename_all = \"camelCase\")]\n\npub struct NewPostConnection {\n\n #[arguments(title = args.title.clone(), text = args.text.clone(), slug = args.slug.clone(), summary = args.summary.clone())]\n\n new_post: Post,\n\n}\n\n\n\npub struct NewPostModel {\n\n fetch_task: Option<FetchTask>,\n", "file_path": "app/src/new_post.rs", "rank": 77, "score": 13.446173753016218 }, { "content": " graphql_response\n\n .errors\n\n .unwrap()\n\n .into_iter()\n\n .map(|error| error.message)\n\n .collect(),\n\n );\n\n }\n\n if graphql_response.data.is_some() {\n\n let token = graphql_response.data.unwrap().sign_in;\n\n let mut storage = StorageService::new(Area::Local).unwrap();\n\n storage.store(\"auth_token\", Ok(token.clone()));\n\n self.success = Some(\"OK\".into());\n\n self.router_agent.send(ChangeRoute(AppRoute::Index.into()));\n\n self.email = String::from(\"\");\n\n self.password = String::from(\"\");\n\n }\n\n }\n\n Err(error) => self.error = Some(error.to_string()),\n\n }\n", "file_path": "app/src/sign_in.rs", "rank": 78, "score": 13.326425245483428 }, { "content": " .send(ChangeRoute(AppRoute::Post(post.id).into()));\n\n self.success = Some(\"OK\".into());\n\n self.text = String::from(\"\");\n\n self.title = String::from(\"\");\n\n self.slug = String::from(\"\");\n\n self.summary = String::from(\"\");\n\n }\n\n }\n\n Err(error) => self.error = Some(error.to_string()),\n\n }\n\n self.fetch_task = None;\n\n }\n\n Msg::Ignore => return false,\n\n Msg::ClearNotifications => {\n\n self.error = None;\n\n self.success = None;\n\n }\n\n }\n\n true\n\n }\n", "file_path": "app/src/new_post.rs", "rank": 79, "score": 13.22997520571586 }, { "content": " let request = 
Request::post(\"/graphql\")\n\n .header(\"Content-Type\", \"application/json\")\n\n .body(Ok(query))\n\n .expect(\"Failed to build request.\");\n\n let callback = link.callback(\n\n |response: Response<Json<Result<GraphQLResponse<PostConnection>, anyhow::Error>>>| {\n\n let Json(data) = response.into_body();\n\n Msg::ReceiveResponse(data)\n\n },\n\n );\n\n let target = FetchService::fetch(request, callback).expect(\"failed to start request\");\n\n Self {\n\n fetch_target: Some(target),\n\n post: None,\n\n }\n\n }\n\n\n\n fn update(&mut self, msg: Self::Message) -> ShouldRender {\n\n match msg {\n\n Msg::ReceiveResponse(response) => {\n", "file_path": "app/src/post.rs", "rank": 80, "score": 13.122015762870117 }, { "content": " {\"Email address\"}\n\n </label>\n\n <div class=\"mt-1\">\n\n <input\n\n id=\"email\"\n\n name=\"email\"\n\n type=\"email\"\n\n autoComplete=\"email\"\n\n class=\"appearance-none block w-full px-3 py-2 border border-gray-300 rounded-md shadow-sm placeholder-gray-400 focus:outline-none focus:ring-indigo-500 focus:border-indigo-500 sm:text-sm\"\n\n oninput=self.link.callback(|input_data: InputData| Msg::EmailInputReceived(input_data.value))\n\n value=&self.email\n\n placeholder=\"[email protected]\"\n\n />\n\n { self.view_email_error() }\n\n </div>\n\n </div>\n\n\n\n <div>\n\n <label\n\n htmlFor=\"name\"\n", "file_path": "app/src/initial.rs", "rank": 81, "score": 13.069790449585414 }, { "content": " },\n\n );\n\n let target = FetchService::fetch(request, callback).expect(\"failed to start request\");\n\n Self {\n\n posts: vec![],\n\n settings: Map::new(),\n\n fetch_target: Some(target),\n\n }\n\n }\n\n\n\n fn update(&mut self, msg: Self::Message) -> ShouldRender {\n\n match msg {\n\n Msg::ReceiveResponse(response) => {\n\n match response {\n\n Ok(graphql_response) => match graphql_response.data {\n\n Some(data) => {\n\n self.posts = data.posts;\n\n self.settings = match data.settings.0.as_object() {\n\n Some(map) => map.clone(),\n\n None => 
Map::new(),\n", "file_path": "app/src/index.rs", "rank": 82, "score": 12.955337226860411 }, { "content": " Err(error) => self.error = Some(error.to_string()),\n\n }\n\n self.fetch_task = None;\n\n }\n\n }\n\n true\n\n }\n\n\n\n fn change(&mut self, _props: Self::Properties) -> ShouldRender {\n\n // Should only return \"true\" if new properties are different to\n\n // previously received properties.\n\n // This component has no properties so we will always return \"false\".\n\n false\n\n }\n\n\n\n fn view(&self) -> Html {\n\n html! {\n\n <div>\n\n <ToolbarModel />\n\n <div id=\"markdown\">\n", "file_path": "app/src/update_post.rs", "rank": 83, "score": 12.839189736137286 }, { "content": " <label\n\n htmlFor=\"password\"\n\n class=\"block text-sm font-medium text-gray-700\"\n\n >\n\n {\"Password\"}\n\n </label>\n\n <div class=\"mt-1\">\n\n <input\n\n id=\"password\"\n\n name=\"password\"\n\n type=\"password\"\n\n autoComplete=\"current-password\"\n\n class=\"appearance-none block w-full px-3 py-2 border border-gray-300 rounded-md shadow-sm placeholder-gray-400 focus:outline-none focus:ring-indigo-500 focus:border-indigo-500 sm:text-sm\"\n\n placeholder=\"*************\"\n\n oninput=self.link.callback(|input_data: InputData| Msg::PasswordInputReceived(input_data.value))\n\n value=&self.password\n\n />\n\n { self.view_password_error() }\n\n </div>\n\n </div>\n", "file_path": "app/src/initial.rs", "rank": 84, "score": 12.061033117502474 }, { "content": " match response {\n\n Ok(graphql_response) => {\n\n self.post = graphql_response.data.and_then(|data| data.post);\n\n }\n\n Err(error) => ConsoleService::info(&format!(\"Error: {}\", error.to_string())),\n\n };\n\n self.fetch_target = None;\n\n }\n\n }\n\n true\n\n }\n\n\n\n fn change(&mut self, _props: Self::Properties) -> ShouldRender {\n\n // Should only return \"true\" if new properties are different to\n\n // previously received properties.\n\n // This component has no properties so we will always return 
\"false\".\n\n false\n\n }\n\n\n\n fn view(&self) -> Html {\n", "file_path": "app/src/post.rs", "rank": 85, "score": 11.99428316986254 }, { "content": "#[derive(cynic::QueryFragment, Deserialize)]\n\n#[cynic(\n\n schema_path = \"schema.graphql\",\n\n query_module = \"query_dsl\",\n\n graphql_type = \"QueryRoot\"\n\n)]\n\n#[serde(rename_all = \"camelCase\")]\n\npub struct SettingsConnection {\n\n settings: Jsonobject,\n\n}\n\n\n\npub struct RootModel {\n\n settings: Map<String, Value>,\n\n fetch_target: Option<FetchTask>,\n\n}\n\n\n\npub enum Msg {\n\n ReceiveResponse(Result<GraphQLResponse<SettingsConnection>, anyhow::Error>),\n\n}\n\n\n", "file_path": "app/src/lib.rs", "rank": 86, "score": 11.827356290158573 }, { "content": " </div>\n\n </div>\n\n }\n\n } else {\n\n html! {}\n\n }\n\n }\n\n}\n\n\n\n/// Some of the code to render the UI is split out into smaller functions here to make the code\n\n/// cleaner and show some useful design patterns.\n\nimpl Component for SignInModel {\n\n type Message = Msg;\n\n type Properties = Props;\n\n\n\n fn create(_props: Self::Properties, link: ComponentLink<Self>) -> Self {\n\n Self {\n\n fetch_task: None,\n\n sign_in_action: None,\n\n error: None,\n", "file_path": "app/src/sign_in.rs", "rank": 87, "score": 11.79481808252546 }, { "content": " .errors\n\n .unwrap()\n\n .into_iter()\n\n .map(|error| error.message)\n\n .collect(),\n\n );\n\n }\n\n if graphql_response.data.is_some() {\n\n self.post = graphql_response.data.unwrap().post;\n\n if let Some(post) = &self.post {\n\n self.text = post.text.clone();\n\n }\n\n }\n\n }\n\n Err(error) => self.error = Some(error.to_string()),\n\n }\n\n self.fetch_task = None;\n\n }\n\n Msg::ClearNotifications => {\n\n self.error = None;\n", "file_path": "app/src/update_post.rs", "rank": 88, "score": 11.793291184865952 }, { "content": " <div class=\"mt-1\">\n\n <input\n\n id=\"password\"\n\n name=\"password\"\n\n type=\"password\"\n\n autoComplete=\"current-password\"\n\n 
class=\"appearance-none block w-full px-3 py-2 border border-gray-300 rounded-md shadow-sm placeholder-gray-400 focus:outline-none focus:ring-indigo-500 focus:border-indigo-500 sm:text-sm\"\n\n placeholder=\"*************\"\n\n oninput=self.link.callback(|input_data: InputData| Msg::PasswordInputReceived(input_data.value))\n\n value=&self.password\n\n />\n\n </div>\n\n </div>\n\n\n\n <div class=\"flex items-center justify-between\">\n\n <div class=\"flex items-center\">\n\n <input\n\n id=\"remember_me\"\n\n name=\"remember_me\"\n\n type=\"checkbox\"\n", "file_path": "app/src/sign_in.rs", "rank": 89, "score": 11.786001845247782 }, { "content": " settings: Map::new(),\n\n authorization: None,\n\n fetch_target: Some(target),\n\n }\n\n }\n\n\n\n fn update(&mut self, msg: Self::Message) -> ShouldRender {\n\n match msg {\n\n Msg::ReceiveResponse(response) => {\n\n match response {\n\n Ok(graphql_response) => match graphql_response.data {\n\n Some(data) => {\n\n self.settings = match data.settings.0.as_object() {\n\n Some(map) => map.clone(),\n\n None => Map::new(),\n\n };\n\n self.authorization = data.authorization;\n\n }\n\n None => {}\n\n },\n", "file_path": "app/src/components/toolbar.rs", "rank": 90, "score": 11.7729517138187 }, { "content": " };\n\n }\n\n None => {}\n\n },\n\n Err(error) => ConsoleService::info(&format!(\"Error: {}\", error.to_string())),\n\n };\n\n self.fetch_target = None;\n\n }\n\n }\n\n true\n\n }\n\n\n\n fn change(&mut self, _props: Self::Properties) -> ShouldRender {\n\n false\n\n }\n\n\n\n fn view(&self) -> Html {\n\n html! 
{\n\n <div class=\"min-h-screen bg-gray-100\">\n\n <ToolbarModel />\n", "file_path": "app/src/index.rs", "rank": 91, "score": 11.635542817565028 }, { "content": " link: ComponentLink<Self>,\n\n slug: String,\n\n slug_error: Option<String>,\n\n summary: String,\n\n summary_error: Option<String>,\n\n title: String,\n\n title_error: Option<String>,\n\n text: String,\n\n text_error: Option<String>,\n\n error: Option<String>,\n\n success: Option<String>,\n\n router_agent: Box<dyn Bridge<RouteAgent>>,\n\n}\n\n\n\nimpl NewPostModel {\n\n pub fn markdown_node(&self) -> Html {\n\n let div = web_sys::window()\n\n .unwrap()\n\n .document()\n\n .unwrap()\n", "file_path": "app/src/new_post.rs", "rank": 92, "score": 11.633862998916559 }, { "content": " }\n\n ClearNotifications => {\n\n self.error = None;\n\n self.success = None;\n\n true\n\n }\n\n BlogInputReceived(value) => {\n\n self.error = None;\n\n self.success = None;\n\n self.blog_error = None;\n\n self.blog = value;\n\n true\n\n }\n\n EmailInputReceived(value) => {\n\n self.error = None;\n\n self.success = None;\n\n self.email_error = None;\n\n self.email = value;\n\n true\n\n }\n", "file_path": "app/src/initial.rs", "rank": 93, "score": 11.47228576368705 }, { "content": "pub struct UpdatePostModel {\n\n props: UpdatePostModelProps,\n\n fetch_task: Option<FetchTask>,\n\n post: Option<Post>,\n\n link: ComponentLink<Self>,\n\n text: String,\n\n text_error: Option<String>,\n\n error: Option<String>,\n\n success: Option<String>,\n\n}\n\n\n\nimpl UpdatePostModel {\n\n pub fn markdown_node(&self) -> Html {\n\n let div = web_sys::window()\n\n .unwrap()\n\n .document()\n\n .unwrap()\n\n .create_element(\"div\")\n\n .unwrap();\n\n\n", "file_path": "app/src/update_post.rs", "rank": 94, "score": 11.278730767541221 }, { "content": "use async_graphql::http::{playground_source, GraphQLPlaygroundConfig};\n\nuse async_graphql::{EmptySubscription, Schema};\n\nuse async_graphql_warp::Response;\n\nuse sqlx::postgres::PgPool;\n\nuse 
std::convert::Infallible;\n\nuse std::env;\n\nuse warp::{http::Response as HttpResponse, Filter};\n\n\n\nmod records;\n\nmod schema;\n\n\n", "file_path": "server/src/main.rs", "rank": 95, "score": 11.19400468821268 }, { "content": " slug: self.slug.clone(),\n\n summary: self.summary.clone(),\n\n });\n\n\n\n let query = serde_json::to_string(&operation).unwrap();\n\n\n\n let request = Request::post(\"/graphql\")\n\n .header(\"Content-Type\", \"application/json\")\n\n .header(\"token\", token)\n\n .body(Ok(query))\n\n .expect(\"Failed to build request.\");\n\n // 2. construct a callback\n\n let callback = self.link.callback(\n\n |response: Response<\n\n Json<Result<GraphQLResponse<NewPostConnection>, anyhow::Error>>,\n\n >| {\n\n let Json(data) = response.into_body();\n\n Msg::ReceiveResponse(data)\n\n },\n\n );\n", "file_path": "app/src/new_post.rs", "rank": 96, "score": 11.110027815581603 }, { "content": " |response: Response<\n\n Json<Result<GraphQLResponse<SignInConnection>, anyhow::Error>>,\n\n >| {\n\n let Json(data) = response.into_body();\n\n Msg::ReceiveResponse(data)\n\n },\n\n );\n\n // 3. pass the request and callback to the fetch service\n\n let task = FetchService::fetch(request, callback).expect(\"failed to start request\");\n\n // 4. store the task so it isn't canceled immediately\n\n self.fetch_task = Some(task);\n\n // we want to redraw so that the page displays a 'fetching...' 
message to the user\n\n // so return 'true'\n\n true\n\n }\n\n ReceiveResponse(response) => {\n\n match response {\n\n Ok(graphql_response) => {\n\n if graphql_response.errors.is_some() {\n\n self.error = Some(\n", "file_path": "app/src/sign_in.rs", "rank": 97, "score": 10.989644863775384 }, { "content": "impl Component for RootModel {\n\n type Message = Msg;\n\n type Properties = ();\n\n fn create(_: Self::Properties, link: ComponentLink<Self>) -> Self {\n\n let operation = SettingsConnection::build(());\n\n\n\n let query = serde_json::to_string(&operation).unwrap();\n\n\n\n let request = Request::post(\"/graphql\")\n\n .header(\"Content-Type\", \"application/json\")\n\n .body(Ok(query))\n\n .expect(\"Failed to build request.\");\n\n let callback = link.callback(\n\n |response: Response<\n\n Json<Result<GraphQLResponse<SettingsConnection>, anyhow::Error>>,\n\n >| {\n\n let Json(data) = response.into_body();\n\n Msg::ReceiveResponse(data)\n\n },\n\n );\n", "file_path": "app/src/lib.rs", "rank": 98, "score": 10.98388807832645 }, { "content": " match msg {\n\n Msg::SubmitNewPost => {\n\n let storage = StorageService::new(Area::Local).unwrap();\n\n\n\n let token: String = match storage.restore(\"auth_token\") {\n\n Ok(token) => token,\n\n Err(_err) => {\n\n self.error = Some(\"Your authorization token is not valid\".into());\n\n return true;\n\n }\n\n };\n\n\n\n if self.text.is_empty() {\n\n self.text_error = Some(\"Your text is not valid\".into());\n\n return true;\n\n }\n\n\n\n let operation = NewPostConnection::build(NewPostArguments {\n\n title: self.title.clone(),\n\n text: self.text.clone(),\n", "file_path": "app/src/new_post.rs", "rank": 99, "score": 10.821388301414704 } ]
Rust
src/hash_tree.rs
ChosunOne/binary_merkle_tree
c42d5c980030e28afcf5c2819ea3507a017b00cc
#[cfg(not(any(feature = "use_hashbrown")))] use std::collections::HashMap; use std::path::PathBuf; #[cfg(feature = "use_hashbrown")] use hashbrown::HashMap; use crate::merkle_bit::{BinaryMerkleTreeResult, MerkleBIT}; use crate::traits::{Array, Decode, Encode}; use crate::tree::tree_branch::TreeBranch; use crate::tree::tree_data::TreeData; use crate::tree::tree_leaf::TreeLeaf; use crate::tree::tree_node::TreeNode; use crate::tree_db::HashTreeDB; use crate::tree_hasher::TreeHasher; type Tree<ArrayType, ValueType> = MerkleBIT< HashTreeDB<ArrayType>, TreeBranch<ArrayType>, TreeLeaf<ArrayType>, TreeData, TreeNode<ArrayType>, TreeHasher, ValueType, ArrayType, >; pub struct HashTree<ArrayType = [u8; 32], ValueType = Vec<u8>> where ValueType: Encode + Decode, ArrayType: Array, { tree: Tree<ArrayType, ValueType>, } impl<ValueType, ArrayType> HashTree<ArrayType, ValueType> where ValueType: Encode + Decode, ArrayType: Array, { #[inline] pub fn new(depth: usize) -> BinaryMerkleTreeResult<Self> { let path = PathBuf::new(); let tree = MerkleBIT::new(&path, depth)?; Ok(Self { tree }) } #[inline] pub fn open(path: &PathBuf, depth: usize) -> BinaryMerkleTreeResult<Self> { let tree = MerkleBIT::new(path, depth)?; Ok(Self { tree }) } #[inline] pub fn get( &self, root_hash: &ArrayType, keys: &mut [ArrayType], ) -> BinaryMerkleTreeResult<HashMap<ArrayType, Option<ValueType>>> { self.tree.get(root_hash, keys) } #[inline] pub fn insert( &mut self, previous_root: Option<&ArrayType>, keys: &mut [ArrayType], values: &[ValueType], ) -> BinaryMerkleTreeResult<ArrayType> { self.tree.insert(previous_root, keys, values) } #[inline] pub fn remove(&mut self, root_hash: &ArrayType) -> BinaryMerkleTreeResult<()> { self.tree.remove(root_hash) } #[inline] pub fn generate_inclusion_proof( &self, root: &ArrayType, key: ArrayType, ) -> BinaryMerkleTreeResult<Vec<(ArrayType, bool)>> { self.tree.generate_inclusion_proof(root, key) } #[inline] pub fn verify_inclusion_proof( root: &ArrayType, key: ArrayType, 
value: &ValueType, proof: &[(ArrayType, bool)], ) -> BinaryMerkleTreeResult<()> { Tree::verify_inclusion_proof(root, key, value, proof) } #[inline] pub fn get_one( &self, root: &ArrayType, key: &ArrayType, ) -> BinaryMerkleTreeResult<Option<ValueType>> { self.tree.get_one(root, key) } #[inline] pub fn insert_one( &mut self, previous_root: Option<&ArrayType>, key: &ArrayType, value: &ValueType, ) -> BinaryMerkleTreeResult<ArrayType> { self.tree.insert_one(previous_root, key, value) } }
#[cfg(not(any(feature = "use_hashbrown")))] use std::collections::HashMap; use std::path::PathBuf; #[cfg(feature = "use_hashbrown")] use hashbrown::HashMap; use crate::merkle_bit::{BinaryMerkleTreeResult, MerkleBIT}; use crate::traits::{Array, Decode, Encode}; use crate::tree::tree_branch::TreeBranch; use crate::tree::tree_data::TreeData; use crate::tree::tree_leaf::TreeLeaf; use crate::tree::tree_node::TreeNode; use crate::tree_db::HashTreeDB; use crate::tree_hasher::TreeHasher; type Tree<ArrayType, ValueType> = MerkleBIT< HashTreeDB<ArrayType>, TreeBranch<ArrayType>, TreeLeaf<ArrayType>, TreeData, TreeNode<ArrayType>, TreeHasher, ValueType, ArrayType, >; pub struct HashTree<ArrayType = [u8; 32], ValueType = Vec<u8>> where ValueType: Encode + Decode, ArrayType: Array, { tree: Tree<ArrayType, ValueType>, } impl<ValueType, ArrayType> HashTree<ArrayType, ValueType> where ValueType: Encode + Decode, ArrayType: Array, { #[inline] pub fn new(depth: usize) -> BinaryMerkleTreeResult<Self> { let path = PathBuf::new(); let tree = MerkleBIT::new(&path, depth)?; Ok(Self { tree }) } #[inline] pub fn open(path: &PathBuf, depth: usize) -> BinaryMerkleTreeResult<Self> { let tree = MerkleBIT::new(path, depth)?; Ok(Self { tree }) } #[inline] pub fn ge
#[inline] pub fn insert( &mut self, previous_root: Option<&ArrayType>, keys: &mut [ArrayType], values: &[ValueType], ) -> BinaryMerkleTreeResult<ArrayType> { self.tree.insert(previous_root, keys, values) } #[inline] pub fn remove(&mut self, root_hash: &ArrayType) -> BinaryMerkleTreeResult<()> { self.tree.remove(root_hash) } #[inline] pub fn generate_inclusion_proof( &self, root: &ArrayType, key: ArrayType, ) -> BinaryMerkleTreeResult<Vec<(ArrayType, bool)>> { self.tree.generate_inclusion_proof(root, key) } #[inline] pub fn verify_inclusion_proof( root: &ArrayType, key: ArrayType, value: &ValueType, proof: &[(ArrayType, bool)], ) -> BinaryMerkleTreeResult<()> { Tree::verify_inclusion_proof(root, key, value, proof) } #[inline] pub fn get_one( &self, root: &ArrayType, key: &ArrayType, ) -> BinaryMerkleTreeResult<Option<ValueType>> { self.tree.get_one(root, key) } #[inline] pub fn insert_one( &mut self, previous_root: Option<&ArrayType>, key: &ArrayType, value: &ValueType, ) -> BinaryMerkleTreeResult<ArrayType> { self.tree.insert_one(previous_root, key, value) } }
t( &self, root_hash: &ArrayType, keys: &mut [ArrayType], ) -> BinaryMerkleTreeResult<HashMap<ArrayType, Option<ValueType>>> { self.tree.get(root_hash, keys) }
function_block-function_prefixed
[ { "content": "#[inline]\n\npub fn generate_tree_ref_queue<ArrayType: Array>(\n\n tree_refs: &mut Vec<TreeRef<ArrayType>>,\n\n tree_ref_queue: &mut HashMap<usize, Vec<(usize, usize, usize)>>,\n\n) -> BinaryMerkleTreeResult<HashSet<usize>> {\n\n let mut unique_split_bits = HashSet::new();\n\n for i in 0..tree_refs.len() - 1 {\n\n let left_key = tree_refs[i].key.as_ref();\n\n let right_key = tree_refs[i + 1].key.as_ref();\n\n let key_len = left_key.len();\n\n\n\n for j in 0..key_len {\n\n if j == key_len - 1 && left_key[j] == right_key[j] {\n\n // The keys are the same and don't diverge\n\n return Err(Exception::new(\n\n \"Attempted to insert item with duplicate keys\",\n\n ));\n\n }\n\n // Skip bytes until we find a difference\n\n if left_key[j] == right_key[j] {\n\n continue;\n", "file_path": "src/utils/tree_utils.rs", "rank": 0, "score": 180017.62552727136 }, { "content": "#[inline]\n\npub fn choose_zero<ArrayType>(key_array: ArrayType, bit: usize) -> Result<bool, Exception>\n\nwhere\n\n ArrayType: Array,\n\n{\n\n let key = key_array.as_ref();\n\n let index = bit >> 3;\n\n let shift = bit % 8;\n\n let extracted_bit = usize::try_from(key[index])? >> (7 - shift) & 1;\n\n Ok(extracted_bit == 0)\n\n}\n\n\n\n/// This function splits the list of sorted pairs into two lists, one for going down the zero branch,\n\n/// and the other for going down the one branch.\n\n/// # Errors\n\n/// `Exception` generated from a failure to convert an `u8` to an `usize`\n", "file_path": "src/utils/tree_utils.rs", "rank": 1, "score": 170749.44941342427 }, { "content": "#[inline]\n\npub fn split_pairs<ArrayType>(\n\n sorted_pairs: &[ArrayType],\n\n bit: usize,\n\n) -> Result<(&[ArrayType], &[ArrayType]), Exception>\n\nwhere\n\n ArrayType: Array,\n\n{\n\n if sorted_pairs.is_empty() {\n\n return Ok((&[], &[]));\n\n }\n\n\n\n let mut min = 0;\n\n let mut max = sorted_pairs.len();\n\n\n\n if choose_zero(sorted_pairs[max - 1], bit)? 
{\n\n return Ok((&sorted_pairs[..], &[]));\n\n }\n\n\n\n if !choose_zero(sorted_pairs[0], bit)? {\n\n return Ok((&[], &sorted_pairs[..]));\n", "file_path": "src/utils/tree_utils.rs", "rank": 2, "score": 166974.6670197818 }, { "content": "#[inline]\n\npub fn check_descendants<'a, ArrayType>(\n\n keys: &'a [ArrayType],\n\n branch_split_index: usize,\n\n branch_key: &ArrayType,\n\n min_split_index: usize,\n\n) -> Result<&'a [ArrayType], Exception>\n\nwhere\n\n ArrayType: Array,\n\n{\n\n let b_key = branch_key.as_ref();\n\n let mut start = 0;\n\n let mut end = 0;\n\n let mut found_start = false;\n\n for (i, k) in keys.iter().enumerate() {\n\n let key = k.as_ref();\n\n let mut descendant = true;\n\n for j in (min_split_index..branch_split_index).step_by(8) {\n\n let byte = j >> 3;\n\n if b_key[byte] == key[byte] {\n\n continue;\n", "file_path": "src/utils/tree_utils.rs", "rank": 3, "score": 163553.95932002366 }, { "content": "#[inline]\n\npub fn generate_leaf_map<ArrayType, ValueType>(\n\n keys: &[ArrayType],\n\n) -> HashMap<ArrayType, Option<ValueType>>\n\nwhere\n\n ArrayType: Array,\n\n{\n\n let mut leaf_map = HashMap::new();\n\n for &key in keys.iter() {\n\n leaf_map.insert(key, None);\n\n }\n\n leaf_map\n\n}\n\n\n\n/// This function performs a fast log2 operation for single byte unsigned integers.\n\n#[inline]\n\n#[must_use]\n\npub const fn fast_log_2(num: u8) -> u8 {\n\n let mut log = num;\n\n log |= log >> 1;\n\n log |= log >> 2;\n\n log |= log >> 4;\n\n MULTIPLY_DE_BRUIJN_BIT_POSITION[((0x1d_usize * log as usize) as u8 >> 5) as usize]\n\n}\n\n\n\n/// Generates the `TreeRef`s that will be made into the new tree.\n\n/// # Errors\n\n/// `Exception` generated from a failure to convert a `u8` to a `usize`\n", "file_path": "src/utils/tree_utils.rs", "rank": 4, "score": 160909.95591415738 }, { "content": "#[inline]\n\npub fn calc_min_split_index<ArrayType>(\n\n keys: &[ArrayType],\n\n branch_key: &ArrayType,\n\n) -> Result<usize, Exception>\n\nwhere\n\n ArrayType: 
Array,\n\n{\n\n if keys.is_empty() {\n\n return Err(Exception::new(\"keys must not be empty.\"));\n\n }\n\n let b_key = branch_key.as_ref();\n\n let mut min_key;\n\n let mut max_key;\n\n if let Some(key) = keys.iter().min() {\n\n min_key = key.as_ref();\n\n } else {\n\n return Err(Exception::new(\"Failed to get min key from list of keys.\"));\n\n }\n\n if let Some(key) = keys.iter().max() {\n\n max_key = key.as_ref();\n", "file_path": "src/utils/tree_utils.rs", "rank": 5, "score": 158262.53850233107 }, { "content": "/// Internal type alias for the underlying tree.\n\ntype Tree<ArrayType, ValueType> = MerkleBIT<\n\n RocksDB<ArrayType>,\n\n TreeBranch<ArrayType>,\n\n TreeLeaf<ArrayType>,\n\n TreeData,\n\n TreeNode<ArrayType>,\n\n TreeHasher,\n\n ValueType,\n\n ArrayType,\n\n>;\n\n\n\npub struct RocksTree<ArrayType = [u8; 32], ValueType = Vec<u8>>\n\nwhere\n\n ArrayType: Array + Serialize + DeserializeOwned,\n\n ValueType: Encode + Decode,\n\n{\n\n tree: Tree<ArrayType, ValueType>,\n\n}\n\n\n\nimpl<ArrayType, ValueType> RocksTree<ArrayType, ValueType>\n", "file_path": "src/rocks_tree.rs", "rank": 6, "score": 136505.58809170517 }, { "content": "#[cfg(not(any(feature = \"use_rocksdb\")))]\n\ntype Tree = HashTree<[u8; KEY_LEN], Vec<u8>>;\n\n\n", "file_path": "benches/big_benches.rs", "rank": 8, "score": 127945.34148063017 }, { "content": "#[cfg(feature = \"use_rocksdb\")]\n\ntype Tree = RocksTree<[u8; KEY_LEN], Vec<u8>>;\n\n\n\n/** Benchmarks 1000, 2000, 5000, 10000 inserts to a tree with no previous state */\n", "file_path": "benches/big_benches.rs", "rank": 9, "score": 127945.34148063017 }, { "content": "#[cfg(not(any(feature = \"use_rocksdb\")))]\n\ntype Tree = HashTree<[u8; KEY_LEN], Vec<u8>>;\n\n\n", "file_path": "benches/merkle_bit_benches.rs", "rank": 10, "score": 125680.41176266033 }, { "content": "#[cfg(feature = \"use_rocksdb\")]\n\ntype Tree = RocksTree<[u8; KEY_LEN], Vec<u8>>;\n\n\n\n/** Benchmarks 1, 10 , and 100 inserts to a tree with no previous state 
*/\n", "file_path": "benches/merkle_bit_benches.rs", "rank": 11, "score": 125680.41176266033 }, { "content": "/// The required interface for structs representing leaves in the tree.\n\npub trait Leaf<ArrayType>\n\nwhere\n\n ArrayType: Array,\n\n{\n\n /// Creates a new `Leaf` node.\n\n fn new() -> Self;\n\n /// Gets the associated key with this node.\n\n fn get_key(&self) -> &ArrayType;\n\n /// Gets the location of the `Data` node.\n\n fn get_data(&self) -> &ArrayType;\n\n /// Sets the associated key with this node.\n\n fn set_key(&mut self, key: ArrayType);\n\n /// Sets the location of the `Data` node.\n\n fn set_data(&mut self, data: ArrayType);\n\n /// Decomposes the `Leaf` into its constituent parts.\n\n fn decompose(self) -> (ArrayType, ArrayType);\n\n}\n\n\n", "file_path": "src/traits.rs", "rank": 12, "score": 108715.76050649564 }, { "content": "/// The required interface for structs representing branches in the tree.\n\npub trait Branch<ArrayType>\n\nwhere\n\n ArrayType: Array,\n\n{\n\n /// Creates a new `Branch`.\n\n fn new() -> Self;\n\n /// Gets the count of leaves beneath this node.\n\n fn get_count(&self) -> u64;\n\n /// Gets the location of the zero branch beneath this node.\n\n fn get_zero(&self) -> &ArrayType;\n\n /// Gets the location of the one branch beneath this node.\n\n fn get_one(&self) -> &ArrayType;\n\n /// Gets the index on which to split keys when traversing this node.\n\n fn get_split_index(&self) -> usize;\n\n /// Gets the associated key with this node.\n\n fn get_key(&self) -> &ArrayType;\n\n /// Sets the count of leaves below this node.\n\n fn set_count(&mut self, count: u64);\n\n /// Sets the location of the zero branch beneath this node.\n\n fn set_zero(&mut self, zero: ArrayType);\n\n /// Sets the location of the one branch beneath this node..\n\n fn set_one(&mut self, one: ArrayType);\n\n /// Sets the index on which to split keys when traversing this node.\n\n fn set_split_index(&mut self, index: usize);\n\n /// Sets the associated 
key for this node.\n\n fn set_key(&mut self, key: ArrayType);\n\n /// Decomposes the `Branch` into its constituent parts.\n\n fn decompose(self) -> (u64, ArrayType, ArrayType, usize, ArrayType);\n\n}\n\n\n", "file_path": "src/traits.rs", "rank": 13, "score": 108715.76050649564 }, { "content": "/// The required interface for structs representing a hasher.\n\npub trait Hasher<ArrayType>\n\nwhere\n\n ArrayType: Array,\n\n{\n\n /// The type of hasher.\n\n type HashType;\n\n /// Creates a new `HashType`.\n\n fn new(size: usize) -> Self::HashType;\n\n /// Adds data to be hashed.\n\n fn update(&mut self, data: &[u8]);\n\n /// Outputs the hash from updated data.\n\n fn finalize(self) -> ArrayType;\n\n}\n\n\n\n#[cfg(feature = \"use_digest\")]\n\nimpl<T, ArrayType> Hasher<ArrayType> for T\n\nwhere\n\n T: Digest,\n\n ArrayType: Array,\n\n{\n", "file_path": "src/traits.rs", "rank": 14, "score": 108712.53542566986 }, { "content": "/// This trait defines the required interface for connecting a storage mechanism to the `MerkleBIT`.\n\npub trait Database<ArrayType>\n\nwhere\n\n ArrayType: Array,\n\n{\n\n /// The type of node to insert into the database.\n\n type NodeType;\n\n /// The type of entry for insertion. 
Primarily for convenience and tracking what goes into the database.\n\n type EntryType;\n\n /// Opens an existing `Database`.\n\n /// # Errors\n\n /// `Exception` generated if the `open` does not succeed.\n\n fn open(path: &PathBuf) -> Result<Self, Exception>\n\n where\n\n Self: Sized;\n\n /// Gets a value from the database based on the given key.\n\n /// # Errors\n\n /// `Exception` generated if the `get_node` does not succeed.\n\n fn get_node(&self, key: ArrayType) -> Result<Option<Self::NodeType>, Exception>;\n\n /// Queues a key and its associated value for insertion to the database.\n\n /// # Errors\n", "file_path": "src/traits.rs", "rank": 15, "score": 108708.2153056831 }, { "content": "/// The required interface for structs representing nodes in the tree.\n\npub trait Node<BranchType, LeafType, DataType, ArrayType>\n\nwhere\n\n BranchType: Branch<ArrayType>,\n\n LeafType: Leaf<ArrayType>,\n\n DataType: Data,\n\n ArrayType: Array,\n\n{\n\n /// Creates a new `Node`.\n\n fn new(node_variant: NodeVariant<BranchType, LeafType, DataType, ArrayType>) -> Self;\n\n /// Gets the number of references to this node.\n\n fn get_references(&self) -> u64;\n\n /// Decomposes the struct into its inner type.\n\n fn get_variant(self) -> NodeVariant<BranchType, LeafType, DataType, ArrayType>;\n\n /// Sets the number of references to this node.\n\n fn set_references(&mut self, references: u64);\n\n /// Sets the node to contain a `Branch` node. Mutually exclusive with `set_data` and `set_leaf`.\n\n fn set_branch(&mut self, branch: BranchType);\n\n /// Sets the node to contain a `Leaf` node. Mututally exclusive with `set_data` and `set_branch`.\n\n fn set_leaf(&mut self, leaf: LeafType);\n\n /// Sets the node to contain a `Data` node. 
Mutually exclusive with `set_leaf` and `set_branch`.\n", "file_path": "src/traits.rs", "rank": 16, "score": 106194.35651191062 }, { "content": "/// The required interface for an object that functions like an array.\n\npub trait Array: AsRef<[u8]> + AsMut<[u8]> + Clone + Copy + Default + Hash + Ord + Sized {}\n\n\n\nimpl<T> Array for T where T: AsRef<[u8]> + AsMut<[u8]> + Clone + Copy + Default + Hash + Ord + Sized {}\n\n\n", "file_path": "src/traits.rs", "rank": 17, "score": 103015.37357722013 }, { "content": "fn get_key_and_value(data: &[u8]) -> (Vec<Vec<u8>>, Vec<Vec<u8>>) {\n\n if data.is_empty() || data.len() < 2 {\n\n return (vec![vec![0]], vec![vec![0]])\n\n }\n\n let split = data.split_at(data.len() / 2);\n\n (vec![split.0.to_vec()], vec![split.1.to_vec()])\n\n}", "file_path": "fuzz/fuzz_targets/round_trip_empty_tree.rs", "rank": 18, "score": 101535.81056215848 }, { "content": "fn prepare_inserts(num_entries: usize, rng: &mut StdRng) -> (Vec<[u8; KEY_LEN]>, Vec<Vec<u8>>) {\n\n let mut keys = Vec::with_capacity(num_entries);\n\n let mut data = Vec::with_capacity(num_entries);\n\n for _ in 0..num_entries {\n\n let mut key_value = [0u8; KEY_LEN];\n\n rng.fill(&mut key_value);\n\n keys.push(key_value);\n\n\n\n let mut data_value = [0u8; KEY_LEN];\n\n rng.fill(data_value.as_mut());\n\n data.push(data_value.to_vec());\n\n }\n\n\n\n keys.sort();\n\n\n\n (keys, data)\n\n}\n", "file_path": "benches/big_benches.rs", "rank": 19, "score": 89335.27140519528 }, { "content": "fn prepare_inserts(num_entries: usize, rng: &mut StdRng) -> (Vec<[u8; KEY_LEN]>, Vec<Vec<u8>>) {\n\n let mut keys = Vec::with_capacity(num_entries);\n\n let mut data = Vec::with_capacity(num_entries);\n\n for _ in 0..num_entries {\n\n let mut key_value = [0u8; KEY_LEN];\n\n rng.fill(&mut key_value);\n\n keys.push(key_value);\n\n\n\n let data_value = (0..KEY_LEN).map(|_| rng.gen()).collect();\n\n data.push(data_value);\n\n }\n\n\n\n keys.sort();\n\n\n\n (keys, data)\n\n}\n", "file_path": 
"benches/merkle_bit_benches.rs", "rank": 20, "score": 87717.56206169736 }, { "content": "/// This trait must be implemented to allow an arbitrary sized buffer to be deserialized.\n\n/// # Errors\n\n/// `Exception` generated when the buffer fails to be decoded to the target type.\n\npub trait Decode {\n\n /// Decodes bytes into a `Sized` struct.\n\n /// # Errors\n\n /// `Exception` generated when the buffer fails to be decoded to the target type.\n\n fn decode(buffer: &[u8]) -> Result<Self, Exception>\n\n where\n\n Self: Sized;\n\n}\n\n\n\nimpl Decode for Vec<u8> {\n\n #[inline]\n\n fn decode(buffer: &[u8]) -> Result<Self, Exception> {\n\n Ok(buffer.to_vec())\n\n }\n\n}\n\n\n\n/// A generic error that implements `Error`.\n\n/// Mostly intended to be used to standardize errors across the crate.\n\n#[derive(Debug)]\n\npub struct Exception {\n", "file_path": "src/traits.rs", "rank": 21, "score": 83958.6328743683 }, { "content": "/// This trait must be implemented to allow a struct to be serialized.\n\npub trait Encode {\n\n /// Encodes a struct into bytes.\n\n /// # Errors\n\n /// `Exception` generated when the method encoding the structure fails.\n\n fn encode(&self) -> Result<Vec<u8>, Exception>;\n\n}\n\n\n\nimpl Encode for Vec<u8> {\n\n #[inline]\n\n fn encode(&self) -> Result<Self, Exception> {\n\n Ok(self.clone())\n\n }\n\n}\n\n\n", "file_path": "src/traits.rs", "rank": 22, "score": 83955.39517792036 }, { "content": "fn main() -> BinaryMerkleTreeResult<()> {\n\n let mut tree: HashTree<[u8; KEY_LEN], Vec<u8>> = HashTree::new(16)?;\n\n\n\n let key = [0x00; KEY_LEN];\n\n let value = vec![0x00; KEY_LEN];\n\n\n\n // Inserting and getting from a tree\n\n let new_root = tree.insert(None, &mut [key], &vec![value.clone()])?;\n\n let retrieved_value = tree.get_one(&new_root, &key)?.unwrap();\n\n assert_eq!(retrieved_value, value.clone());\n\n\n\n // Generating an inclusion proof of an element in the tree\n\n let inclusion_proof = tree.generate_inclusion_proof(&new_root, 
key)?;\n\n\n\n // Verifying an inclusion proof.\n\n HashTree::verify_inclusion_proof(&new_root, key, &value, &inclusion_proof)?;\n\n\n\n // Attempting to get from a removed root will yield None\n\n tree.remove(&new_root)?;\n\n let item_map2 = tree.get(&new_root, &mut [key])?;\n\n assert_eq!(item_map2[&key], None);\n\n\n\n Ok(())\n\n}\n", "file_path": "examples/main.rs", "rank": 23, "score": 73955.15084492849 }, { "content": "/// Enum used for splitting nodes into either the left or right path during tree traversal\n\nenum SplitNodeType<'a, BranchType, LeafType, DataType, NodeType, ArrayType>\n\nwhere\n\n BranchType: Branch<ArrayType>,\n\n LeafType: Leaf<ArrayType>,\n\n DataType: Data,\n\n NodeType: Node<BranchType, LeafType, DataType, ArrayType>,\n\n ArrayType: Array,\n\n{\n\n /// Used for building the `proof_nodes` variable during tree traversal\n\n Ref(TreeRef<ArrayType>),\n\n /// Used for appending to the `cell_queue` during tree traversal.\n\n Cell(TreeCell<'a, NodeType, ArrayType>),\n\n /// PhantomData marker\n\n _UnusedBranch(PhantomData<BranchType>),\n\n /// PhantomData marker\n\n _UnusedLeaf(PhantomData<LeafType>),\n\n /// PhantomData marker\n\n _UnusedData(PhantomData<DataType>),\n\n}\n\n\n", "file_path": "src/merkle_bit.rs", "rank": 24, "score": 71294.36721554525 }, { "content": "fn hash_tree_empty_tree_insert_big_benchmark(c: &mut Criterion) {\n\n let path = PathBuf::from(\"db\");\n\n let seed = [0xBBu8; KEY_LEN];\n\n let mut rng: StdRng = SeedableRng::from_seed(seed);\n\n c.bench_function_over_inputs(\n\n \"Big Tree Empty Insert\",\n\n move |b, index| {\n\n let (mut keys, values) = prepare_inserts(10000, &mut rng);\n\n\n\n let mut bmt = Tree::open(&path, 160).unwrap();\n\n b.iter(|| {\n\n let root = bmt\n\n .insert(None, &mut keys[0..*index], &values[0..*index])\n\n .unwrap();\n\n criterion::black_box(root);\n\n });\n\n },\n\n vec![1000, 2000, 5000, 10000],\n\n );\n\n #[cfg(any(feature = \"use_rocksdb\"))]\n\n let path = PathBuf::from(\"db\");\n\n 
#[cfg(any(feature = \"use_rocksdb\"))]\n\n remove_dir_all(&path).unwrap();\n\n}\n\n\n\n/** Benchmarks 1000, 2000, 5000, 10000 inserts into a tree with existing root */\n", "file_path": "benches/big_benches.rs", "rank": 25, "score": 70259.57830239621 }, { "content": "fn hash_tree_existing_tree_insert_big_benchmark(c: &mut Criterion) {\n\n let path = PathBuf::from(\"db\");\n\n let seed = [0xBBu8; KEY_LEN];\n\n let mut rng: StdRng = SeedableRng::from_seed(seed);\n\n c.bench_function_over_inputs(\n\n \"Big Tree Non Empty Insert\",\n\n move |b, index| {\n\n let (mut keys, values) = prepare_inserts(10000, &mut rng);\n\n\n\n let mut bmt = Tree::open(&path, 160).unwrap();\n\n let root_hash = bmt.insert(None, &mut keys, &values).unwrap();\n\n let (mut second_keys, second_values) = prepare_inserts(10000, &mut rng);\n\n\n\n b.iter(|| {\n\n let root = bmt\n\n .insert(\n\n Some(&root_hash),\n\n &mut second_keys[0..*index],\n\n &second_values[0..*index],\n\n )\n", "file_path": "benches/big_benches.rs", "rank": 26, "score": 70259.57830239621 }, { "content": "fn hash_tree_empty_tree_insert_benchmark(c: &mut Criterion) {\n\n let path = PathBuf::from(\"db\");\n\n let seed = [0xBBu8; KEY_LEN];\n\n let mut rng: StdRng = SeedableRng::from_seed(seed);\n\n c.bench_function_over_inputs(\n\n \"Tree Empty Insert\",\n\n move |b, index| {\n\n let (mut keys, values) = prepare_inserts(1000, &mut rng);\n\n let mut bmt = Tree::open(&path, 160).unwrap();\n\n b.iter(|| {\n\n let root = bmt\n\n .insert(None, &mut keys[0..*index], &values[0..*index])\n\n .unwrap();\n\n criterion::black_box(root);\n\n });\n\n },\n\n vec![1, 10, 100, 200, 500, 1000],\n\n );\n\n #[cfg(any(feature = \"use_rocksdb\"))]\n\n let path = PathBuf::from(\"db\");\n\n #[cfg(any(feature = \"use_rocksdb\"))]\n\n remove_dir_all(&path).unwrap();\n\n}\n\n\n\n/** Benchmarks 1, 10, and 100 inserts into a tree with existing root */\n", "file_path": "benches/merkle_bit_benches.rs", "rank": 27, "score": 70259.57830239621 }, { "content": 
"fn hash_tree_existing_tree_insert_benchmark(c: &mut Criterion) {\n\n let path = PathBuf::from(\"db\");\n\n let seed = [0xBBu8; KEY_LEN];\n\n let mut rng: StdRng = SeedableRng::from_seed(seed);\n\n c.bench_function_over_inputs(\n\n \"Tree Non Empty Insert\",\n\n move |b, index| {\n\n let (mut keys, values) = prepare_inserts(4096, &mut rng);\n\n\n\n let mut bmt = Tree::open(&path, 160).unwrap();\n\n let root_hash = bmt.insert(None, &mut keys, &values).unwrap();\n\n let (mut second_keys, second_values) = prepare_inserts(1000, &mut rng);\n\n\n\n b.iter(|| {\n\n let root = bmt\n\n .insert(\n\n Some(&root_hash),\n\n &mut second_keys[0..*index],\n\n &second_values[0..*index],\n\n )\n", "file_path": "benches/merkle_bit_benches.rs", "rank": 28, "score": 70259.57830239621 }, { "content": "fn remove_from_tree_benchmark(c: &mut Criterion) {\n\n let path = PathBuf::from(\"db\");\n\n let seed = [0xBBu8; KEY_LEN];\n\n let mut rng: StdRng = SeedableRng::from_seed(seed);\n\n\n\n c.bench_function(\"Tree Remove Benchmark/4096\", move |b| {\n\n let (mut keys, values) = prepare_inserts(4096, &mut rng);\n\n let mut tree = Tree::open(&path.clone(), 160).unwrap();\n\n let root_hash = tree.insert(None, &mut keys, &values).unwrap();\n\n b.iter(|| {\n\n criterion::black_box(tree.remove(&root_hash).unwrap());\n\n })\n\n });\n\n #[cfg(any(feature = \"use_rocksdb\"))]\n\n let path = PathBuf::from(\"db\");\n\n #[cfg(any(feature = \"use_rocksdb\"))]\n\n remove_dir_all(&path).unwrap();\n\n}\n\n\n\ncriterion_group!(\n\n benches,\n\n hash_tree_empty_tree_insert_benchmark,\n\n hash_tree_existing_tree_insert_benchmark,\n\n get_from_hash_tree_benchmark,\n\n remove_from_tree_benchmark\n\n);\n\ncriterion_main!(benches);\n\n\n", "file_path": "benches/merkle_bit_benches.rs", "rank": 29, "score": 65939.32658575663 }, { "content": "fn remove_from_tree_big_benchmark(c: &mut Criterion) {\n\n let path = PathBuf::from(\"db\");\n\n let seed = [0xBBu8; KEY_LEN];\n\n let mut rng: StdRng = 
SeedableRng::from_seed(seed);\n\n\n\n c.bench_function(\"Big Tree Remove Benchmark/10000\", move |b| {\n\n let (mut keys, values) = prepare_inserts(10000, &mut rng);\n\n let mut tree = Tree::open(&path.clone(), 160).unwrap();\n\n\n\n let root_hash = tree.insert(None, &mut keys, &values).unwrap();\n\n b.iter(|| {\n\n tree.remove(&root_hash).unwrap();\n\n })\n\n });\n\n #[cfg(any(feature = \"use_rocksdb\"))]\n\n let path = PathBuf::from(\"db\");\n\n #[cfg(any(feature = \"use_rocksdb\"))]\n\n remove_dir_all(&path).unwrap();\n\n}\n\n\n\ncriterion_group!(\n\n big_benches,\n\n hash_tree_empty_tree_insert_big_benchmark,\n\n hash_tree_existing_tree_insert_big_benchmark,\n\n get_from_hash_tree_big_benchmark,\n\n remove_from_tree_big_benchmark\n\n);\n\ncriterion_main!(big_benches);\n\n\n", "file_path": "benches/big_benches.rs", "rank": 30, "score": 65939.32658575663 }, { "content": "fn get_from_hash_tree_benchmark(c: &mut Criterion) {\n\n let path = PathBuf::from(\"db\");\n\n let seed = [0xBBu8; KEY_LEN];\n\n let mut rng: StdRng = SeedableRng::from_seed(seed);\n\n c.bench_function(\"Tree Get Benchmark/4096\", move |b| {\n\n let (mut keys, values) = prepare_inserts(4096, &mut rng);\n\n let mut bmt = Tree::open(&path, 160).unwrap();\n\n let root_hash = bmt.insert(None, &mut keys, &values).unwrap();\n\n\n\n b.iter(|| {\n\n let items = bmt.get(&root_hash, &mut keys).unwrap();\n\n criterion::black_box(items);\n\n })\n\n });\n\n #[cfg(any(feature = \"use_rocksdb\"))]\n\n let path = PathBuf::from(\"db\");\n\n #[cfg(any(feature = \"use_rocksdb\"))]\n\n remove_dir_all(&path).unwrap();\n\n}\n\n\n", "file_path": "benches/merkle_bit_benches.rs", "rank": 31, "score": 64185.81838666991 }, { "content": "fn get_from_hash_tree_big_benchmark(c: &mut Criterion) {\n\n let path = PathBuf::from(\"db\");\n\n let seed = [0xBBu8; KEY_LEN];\n\n let mut rng: StdRng = SeedableRng::from_seed(seed);\n\n c.bench_function(\"Big Tree Get Benchmark/10000\", move |b| {\n\n let (mut keys, values) = 
prepare_inserts(10000, &mut rng);\n\n\n\n let mut bmt = Tree::open(&path, 160).unwrap();\n\n let root_hash = bmt.insert(None, &mut keys, &values).unwrap();\n\n\n\n b.iter(|| {\n\n let items = bmt.get(&root_hash, &mut keys).unwrap();\n\n criterion::black_box(items);\n\n })\n\n });\n\n #[cfg(any(feature = \"use_rocksdb\"))]\n\n let path = PathBuf::from(\"db\");\n\n #[cfg(any(feature = \"use_rocksdb\"))]\n\n remove_dir_all(&path).unwrap();\n\n}\n\n\n", "file_path": "benches/big_benches.rs", "rank": 32, "score": 64185.81838666991 }, { "content": "fn main() {}\n", "file_path": "build.rs", "rank": 33, "score": 55322.587414434165 }, { "content": "/// The required interface for structs representing data stored in the tree.\n\npub trait Data {\n\n /// Creates a new `Data` node.\n\n fn new() -> Self;\n\n /// Gets the value for the `Data` node.\n\n fn get_value(&self) -> &[u8];\n\n /// Sets the value for the `Data` node.\n\n fn set_value(&mut self, value: &[u8]);\n\n}\n\n\n", "file_path": "src/traits.rs", "rank": 34, "score": 48789.16763607644 }, { "content": "impl<ArrayType> Encode for TreeNode<ArrayType>\n\nwhere\n\n ArrayType: Array + Serialize,\n\n{\n\n #[inline]\n\n fn encode(&self) -> BinaryMerkleTreeResult<Vec<u8>> {\n\n Ok(ron::ser::to_string(&self)?.as_bytes().to_vec())\n\n }\n\n}\n\n\n\n#[cfg(feature = \"use_bincode\")]\n\nimpl<ArrayType> Decode for TreeNode<ArrayType>\n\nwhere\n\n ArrayType: Array + DeserializeOwned,\n\n{\n\n #[inline]\n\n fn decode(buffer: &[u8]) -> BinaryMerkleTreeResult<Self> {\n\n Ok(deserialize(buffer)?)\n\n }\n\n}\n", "file_path": "src/tree/tree_node.rs", "rank": 35, "score": 40496.58437247722 }, { "content": " Self::set_split_index(self, index)\n\n }\n\n #[inline]\n\n fn set_key(&mut self, key: ArrayType) {\n\n Self::set_key(self, key)\n\n }\n\n\n\n #[inline]\n\n fn decompose(self) -> (u64, ArrayType, ArrayType, usize, ArrayType) {\n\n Self::decompose(self)\n\n }\n\n}\n\n\n\n#[cfg(feature = \"use_bincode\")]\n\nimpl<ArrayType> Encode for 
TreeBranch<ArrayType>\n\nwhere\n\n ArrayType: Array + Serialize,\n\n{\n\n #[inline]\n\n fn encode(&self) -> BinaryMerkleTreeResult<Vec<u8>> {\n", "file_path": "src/tree/tree_branch.rs", "rank": 36, "score": 40494.955279099755 }, { "content": "}\n\n\n\n#[cfg(feature = \"use_pickle\")]\n\nimpl<ArrayType> Encode for TreeLeaf<ArrayType>\n\nwhere\n\n ArrayType: Array + Serialize,\n\n{\n\n #[inline]\n\n fn encode(&self) -> BinaryMerkleTreeResult<Vec<u8>> {\n\n Ok(serde_pickle::to_vec(&self, true)?)\n\n }\n\n}\n\n\n\n#[cfg(feature = \"use_ron\")]\n\nimpl<ArrayType> Encode for TreeLeaf<ArrayType>\n\nwhere\n\n ArrayType: Array + Serialize,\n\n{\n\n #[inline]\n\n fn encode(&self) -> BinaryMerkleTreeResult<Vec<u8>> {\n", "file_path": "src/tree/tree_leaf.rs", "rank": 37, "score": 40494.511032083385 }, { "content": "#[cfg(feature = \"use_cbor\")]\n\nimpl<ArrayType> Encode for TreeLeaf<ArrayType>\n\nwhere\n\n ArrayType: Array + Serialize,\n\n{\n\n #[inline]\n\n fn encode(&self) -> BinaryMerkleTreeResult<Vec<u8>> {\n\n Ok(serde_cbor::to_vec(&self)?)\n\n }\n\n}\n\n\n\n#[cfg(feature = \"use_yaml\")]\n\nimpl<ArrayType> Encode for TreeLeaf<ArrayType>\n\nwhere\n\n ArrayType: Array + Serialize,\n\n{\n\n #[inline]\n\n fn encode(&self) -> BinaryMerkleTreeResult<Vec<u8>> {\n\n Ok(serde_yaml::to_vec(&self)?)\n\n }\n", "file_path": "src/tree/tree_leaf.rs", "rank": 38, "score": 40494.26499847452 }, { "content": " }\n\n}\n\n\n\n#[cfg(feature = \"use_bincode\")]\n\nimpl<ArrayType> Encode for TreeNode<ArrayType>\n\nwhere\n\n ArrayType: Array + Serialize,\n\n{\n\n #[inline]\n\n fn encode(&self) -> BinaryMerkleTreeResult<Vec<u8>> {\n\n Ok(serialize(self)?)\n\n }\n\n}\n\n\n\n#[cfg(feature = \"use_json\")]\n\nimpl<ArrayType> Encode for TreeNode<ArrayType>\n\nwhere\n\n ArrayType: Array + Serialize,\n\n{\n\n #[inline]\n", "file_path": "src/tree/tree_node.rs", "rank": 39, "score": 40494.02809605451 }, { "content": " ArrayType: Array + Serialize,\n\n{\n\n #[inline]\n\n fn encode(&self) -> 
BinaryMerkleTreeResult<Vec<u8>> {\n\n Ok(serde_yaml::to_vec(&self)?)\n\n }\n\n}\n\n\n\n#[cfg(feature = \"use_pickle\")]\n\nimpl<ArrayType> Encode for TreeNode<ArrayType>\n\nwhere\n\n ArrayType: Array + Serialize,\n\n{\n\n #[inline]\n\n fn encode(&self) -> BinaryMerkleTreeResult<Vec<u8>> {\n\n Ok(serde_pickle::to_vec(&self, true)?)\n\n }\n\n}\n\n\n\n#[cfg(feature = \"use_ron\")]\n", "file_path": "src/tree/tree_node.rs", "rank": 40, "score": 40493.90005223718 }, { "content": "\n\n#[cfg(feature = \"use_cbor\")]\n\nimpl<ArrayType> Decode for TreeBranch<ArrayType>\n\nwhere\n\n ArrayType: Array + DeserializeOwned,\n\n{\n\n #[inline]\n\n fn decode(buffer: &[u8]) -> BinaryMerkleTreeResult<Self> {\n\n Ok(serde_cbor::from_slice(buffer)?)\n\n }\n\n}\n\n\n\n#[cfg(feature = \"use_yaml\")]\n\nimpl<ArrayType> Decode for TreeBranch<ArrayType>\n\nwhere\n\n ArrayType: Array + DeserializeOwned,\n\n{\n\n #[inline]\n\n fn decode(buffer: &[u8]) -> BinaryMerkleTreeResult<Self> {\n\n Ok(serde_yaml::from_slice(buffer)?)\n", "file_path": "src/tree/tree_branch.rs", "rank": 41, "score": 40493.7942816659 }, { "content": " }\n\n}\n\n\n\n#[cfg(feature = \"use_pickle\")]\n\nimpl<ArrayType> Decode for TreeBranch<ArrayType>\n\nwhere\n\n ArrayType: Array + DeserializeOwned,\n\n{\n\n #[inline]\n\n fn decode(buffer: &[u8]) -> BinaryMerkleTreeResult<Self> {\n\n Ok(serde_pickle::from_slice(buffer)?)\n\n }\n\n}\n\n\n\n#[cfg(feature = \"use_ron\")]\n\nimpl<ArrayType> Decode for TreeBranch<ArrayType>\n\nwhere\n\n ArrayType: Array + DeserializeOwned,\n\n{\n\n #[inline]\n\n fn decode(buffer: &[u8]) -> BinaryMerkleTreeResult<Self> {\n\n Ok(ron::de::from_bytes(buffer)?)\n\n }\n\n}\n", "file_path": "src/tree/tree_branch.rs", "rank": 42, "score": 40493.7942816659 }, { "content": " ArrayType: Array + Serialize,\n\n{\n\n #[inline]\n\n fn encode(&self) -> BinaryMerkleTreeResult<Vec<u8>> {\n\n Ok(serialize(self)?)\n\n }\n\n}\n\n\n\n#[cfg(feature = \"use_json\")]\n\nimpl<ArrayType> Encode for 
TreeLeaf<ArrayType>\n\nwhere\n\n ArrayType: Array + Serialize,\n\n{\n\n #[inline]\n\n fn encode(&self) -> BinaryMerkleTreeResult<Vec<u8>> {\n\n let encoded = serde_json::to_string(&self)?;\n\n Ok(encoded.as_bytes().to_vec())\n\n }\n\n}\n\n\n", "file_path": "src/tree/tree_leaf.rs", "rank": 43, "score": 40493.77195278739 }, { "content": " ArrayType: Array + Serialize,\n\n{\n\n #[inline]\n\n fn encode(&self) -> BinaryMerkleTreeResult<Vec<u8>> {\n\n Ok(ron::ser::to_string(&self)?.as_bytes().to_vec())\n\n }\n\n}\n\n\n\n#[cfg(feature = \"use_ron\")]\n\nimpl From<ron::error::Error> for Exception {\n\n #[inline]\n\n fn from(error: ron::error::Error) -> Self {\n\n Self::new(&error.to_string())\n\n }\n\n}\n\n\n\n#[cfg(feature = \"use_bincode\")]\n\nimpl<ArrayType> Decode for TreeBranch<ArrayType>\n\nwhere\n\n ArrayType: Array + Serialize + DeserializeOwned,\n", "file_path": "src/tree/tree_branch.rs", "rank": 44, "score": 40493.5473482179 }, { "content": " fn encode(&self) -> BinaryMerkleTreeResult<Vec<u8>> {\n\n let encoded = serde_json::to_string(&self)?;\n\n Ok(encoded.as_bytes().to_vec())\n\n }\n\n}\n\n\n\n#[cfg(feature = \"use_cbor\")]\n\nimpl<ArrayType> Encode for TreeNode<ArrayType>\n\nwhere\n\n ArrayType: Array + Serialize,\n\n{\n\n #[inline]\n\n fn encode(&self) -> BinaryMerkleTreeResult<Vec<u8>> {\n\n Ok(serde_cbor::to_vec(&self)?)\n\n }\n\n}\n\n\n\n#[cfg(feature = \"use_yaml\")]\n\nimpl<ArrayType> Encode for TreeNode<ArrayType>\n\nwhere\n", "file_path": "src/tree/tree_node.rs", "rank": 45, "score": 40493.4377687703 }, { "content": "\n\n#[cfg(feature = \"use_json\")]\n\nimpl<ArrayType> Decode for TreeNode<ArrayType>\n\nwhere\n\n ArrayType: Array + DeserializeOwned,\n\n{\n\n #[inline]\n\n fn decode(buffer: &[u8]) -> BinaryMerkleTreeResult<Self> {\n\n let decoded_string = String::from_utf8(buffer.to_vec())?;\n\n let decoded = serde_json::from_str(&decoded_string)?;\n\n Ok(decoded)\n\n }\n\n}\n\n\n\n#[cfg(feature = \"use_cbor\")]\n\nimpl<ArrayType> Decode for 
TreeNode<ArrayType>\n\nwhere\n\n ArrayType: Array + DeserializeOwned,\n\n{\n\n #[inline]\n", "file_path": "src/tree/tree_node.rs", "rank": 46, "score": 40493.41373791853 }, { "content": " #[inline]\n\n fn decode(buffer: &[u8]) -> BinaryMerkleTreeResult<Self> {\n\n let decoded_string = String::from_utf8(buffer.to_vec())?;\n\n let decoded = serde_json::from_str(&decoded_string)?;\n\n Ok(decoded)\n\n }\n\n}\n\n\n\n#[cfg(feature = \"use_cbor\")]\n\nimpl<ArrayType> Decode for TreeLeaf<ArrayType>\n\nwhere\n\n ArrayType: Array + DeserializeOwned,\n\n{\n\n #[inline]\n\n fn decode(buffer: &[u8]) -> BinaryMerkleTreeResult<Self> {\n\n Ok(serde_cbor::from_slice(buffer)?)\n\n }\n\n}\n\n\n\n#[cfg(feature = \"use_yaml\")]\n", "file_path": "src/tree/tree_leaf.rs", "rank": 47, "score": 40493.229451173436 }, { "content": "impl<ArrayType> Decode for TreeLeaf<ArrayType>\n\nwhere\n\n ArrayType: Array + DeserializeOwned,\n\n{\n\n #[inline]\n\n fn decode(buffer: &[u8]) -> BinaryMerkleTreeResult<Self> {\n\n Ok(serde_yaml::from_slice(buffer)?)\n\n }\n\n}\n\n\n\n#[cfg(feature = \"use_pickle\")]\n\nimpl<ArrayType> Decode for TreeLeaf<ArrayType>\n\nwhere\n\n ArrayType: Array + DeserializeOwned,\n\n{\n\n #[inline]\n\n fn decode(buffer: &[u8]) -> BinaryMerkleTreeResult<Self> {\n\n Ok(serde_pickle::from_slice(buffer)?)\n\n }\n\n}\n", "file_path": "src/tree/tree_leaf.rs", "rank": 48, "score": 40493.227643575934 }, { "content": " fn decode(buffer: &[u8]) -> BinaryMerkleTreeResult<Self> {\n\n Ok(serde_cbor::from_slice(buffer)?)\n\n }\n\n}\n\n\n\n#[cfg(feature = \"use_yaml\")]\n\nimpl<ArrayType> Decode for TreeNode<ArrayType>\n\nwhere\n\n ArrayType: Array + DeserializeOwned,\n\n{\n\n #[inline]\n\n fn decode(buffer: &[u8]) -> BinaryMerkleTreeResult<Self> {\n\n Ok(serde_yaml::from_slice(buffer)?)\n\n }\n\n}\n\n\n\n#[cfg(feature = \"use_pickle\")]\n\nimpl<ArrayType> Decode for TreeNode<ArrayType>\n\nwhere\n\n ArrayType: Array + DeserializeOwned,\n", "file_path": "src/tree/tree_node.rs", "rank": 49, 
"score": 40492.92841711381 }, { "content": "{\n\n #[inline]\n\n fn decode(buffer: &[u8]) -> BinaryMerkleTreeResult<Self> {\n\n Ok(serde_pickle::from_slice(buffer)?)\n\n }\n\n}\n\n\n\n#[cfg(feature = \"use_ron\")]\n\nimpl<ArrayType> Decode for TreeNode<ArrayType>\n\nwhere\n\n ArrayType: Array + DeserializeOwned,\n\n{\n\n #[inline]\n\n fn decode(buffer: &[u8]) -> BinaryMerkleTreeResult<Self> {\n\n Ok(ron::de::from_bytes(buffer)?)\n\n }\n\n}\n", "file_path": "src/tree/tree_node.rs", "rank": 50, "score": 40492.894381685684 }, { "content": "{\n\n #[inline]\n\n fn decode(buffer: &[u8]) -> BinaryMerkleTreeResult<Self> {\n\n let a = deserialize(buffer)?;\n\n Ok(a)\n\n }\n\n}\n\n\n\n#[cfg(feature = \"use_json\")]\n\nimpl<ArrayType> Decode for TreeBranch<ArrayType>\n\nwhere\n\n ArrayType: Array + DeserializeOwned,\n\n{\n\n #[inline]\n\n fn decode(buffer: &[u8]) -> BinaryMerkleTreeResult<Self> {\n\n let decoded_string = String::from_utf8(buffer.to_vec())?;\n\n let decoded = serde_json::from_str(&decoded_string)?;\n\n Ok(decoded)\n\n }\n\n}\n", "file_path": "src/tree/tree_branch.rs", "rank": 51, "score": 40492.88817222499 }, { "content": "impl<ArrayType> Encode for TreeBranch<ArrayType>\n\nwhere\n\n ArrayType: Array + Serialize,\n\n{\n\n #[inline]\n\n fn encode(&self) -> BinaryMerkleTreeResult<Vec<u8>> {\n\n Ok(serde_yaml::to_vec(&self)?)\n\n }\n\n}\n\n\n\n#[cfg(feature = \"use_yaml\")]\n\nimpl From<serde_yaml::Error> for Exception {\n\n #[inline]\n\n fn from(error: serde_yaml::Error) -> Self {\n\n Self::new(&error.to_string())\n\n }\n\n}\n\n\n\n#[cfg(feature = \"use_pickle\")]\n\nimpl<ArrayType> Encode for TreeBranch<ArrayType>\n", "file_path": "src/tree/tree_branch.rs", "rank": 52, "score": 40492.62570345869 }, { "content": " Ok(ron::ser::to_string(&self)?.as_bytes().to_vec())\n\n }\n\n}\n\n\n\n#[cfg(feature = \"use_bincode\")]\n\nimpl<ArrayType> Decode for TreeLeaf<ArrayType>\n\nwhere\n\n ArrayType: Array + DeserializeOwned,\n\n{\n\n #[inline]\n\n fn decode(buffer: &[u8]) 
-> BinaryMerkleTreeResult<Self> {\n\n Ok(deserialize(buffer)?)\n\n }\n\n}\n\n\n\n#[cfg(feature = \"use_json\")]\n\nimpl<ArrayType> Decode for TreeLeaf<ArrayType>\n\nwhere\n\n ArrayType: Array + DeserializeOwned,\n\n{\n", "file_path": "src/tree/tree_leaf.rs", "rank": 53, "score": 40492.27403822287 }, { "content": "\n\n#[cfg(feature = \"use_ron\")]\n\nimpl<ArrayType> Decode for TreeLeaf<ArrayType>\n\nwhere\n\n ArrayType: Array + DeserializeOwned,\n\n{\n\n #[inline]\n\n fn decode(buffer: &[u8]) -> BinaryMerkleTreeResult<Self> {\n\n Ok(ron::de::from_bytes(buffer)?)\n\n }\n\n}\n", "file_path": "src/tree/tree_leaf.rs", "rank": 54, "score": 40492.05200740701 }, { "content": " Ok(serialize(self)?)\n\n }\n\n}\n\n\n\n#[cfg(feature = \"use_bincode\")]\n\nimpl From<Box<bincode::ErrorKind>> for Exception {\n\n #[inline]\n\n fn from(error: Box<bincode::ErrorKind>) -> Self {\n\n Self::new(&error.to_string())\n\n }\n\n}\n\n\n\n#[cfg(feature = \"use_json\")]\n\nimpl<ArrayType> Encode for TreeBranch<ArrayType>\n\nwhere\n\n ArrayType: Array + Serialize,\n\n{\n\n #[inline]\n\n fn encode(&self) -> BinaryMerkleTreeResult<Vec<u8>> {\n\n let encoded = serde_json::to_string(&self)?;\n", "file_path": "src/tree/tree_branch.rs", "rank": 55, "score": 40491.88533035594 }, { "content": "where\n\n ArrayType: Array + Serialize,\n\n{\n\n #[inline]\n\n fn encode(&self) -> BinaryMerkleTreeResult<Vec<u8>> {\n\n Ok(serde_pickle::to_vec(&self, true)?)\n\n }\n\n}\n\n\n\n#[cfg(feature = \"use_pickle\")]\n\nimpl From<serde_pickle::Error> for Exception {\n\n #[inline]\n\n fn from(error: serde_pickle::Error) -> Self {\n\n Self::new(&error.to_string())\n\n }\n\n}\n\n\n\n#[cfg(feature = \"use_ron\")]\n\nimpl<ArrayType> Encode for TreeBranch<ArrayType>\n\nwhere\n", "file_path": "src/tree/tree_branch.rs", "rank": 56, "score": 40491.73219404889 }, { "content": "#[cfg(feature = \"use_serialization\")]\n\nuse crate::traits::{Decode, Encode};\n\nuse crate::tree::tree_branch::TreeBranch;\n\nuse 
crate::tree::tree_data::TreeData;\n\nuse crate::tree::tree_leaf::TreeLeaf;\n\n#[cfg(feature = \"use_evmap\")]\n\nuse evmap::ShallowCopy;\n\n\n\n/// A node in the tree.\n\n#[derive(Clone, Debug, PartialEq, Eq)]\n\n#[cfg_attr(any(feature = \"use_serde\"), derive(Serialize, Deserialize))]\n\npub struct TreeNode<ArrayType>\n\nwhere\n\n ArrayType: Array,\n\n{\n\n /// The number of references to this node.\n\n pub references: u64,\n\n /// The `NodeVariant` of the node.\n\n pub node: NodeVariant<TreeBranch<ArrayType>, TreeLeaf<ArrayType>, TreeData, ArrayType>,\n\n}\n", "file_path": "src/tree/tree_node.rs", "rank": 57, "score": 40491.682042208085 }, { "content": "#[cfg(feature = \"use_cbor\")]\n\nimpl<ArrayType> Encode for TreeBranch<ArrayType>\n\nwhere\n\n ArrayType: Array + Serialize,\n\n{\n\n #[inline]\n\n fn encode(&self) -> BinaryMerkleTreeResult<Vec<u8>> {\n\n Ok(serde_cbor::to_vec(&self)?)\n\n }\n\n}\n\n\n\n#[cfg(feature = \"use_cbor\")]\n\nimpl From<serde_cbor::error::Error> for Exception {\n\n #[inline]\n\n fn from(error: serde_cbor::error::Error) -> Self {\n\n Self::new(&error.to_string())\n\n }\n\n}\n\n\n\n#[cfg(feature = \"use_yaml\")]\n", "file_path": "src/tree/tree_branch.rs", "rank": 58, "score": 40491.64205980714 }, { "content": "#[cfg(feature = \"use_serialization\")]\n\nuse crate::traits::{Decode, Encode};\n\n\n\n/// Represents a leaf of the tree. 
Holds a pointer to the location of the underlying `Data` node.\n\n#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)]\n\n#[cfg_attr(feature = \"use_serde\", derive(Serialize, Deserialize))]\n\npub struct TreeLeaf<ArrayType>\n\nwhere\n\n ArrayType: Array,\n\n{\n\n /// The associated key with this node.\n\n key: ArrayType,\n\n /// The location of the `Data` node in the tree.\n\n data: ArrayType,\n\n}\n\n\n\nimpl<ArrayType> TreeLeaf<ArrayType>\n\nwhere\n\n ArrayType: Array,\n\n{\n", "file_path": "src/tree/tree_leaf.rs", "rank": 59, "score": 40490.754406495646 }, { "content": "#[cfg(feature = \"use_serde\")]\n\nuse crate::merkle_bit::BinaryMerkleTreeResult;\n\nuse crate::traits::{Array, Branch};\n\n#[cfg(feature = \"use_serde\")]\n\nuse crate::traits::{Decode, Encode, Exception};\n\n\n\n/// A struct representing a branch in the tree.\n\n#[derive(Copy, Clone, Debug, PartialEq, Eq)]\n\n#[cfg_attr(any(feature = \"use_serde\"), derive(Serialize, Deserialize))]\n\npub struct TreeBranch<ArrayType>\n\nwhere\n\n ArrayType: Array,\n\n{\n\n /// The number of leaf nodes under this branch.\n\n count: u64,\n\n /// The location of the next node when traversing the zero branch.\n\n zero: ArrayType,\n\n /// The location of the next node when traversing the one branch.\n\n one: ArrayType,\n\n /// The index bit of the associated key on which to make a decision to go down the zero or one branch.\n", "file_path": "src/tree/tree_branch.rs", "rank": 60, "score": 40488.71173942735 }, { "content": " Ok(serde_pickle::to_vec(&self, true)?)\n\n }\n\n}\n\n\n\n#[cfg(feature = \"use_ron\")]\n\nimpl Encode for TreeData {\n\n #[inline]\n\n fn encode(&self) -> BinaryMerkleTreeResult<Vec<u8>> {\n\n Ok(ron::ser::to_string(&self)?.as_bytes().to_vec())\n\n }\n\n}\n\n\n\n#[cfg(feature = \"use_bincode\")]\n\nimpl Decode for TreeData {\n\n #[inline]\n\n fn decode(buffer: &[u8]) -> BinaryMerkleTreeResult<Self> {\n\n Ok(deserialize(buffer)?)\n\n }\n\n}\n\n\n", "file_path": "src/tree/tree_data.rs", "rank": 
61, "score": 40488.55288414132 }, { "content": " fn set_key(&mut self, key: ArrayType) {\n\n Self::set_key(self, key)\n\n }\n\n\n\n /// Sets the location for the `Data` node.\n\n #[inline]\n\n fn set_data(&mut self, data: ArrayType) {\n\n Self::set_data(self, data)\n\n }\n\n\n\n /// Decomposes the struct into its constituent parts.\n\n #[inline]\n\n fn decompose(self) -> (ArrayType, ArrayType) {\n\n Self::decompose(self)\n\n }\n\n}\n\n\n\n#[cfg(feature = \"use_bincode\")]\n\nimpl<ArrayType> Encode for TreeLeaf<ArrayType>\n\nwhere\n", "file_path": "src/tree/tree_leaf.rs", "rank": 62, "score": 40487.16648338736 }, { "content": " /// Creates a new `TreeLeaf`.\n\n #[inline]\n\n #[must_use]\n\n pub fn new() -> Self {\n\n Self {\n\n key: ArrayType::default(),\n\n data: ArrayType::default(),\n\n }\n\n }\n\n\n\n /// Gets the associated key with the node.\n\n fn get_key(&self) -> &ArrayType {\n\n &self.key\n\n }\n\n\n\n /// Gets the location of the `Data` node from this node.\n\n fn get_data(&self) -> &ArrayType {\n\n &self.data\n\n }\n\n\n", "file_path": "src/tree/tree_leaf.rs", "rank": 63, "score": 40486.658889087834 }, { "content": "#[cfg(feature = \"use_cbor\")]\n\nimpl Encode for TreeData {\n\n #[inline]\n\n fn encode(&self) -> BinaryMerkleTreeResult<Vec<u8>> {\n\n Ok(serde_cbor::to_vec(&self)?)\n\n }\n\n}\n\n\n\n#[cfg(feature = \"use_yaml\")]\n\nimpl Encode for TreeData {\n\n #[inline]\n\n fn encode(&self) -> BinaryMerkleTreeResult<Vec<u8>> {\n\n Ok(serde_yaml::to_vec(&self)?)\n\n }\n\n}\n\n\n\n#[cfg(feature = \"use_pickle\")]\n\nimpl Encode for TreeData {\n\n #[inline]\n\n fn encode(&self) -> BinaryMerkleTreeResult<Vec<u8>> {\n", "file_path": "src/tree/tree_data.rs", "rank": 64, "score": 40486.599037402615 }, { "content": " }\n\n}\n\n\n\n#[cfg(feature = \"use_bincode\")]\n\nimpl Encode for TreeData {\n\n #[inline]\n\n fn encode(&self) -> BinaryMerkleTreeResult<Vec<u8>> {\n\n Ok(serialize(self)?)\n\n }\n\n}\n\n\n\n#[cfg(feature = \"use_json\")]\n\nimpl Encode for 
TreeData {\n\n #[inline]\n\n fn encode(&self) -> BinaryMerkleTreeResult<Vec<u8>> {\n\n let encoded = serde_json::to_string(&self)?;\n\n Ok(encoded.as_bytes().to_vec())\n\n }\n\n}\n\n\n", "file_path": "src/tree/tree_data.rs", "rank": 65, "score": 40485.913192339125 }, { "content": " #[inline]\n\n fn decode(buffer: &[u8]) -> BinaryMerkleTreeResult<Self> {\n\n Ok(serde_yaml::from_slice(buffer)?)\n\n }\n\n}\n\n\n\n#[cfg(feature = \"use_pickle\")]\n\nimpl Decode for TreeData {\n\n #[inline]\n\n fn decode(buffer: &[u8]) -> BinaryMerkleTreeResult<Self> {\n\n Ok(serde_pickle::from_slice(buffer)?)\n\n }\n\n}\n\n\n\n#[cfg(feature = \"use_ron\")]\n\nimpl Decode for TreeData {\n\n #[inline]\n\n fn decode(buffer: &[u8]) -> BinaryMerkleTreeResult<Self> {\n\n Ok(ron::de::from_bytes(buffer)?)\n\n }\n\n}\n", "file_path": "src/tree/tree_data.rs", "rank": 66, "score": 40485.80559022038 }, { "content": "#[cfg(feature = \"use_json\")]\n\nimpl Decode for TreeData {\n\n #[inline]\n\n fn decode(buffer: &[u8]) -> BinaryMerkleTreeResult<Self> {\n\n let decoded_string = String::from_utf8(buffer.to_vec())?;\n\n let decoded = serde_json::from_str(&decoded_string)?;\n\n Ok(decoded)\n\n }\n\n}\n\n\n\n#[cfg(feature = \"use_cbor\")]\n\nimpl Decode for TreeData {\n\n #[inline]\n\n fn decode(buffer: &[u8]) -> BinaryMerkleTreeResult<Self> {\n\n Ok(serde_cbor::from_slice(buffer)?)\n\n }\n\n}\n\n\n\n#[cfg(feature = \"use_yaml\")]\n\nimpl Decode for TreeData {\n", "file_path": "src/tree/tree_data.rs", "rank": 67, "score": 40485.54397940814 }, { "content": "\n\nimpl<ArrayType> TreeNode<ArrayType>\n\nwhere\n\n ArrayType: Array,\n\n{\n\n /// Creates a new `TreeNode`.\n\n #[inline]\n\n pub fn new(\n\n node_variant: NodeVariant<TreeBranch<ArrayType>, TreeLeaf<ArrayType>, TreeData, ArrayType>,\n\n ) -> Self {\n\n Self {\n\n references: 0,\n\n node: node_variant,\n\n }\n\n }\n\n\n\n /// Gets the number of references to the node.\n\n fn get_references(&self) -> u64 {\n\n self.references\n\n }\n", "file_path": 
"src/tree/tree_node.rs", "rank": 68, "score": 40485.04641092572 }, { "content": "}\n\n\n\nimpl<ArrayType> Node<TreeBranch<ArrayType>, TreeLeaf<ArrayType>, TreeData, ArrayType>\n\n for TreeNode<ArrayType>\n\nwhere\n\n ArrayType: Array,\n\n{\n\n #[inline]\n\n fn new(\n\n node_variant: NodeVariant<TreeBranch<ArrayType>, TreeLeaf<ArrayType>, TreeData, ArrayType>,\n\n ) -> Self {\n\n Self::new(node_variant)\n\n }\n\n\n\n #[inline]\n\n fn get_references(&self) -> u64 {\n\n Self::get_references(self)\n\n }\n\n #[inline]\n\n fn get_variant(\n", "file_path": "src/tree/tree_node.rs", "rank": 69, "score": 40483.63034900898 }, { "content": " self,\n\n ) -> NodeVariant<TreeBranch<ArrayType>, TreeLeaf<ArrayType>, TreeData, ArrayType> {\n\n self.node\n\n }\n\n\n\n #[inline]\n\n fn set_references(&mut self, references: u64) {\n\n Self::set_references(self, references)\n\n }\n\n #[inline]\n\n fn set_branch(&mut self, branch: TreeBranch<ArrayType>) {\n\n Self::set_branch(self, branch)\n\n }\n\n #[inline]\n\n fn set_leaf(&mut self, leaf: TreeLeaf<ArrayType>) {\n\n Self::set_leaf(self, leaf)\n\n }\n\n #[inline]\n\n fn set_data(&mut self, data: TreeData) {\n\n Self::set_data(self, data)\n", "file_path": "src/tree/tree_node.rs", "rank": 70, "score": 40482.8255106863 }, { "content": " /// Sets the index of the key to split on when deciding which child to traverse.\n\n fn set_split_index(&mut self, split_index: usize) {\n\n self.split_index = split_index;\n\n }\n\n\n\n /// Sets the associated key for this node.\n\n fn set_key(&mut self, key: ArrayType) {\n\n self.key = key;\n\n }\n\n\n\n /// Decomposes the `TreeBranch` into its constituent parts.\n\n fn decompose(self) -> (u64, ArrayType, ArrayType, usize, ArrayType) {\n\n (self.count, self.zero, self.one, self.split_index, self.key)\n\n }\n\n}\n\n\n\nimpl<ArrayType> Branch<ArrayType> for TreeBranch<ArrayType>\n\nwhere\n\n ArrayType: Array,\n\n{\n", "file_path": "src/tree/tree_branch.rs", "rank": 71, "score": 40481.8870964392 }, { 
"content": " }\n\n #[inline]\n\n fn get_key(&self) -> &ArrayType {\n\n Self::get_key(self)\n\n }\n\n\n\n #[inline]\n\n fn set_count(&mut self, count: u64) {\n\n Self::set_count(self, count)\n\n }\n\n #[inline]\n\n fn set_zero(&mut self, zero: ArrayType) {\n\n Self::set_zero(self, zero)\n\n }\n\n #[inline]\n\n fn set_one(&mut self, one: ArrayType) {\n\n Self::set_one(self, one)\n\n }\n\n #[inline]\n\n fn set_split_index(&mut self, index: usize) {\n", "file_path": "src/tree/tree_branch.rs", "rank": 72, "score": 40481.812565303655 }, { "content": " split_index: usize,\n\n /// The associated key with this branch.\n\n key: ArrayType,\n\n}\n\n\n\nimpl<ArrayType> TreeBranch<ArrayType>\n\nwhere\n\n ArrayType: Array,\n\n{\n\n /// Create a new `TreeBranch`\n\n fn new() -> Self {\n\n Self {\n\n count: 0,\n\n zero: ArrayType::default(),\n\n one: ArrayType::default(),\n\n split_index: 0,\n\n key: ArrayType::default(),\n\n }\n\n }\n\n\n", "file_path": "src/tree/tree_branch.rs", "rank": 73, "score": 40481.796990538896 }, { "content": " #[inline]\n\n fn new() -> Self {\n\n Self::new()\n\n }\n\n\n\n #[inline]\n\n fn get_count(&self) -> u64 {\n\n Self::get_count(self)\n\n }\n\n #[inline]\n\n fn get_zero(&self) -> &ArrayType {\n\n Self::get_zero(self)\n\n }\n\n #[inline]\n\n fn get_one(&self) -> &ArrayType {\n\n Self::get_one(self)\n\n }\n\n #[inline]\n\n fn get_split_index(&self) -> usize {\n\n Self::get_split_index(self)\n", "file_path": "src/tree/tree_branch.rs", "rank": 74, "score": 40481.60437793696 }, { "content": " /// Creates a new `TreeLeaf`\n\n #[inline]\n\n fn new() -> Self {\n\n Self::new()\n\n }\n\n\n\n /// Gets the associated key with this node.\n\n #[inline]\n\n fn get_key(&self) -> &ArrayType {\n\n Self::get_key(self)\n\n }\n\n\n\n /// Gets the location of the `Data` node.\n\n #[inline]\n\n fn get_data(&self) -> &ArrayType {\n\n Self::get_data(self)\n\n }\n\n\n\n /// Sets the associated key with this node.\n\n #[inline]\n", "file_path": "src/tree/tree_leaf.rs", 
"rank": 75, "score": 40480.698523710504 }, { "content": "\n\n/// `TreeData` represents the data to be stored in the tree for a given key.\n\n#[derive(Clone, Debug, PartialEq, Eq)]\n\n#[cfg_attr(any(feature = \"use_serde\"), derive(Serialize, Deserialize))]\n\npub struct TreeData {\n\n /// The value to be stored in the tree.\n\n value: Vec<u8>,\n\n}\n\n\n\nimpl TreeData {\n\n /// Creates a new `TreeData` node.\n\n fn new() -> Self {\n\n Self { value: vec![] }\n\n }\n\n\n\n /// Gets the value for this node.\n\n fn get_value(&self) -> &[u8] {\n\n &self.value\n\n }\n\n\n", "file_path": "src/tree/tree_data.rs", "rank": 76, "score": 40478.814467338954 }, { "content": " /// Sets the associated key with the node.\n\n fn set_key(&mut self, key: ArrayType) {\n\n self.key = key;\n\n }\n\n\n\n /// Sets the location of the `Data` node.\n\n fn set_data(&mut self, data: ArrayType) {\n\n self.data = data;\n\n }\n\n\n\n /// Decomposes the `TreeLeaf` into its constituent parts.\n\n fn decompose(self) -> (ArrayType, ArrayType) {\n\n (self.key, self.data)\n\n }\n\n}\n\n\n\nimpl<ArrayType> Leaf<ArrayType> for TreeLeaf<ArrayType>\n\nwhere\n\n ArrayType: Array,\n\n{\n", "file_path": "src/tree/tree_leaf.rs", "rank": 77, "score": 40478.55610382708 }, { "content": "#[cfg(feature = \"use_bincode\")]\n\nuse bincode::{deserialize, serialize};\n\n#[cfg(feature = \"use_ron\")]\n\nuse ron;\n\n#[cfg(feature = \"use_serde\")]\n\nuse serde::{Deserialize, Serialize};\n\n#[cfg(feature = \"use_cbor\")]\n\nuse serde_cbor;\n\n#[cfg(feature = \"use_json\")]\n\nuse serde_json;\n\n#[cfg(feature = \"use_pickle\")]\n\nuse serde_pickle;\n\n#[cfg(feature = \"use_yaml\")]\n\nuse serde_yaml;\n\n\n\n#[cfg(feature = \"use_serde\")]\n\nuse crate::merkle_bit::BinaryMerkleTreeResult;\n\nuse crate::traits::Data;\n\n#[cfg(feature = \"use_serde\")]\n\nuse crate::traits::{Decode, Encode};\n", "file_path": "src/tree/tree_data.rs", "rank": 78, "score": 40477.67297179301 }, { "content": " /// Sets the value for this 
node.\n\n fn set_value(&mut self, value: Vec<u8>) {\n\n self.value = value\n\n }\n\n}\n\n\n\nimpl Data for TreeData {\n\n #[inline]\n\n fn new() -> Self {\n\n Self::new()\n\n }\n\n\n\n #[inline]\n\n fn get_value(&self) -> &[u8] {\n\n Self::get_value(self)\n\n }\n\n\n\n #[inline]\n\n fn set_value(&mut self, value: &[u8]) {\n\n Self::set_value(self, value.to_vec())\n", "file_path": "src/tree/tree_data.rs", "rank": 79, "score": 40476.85166841665 }, { "content": " Ok(encoded.as_bytes().to_vec())\n\n }\n\n}\n\n\n\n#[cfg(feature = \"use_json\")]\n\nimpl From<serde_json::Error> for Exception {\n\n #[inline]\n\n fn from(error: serde_json::Error) -> Self {\n\n Self::new(&error.to_string())\n\n }\n\n}\n\n\n\n#[cfg(feature = \"use_json\")]\n\nimpl From<FromUtf8Error> for Exception {\n\n #[inline]\n\n fn from(error: FromUtf8Error) -> Self {\n\n Self::new(&error.to_string())\n\n }\n\n}\n\n\n", "file_path": "src/tree/tree_branch.rs", "rank": 80, "score": 40476.52766543445 }, { "content": " /// Get the count of leaf nodes under this branch.\n\n fn get_count(&self) -> u64 {\n\n self.count\n\n }\n\n\n\n /// Get the location of the next node when going down the zero side.\n\n fn get_zero(&self) -> &ArrayType {\n\n &self.zero\n\n }\n\n\n\n /// Get the location of the next node when going down the one side.\n\n fn get_one(&self) -> &ArrayType {\n\n &self.one\n\n }\n\n\n\n /// Get the index to split on when deciding which child to traverse.\n\n fn get_split_index(&self) -> usize {\n\n self.split_index\n\n }\n\n\n", "file_path": "src/tree/tree_branch.rs", "rank": 81, "score": 40476.17488765346 }, { "content": "\n\n /// Sets the number of references to the node.\n\n fn set_references(&mut self, references: u64) {\n\n self.references = references;\n\n }\n\n\n\n /// Sets the node as a `NodeVariant::Branch`.\n\n fn set_branch(&mut self, branch: TreeBranch<ArrayType>) {\n\n self.node = NodeVariant::Branch(branch);\n\n }\n\n\n\n /// Sets the node as a `NodeVariant::Leaf`.\n\n fn set_leaf(&mut 
self, leaf: TreeLeaf<ArrayType>) {\n\n self.node = NodeVariant::Leaf(leaf);\n\n }\n\n\n\n /// Sets the node as a `NodeVariant::Data`.\n\n fn set_data(&mut self, data: TreeData) {\n\n self.node = NodeVariant::Data(data);\n\n }\n", "file_path": "src/tree/tree_node.rs", "rank": 82, "score": 40475.958613646464 }, { "content": "#[cfg(feature = \"use_bincode\")]\n\nuse bincode::{deserialize, serialize};\n\n#[cfg(feature = \"use_ron\")]\n\nuse ron;\n\n#[cfg(feature = \"use_serde\")]\n\nuse serde::de::DeserializeOwned;\n\n#[cfg(feature = \"use_serde\")]\n\nuse serde::{Deserialize, Serialize};\n\n#[cfg(feature = \"use_cbor\")]\n\nuse serde_cbor;\n\n#[cfg(feature = \"use_json\")]\n\nuse serde_json;\n\n#[cfg(feature = \"use_pickle\")]\n\nuse serde_pickle;\n\n#[cfg(feature = \"use_yaml\")]\n\nuse serde_yaml;\n\n\n\n#[cfg(feature = \"use_serialization\")]\n\nuse crate::merkle_bit::BinaryMerkleTreeResult;\n\nuse crate::traits::{Array, Leaf};\n", "file_path": "src/tree/tree_leaf.rs", "rank": 83, "score": 40474.9382383843 }, { "content": "#[cfg(feature = \"use_bincode\")]\n\nuse bincode::{deserialize, serialize};\n\n#[cfg(feature = \"use_ron\")]\n\nuse ron;\n\n#[cfg(feature = \"use_serde\")]\n\nuse serde::de::DeserializeOwned;\n\n#[cfg(feature = \"use_serde\")]\n\nuse serde::{Deserialize, Serialize};\n\n#[cfg(feature = \"use_cbor\")]\n\nuse serde_cbor;\n\n#[cfg(feature = \"use_json\")]\n\nuse serde_json;\n\n#[cfg(feature = \"use_pickle\")]\n\nuse serde_pickle;\n\n#[cfg(feature = \"use_yaml\")]\n\nuse serde_yaml;\n\n\n\n#[cfg(feature = \"use_serde\")]\n\nuse crate::merkle_bit::BinaryMerkleTreeResult;\n\nuse crate::traits::{Array, Node, NodeVariant};\n", "file_path": "src/tree/tree_node.rs", "rank": 84, "score": 40474.88315755137 }, { "content": " /// Get the associated key with this branch.\n\n fn get_key(&self) -> &ArrayType {\n\n &self.key\n\n }\n\n\n\n /// Set the number of leaf nodes under this branch.\n\n fn set_count(&mut self, count: u64) {\n\n self.count = count;\n\n 
}\n\n\n\n /// Set the location of the next node to traverse when going down the zero side.\n\n fn set_zero(&mut self, zero: ArrayType) {\n\n self.zero = zero;\n\n }\n\n\n\n /// Set the location of the next node to traverse when going down the one side.\n\n fn set_one(&mut self, one: ArrayType) {\n\n self.one = one;\n\n }\n\n\n", "file_path": "src/tree/tree_branch.rs", "rank": 85, "score": 40473.81657496472 }, { "content": "#[cfg(feature = \"use_json\")]\n\nuse std::string::FromUtf8Error;\n\n\n\n#[cfg(feature = \"use_bincode\")]\n\nuse bincode::{deserialize, serialize};\n\n#[cfg(feature = \"use_ron\")]\n\nuse ron;\n\n#[cfg(feature = \"use_serde\")]\n\nuse serde::de::DeserializeOwned;\n\n#[cfg(feature = \"use_serde\")]\n\nuse serde::{Deserialize, Serialize};\n\n#[cfg(feature = \"use_cbor\")]\n\nuse serde_cbor;\n\n#[cfg(feature = \"use_json\")]\n\nuse serde_json;\n\n#[cfg(feature = \"use_pickle\")]\n\nuse serde_pickle;\n\n#[cfg(feature = \"use_yaml\")]\n\nuse serde_yaml;\n\n\n", "file_path": "src/tree/tree_branch.rs", "rank": 86, "score": 40470.49216288221 }, { "content": "where\n\n ArrayType: Array + Serialize + DeserializeOwned,\n\n ValueType: Encode + Decode,\n\n{\n\n #[inline]\n\n pub fn open(path: &PathBuf, depth: usize) -> BinaryMerkleTreeResult<Self> {\n\n let db = RocksDB::open(path)?;\n\n let tree = MerkleBIT::from_db(db, depth)?;\n\n Ok(Self { tree })\n\n }\n\n\n\n #[inline]\n\n pub fn from_db(db: RocksDB<ArrayType>, depth: usize) -> BinaryMerkleTreeResult<Self> {\n\n let tree = MerkleBIT::from_db(db, depth)?;\n\n Ok(Self { tree })\n\n }\n\n\n\n #[inline]\n\n pub fn get(\n\n &self,\n", "file_path": "src/rocks_tree.rs", "rank": 87, "score": 33541.45521576359 }, { "content": " root_hash: &ArrayType,\n\n keys: &mut [ArrayType],\n\n ) -> BinaryMerkleTreeResult<HashMap<ArrayType, Option<ValueType>>> {\n\n self.tree.get(root_hash, keys)\n\n }\n\n\n\n #[inline]\n\n pub fn get_one(\n\n &self,\n\n root: &ArrayType,\n\n key: &ArrayType,\n\n ) -> 
BinaryMerkleTreeResult<Option<ValueType>> {\n\n self.tree.get_one(&root, &key)\n\n }\n\n\n\n #[inline]\n\n pub fn insert(\n\n &mut self,\n\n previous_root: Option<&ArrayType>,\n\n keys: &mut [ArrayType],\n", "file_path": "src/rocks_tree.rs", "rank": 90, "score": 33524.35364949943 }, { "content": " #[inline]\n\n pub fn generate_inclusion_proof(\n\n &self,\n\n root: &ArrayType,\n\n key: ArrayType,\n\n ) -> BinaryMerkleTreeResult<Vec<(ArrayType, bool)>> {\n\n self.tree.generate_inclusion_proof(root, key)\n\n }\n\n\n\n #[inline]\n\n pub fn verify_inclusion_proof(\n\n root: &ArrayType,\n\n key: ArrayType,\n\n value: &ValueType,\n\n proof: &Vec<(ArrayType, bool)>,\n\n ) -> BinaryMerkleTreeResult<()> {\n\n Tree::verify_inclusion_proof(root, key, value, proof)\n\n }\n\n}\n", "file_path": "src/rocks_tree.rs", "rank": 91, "score": 33524.27911378449 }, { "content": " values: &[ValueType],\n\n ) -> BinaryMerkleTreeResult<ArrayType> {\n\n self.tree.insert(previous_root, keys, values)\n\n }\n\n\n\n #[inline]\n\n pub fn insert_one(\n\n &mut self,\n\n previous_root: Option<&ArrayType>,\n\n key: &ArrayType,\n\n value: &ValueType,\n\n ) -> BinaryMerkleTreeResult<ArrayType> {\n\n self.tree.insert_one(previous_root, key, value)\n\n }\n\n\n\n #[inline]\n\n pub fn remove(&mut self, root_hash: &ArrayType) -> BinaryMerkleTreeResult<()> {\n\n self.tree.remove(root_hash)\n\n }\n\n\n", "file_path": "src/rocks_tree.rs", "rank": 92, "score": 33524.184350695374 }, { "content": "#[cfg(not(any(feature = \"use_hashbrown\")))]\n\nuse std::collections::HashMap;\n\nuse std::path::PathBuf;\n\n\n\n#[cfg(feature = \"use_hashbrown\")]\n\nuse hashbrown::HashMap;\n\n\n\nuse crate::merkle_bit::{BinaryMerkleTreeResult, MerkleBIT};\n\nuse crate::traits::{Array, Database, Decode, Encode};\n\nuse crate::tree::tree_branch::TreeBranch;\n\nuse crate::tree::tree_data::TreeData;\n\nuse crate::tree::tree_leaf::TreeLeaf;\n\nuse crate::tree::tree_node::TreeNode;\n\nuse crate::tree_db::rocksdb::RocksDB;\n\nuse 
crate::tree_hasher::TreeHasher;\n\n#[cfg(feature = \"use_serde\")]\n\nuse serde::de::DeserializeOwned;\n\n#[cfg(feature = \"use_serde\")]\n\nuse serde::Serialize;\n\n\n\n/// Internal type alias for the underlying tree.\n", "file_path": "src/rocks_tree.rs", "rank": 97, "score": 33522.27417590878 }, { "content": "/// Holds the `TreeBranch` struct.\n\npub mod tree_branch;\n\n/// Holds the `TreeData` struct.\n\npub mod tree_data;\n\n/// Holds the `TreeLeaf` struct.\n\npub mod tree_leaf;\n\n/// Holds the `TreeNode` struct.\n\npub mod tree_node;\n", "file_path": "src/tree/mod.rs", "rank": 99, "score": 33512.673517146024 } ]
Rust
src/util/coalesce.rs
tikue/inverted_index
c912e7c982116591ebd88f2e146f8aa5f7814bd6
use std::collections::BTreeMap; use std::collections::btree_map::Entry::*; use std::iter::FromIterator; pub trait Merge: Ord + Copy { fn merge(self, other: Self) -> Option<Self>; } pub trait Coalesce: Sized + IntoIterator where Self::Item: Ord + Copy + Merge { fn coalesce(&mut self, index: usize, el: Self::Item); fn search_coalesce(&mut self, start: usize, el: Self::Item) -> usize; fn merge_coalesce<Iter>(&mut self, other: Iter) where Iter: IntoIterator<Item = Self::Item> { let mut idx = 0; for element in other { idx = self.search_coalesce(idx, element); } } } impl<T: Ord + Copy + Merge> Coalesce for Vec<T> { fn coalesce(&mut self, index: usize, el: T) { let merge = T::merge; if self.is_empty() { self.insert(index, el); } else if index == 0 { if let Some(coalesced) = merge(el, self[0]) { self[index] = coalesced; } else { self.insert(index, el); } } else if index == self.len() { if let Some(coalesced) = merge(self[index - 1], el) { self[index - 1] = coalesced; } else { self.insert(index, el); } } else { if let Some(coalesced) = merge(self[index - 1], el) { self[index - 1] = coalesced; if let Some(coalesced) = merge(self[index - 1], self[index]) { self[index - 1] = coalesced; self.remove(index); } } else if let Some(coalesced) = merge(el, self[index]) { self[index] = coalesced; } else { self.insert(index, el); } } } fn search_coalesce(&mut self, start: usize, el: T) -> usize { match self[start..].binary_search(&el) { Ok(idx) => start + idx, Err(idx) => { let idx = start + idx; self.coalesce(idx, el); idx } } } } pub struct MergeCoalesceMap<K, V>(pub BTreeMap<K, V>); impl<'a, 'b, K, V> FromIterator<(&'a K, &'b V)> for MergeCoalesceMap<K, V> where K: 'a + Ord + Clone, V: 'b + Coalesce + Clone, V::Item: Ord + Copy + Merge { fn from_iter<It>(iterator: It) -> Self where It: IntoIterator<Item = (&'a K, &'b V)> { let mut map = BTreeMap::new(); for (k, v) in iterator { match map.entry(k.clone()) { Vacant(entry) => { entry.insert(v.clone()); } Occupied(mut entry) => 
entry.get_mut().merge_coalesce(v.clone()), } } MergeCoalesceMap(map) } } impl<K, V> FromIterator<(K, V)> for MergeCoalesceMap<K, V> where K: Ord + Clone, V: Coalesce + Clone, V::Item: Ord + Copy + Merge { fn from_iter<It>(iterator: It) -> Self where It: IntoIterator<Item = (K, V)> { let mut map = BTreeMap::new(); for (k, v) in iterator { match map.entry(k) { Vacant(entry) => { entry.insert(v); } Occupied(mut entry) => entry.get_mut().merge_coalesce(v.into_iter()), } } MergeCoalesceMap(map) } } macro_rules! impl_merge_tuples { ($tp:ident) => ( impl Merge for ($tp, $tp) { fn merge(self, (begin2, end2): ($tp, $tp)) -> Option<($tp, $tp)> { let (begin1, end1) = self; assert!(begin2 >= begin1, "Input's begin must be >= self's begin"); if end1 >= begin2 { Some(if end1 < end2 { (begin1, end2) } else { (begin1, end1) }) } else { None } } } ) } impl_merge_tuples!(isize); impl_merge_tuples!(usize); impl_merge_tuples!(u32); impl_merge_tuples!(u16); impl_merge_tuples!(u8); impl_merge_tuples!(i32); impl_merge_tuples!(i16); impl_merge_tuples!(i8); #[test] fn test_coalesce_empty() { let mut v = vec![]; v.coalesce(0, (0, 1)); assert_eq!(v, [(0, 1)]); } #[test] fn test_coalesce_first() { let mut v = vec![(1, 1)]; v.coalesce(0, (0, 1)); assert_eq!(v, [(0, 1)]) } #[test] fn test_coalesce_last() { let mut v = vec![(1, 1)]; v.coalesce(1, (1, 2)); assert_eq!(v, [(1, 2)]) } #[test] fn test_coalesce_both() { let mut v = vec![(1, 1), (2, 2)]; v.coalesce(1, (1, 2)); assert_eq!(v, [(1, 2)]) } #[test] fn test_coalesce_none() { let mut v = vec![(1, 1), (3, 3)]; v.coalesce(1, (2, 2)); assert_eq!(v, [(1, 1), (2, 2), (3, 3)]) } #[test] fn test_coalesce_twice() { let mut v = vec![]; v.coalesce(0, (0, 1)); v.coalesce(0, (-2, -1)); v.coalesce(1, (-1, 0)); assert_eq!(v, [(-2, 1)]); } #[test] fn test_search_and_coalesce() { let mut v = vec![]; for el in vec![(0, 1), (-2, -1), (-1, 0)] { let index = v.binary_search(&el).err().unwrap(); v.coalesce(index, el); } assert_eq!(v, [(-2, 1)]); } #[test] fn 
test_coalesce_subrange() { let mut v = vec![(0, 3)]; v.coalesce(1, (1, 2)); assert_eq!(v, [(0, 3)]); } #[test] fn test_search_coalesce() { let mut v = vec![(0, 1), (2, 3), (4, 5), (6, 7)]; assert_eq!(2, v.search_coalesce(1, (4, 5))); } #[test] fn test_search_coalesce_2() { let mut v = vec![(0, 1), (2, 3), (4, 5), (6, 7)]; assert_eq!(3, v.search_coalesce(1, (5, 6))); assert_eq!(v, [(0, 1), (2, 3), (4, 7)]); }
use std::collections::BTreeMap; use std::collections::btree_map::Entry::*; use std::iter::FromIterator; pub trait Merge: Ord + Copy { fn merge(self, other: Self) -> Option<Self>; } pub trait Coalesce: Sized + IntoIterator where Self::Item: Ord + Copy + Merge { fn coalesce(&mut self, index: usize, el: Self::Item); fn search_coalesce(&mut self, start: usize, el: Self::Item) -> usize; fn merge_coalesce<Iter>(&mut self, other: Iter) where Iter: IntoIterator<Item = Self::Item> { let mut idx = 0; for element in other { idx = self.search_coalesce(idx, element); } } } impl<T: Ord + Copy + Merge> Coalesce for Vec<T> { fn coalesce(&mut self, index: usize, el: T) { let merge = T::merge; if self.is_empty() { self.insert(index, el); } else if index == 0 { if let Some(coalesced) = merge(el, self[0]) { self[index] = coalesced; } else { self.insert(index, el); } } else if index == self.len() { if let Some(coalesced) = merge(self[index - 1], el) { self[index - 1] = coalesced; } else { self.insert(index, el); } } else { if let Some(coalesced) = merge(self[index - 1], el) { self[index - 1] = coalesced; if let Some(coalesced) = merge(self[index - 1], self[index]) { self[index - 1] = coalesced; self.remove(index); } } else if let Some(coalesced) = merge(el, self[index]) { self[index] = coalesced; } else { self.insert(index, el); } } } fn search_coalesce(&mut self, start: usize, el: T) -> usize { match se
} pub struct MergeCoalesceMap<K, V>(pub BTreeMap<K, V>); impl<'a, 'b, K, V> FromIterator<(&'a K, &'b V)> for MergeCoalesceMap<K, V> where K: 'a + Ord + Clone, V: 'b + Coalesce + Clone, V::Item: Ord + Copy + Merge { fn from_iter<It>(iterator: It) -> Self where It: IntoIterator<Item = (&'a K, &'b V)> { let mut map = BTreeMap::new(); for (k, v) in iterator { match map.entry(k.clone()) { Vacant(entry) => { entry.insert(v.clone()); } Occupied(mut entry) => entry.get_mut().merge_coalesce(v.clone()), } } MergeCoalesceMap(map) } } impl<K, V> FromIterator<(K, V)> for MergeCoalesceMap<K, V> where K: Ord + Clone, V: Coalesce + Clone, V::Item: Ord + Copy + Merge { fn from_iter<It>(iterator: It) -> Self where It: IntoIterator<Item = (K, V)> { let mut map = BTreeMap::new(); for (k, v) in iterator { match map.entry(k) { Vacant(entry) => { entry.insert(v); } Occupied(mut entry) => entry.get_mut().merge_coalesce(v.into_iter()), } } MergeCoalesceMap(map) } } macro_rules! impl_merge_tuples { ($tp:ident) => ( impl Merge for ($tp, $tp) { fn merge(self, (begin2, end2): ($tp, $tp)) -> Option<($tp, $tp)> { let (begin1, end1) = self; assert!(begin2 >= begin1, "Input's begin must be >= self's begin"); if end1 >= begin2 { Some(if end1 < end2 { (begin1, end2) } else { (begin1, end1) }) } else { None } } } ) } impl_merge_tuples!(isize); impl_merge_tuples!(usize); impl_merge_tuples!(u32); impl_merge_tuples!(u16); impl_merge_tuples!(u8); impl_merge_tuples!(i32); impl_merge_tuples!(i16); impl_merge_tuples!(i8); #[test] fn test_coalesce_empty() { let mut v = vec![]; v.coalesce(0, (0, 1)); assert_eq!(v, [(0, 1)]); } #[test] fn test_coalesce_first() { let mut v = vec![(1, 1)]; v.coalesce(0, (0, 1)); assert_eq!(v, [(0, 1)]) } #[test] fn test_coalesce_last() { let mut v = vec![(1, 1)]; v.coalesce(1, (1, 2)); assert_eq!(v, [(1, 2)]) } #[test] fn test_coalesce_both() { let mut v = vec![(1, 1), (2, 2)]; v.coalesce(1, (1, 2)); assert_eq!(v, [(1, 2)]) } #[test] fn test_coalesce_none() { let mut v = 
vec![(1, 1), (3, 3)]; v.coalesce(1, (2, 2)); assert_eq!(v, [(1, 1), (2, 2), (3, 3)]) } #[test] fn test_coalesce_twice() { let mut v = vec![]; v.coalesce(0, (0, 1)); v.coalesce(0, (-2, -1)); v.coalesce(1, (-1, 0)); assert_eq!(v, [(-2, 1)]); } #[test] fn test_search_and_coalesce() { let mut v = vec![]; for el in vec![(0, 1), (-2, -1), (-1, 0)] { let index = v.binary_search(&el).err().unwrap(); v.coalesce(index, el); } assert_eq!(v, [(-2, 1)]); } #[test] fn test_coalesce_subrange() { let mut v = vec![(0, 3)]; v.coalesce(1, (1, 2)); assert_eq!(v, [(0, 3)]); } #[test] fn test_search_coalesce() { let mut v = vec![(0, 1), (2, 3), (4, 5), (6, 7)]; assert_eq!(2, v.search_coalesce(1, (4, 5))); } #[test] fn test_search_coalesce_2() { let mut v = vec![(0, 1), (2, 3), (4, 5), (6, 7)]; assert_eq!(3, v.search_coalesce(1, (5, 6))); assert_eq!(v, [(0, 1), (2, 3), (4, 7)]); }
lf[start..].binary_search(&el) { Ok(idx) => start + idx, Err(idx) => { let idx = start + idx; self.coalesce(idx, el); idx } } }
function_block-function_prefixed
[ { "content": "/// A trait for types whose values have well-defined successors.\n\npub trait Successor: Sized {\n\n /// Returns the successor to self, if any exists.\n\n fn successor(&self) -> Option<Self>;\n\n}\n\n\n\nimpl Successor for char {\n\n #[inline]\n\n // Implementation lifted from https://github.com/huonw/char-iter/blob/master/src/lib.rs#L77\n\n fn successor(&self) -> Option<char> {\n\n const SUR_START: u32 = 0xD800;\n\n const SUR_END: u32 = 0xDFFF;\n\n const BEFORE_SUR: u32 = SUR_START - 1;\n\n const AFTER_SUR: u32 = SUR_END + 1;\n\n let val = *self as u32;\n\n char::from_u32(if val == BEFORE_SUR {\n\n AFTER_SUR\n\n } else {\n\n val + 1\n\n })\n\n }\n\n}\n", "file_path": "src/util/successor.rs", "rank": 2, "score": 83636.30312801422 }, { "content": "#[inline]\n\npub fn encode_utf8(character: char, dst: &mut [u8]) -> Option<usize> {\n\n let code = character as u32;\n\n if code < MAX_ONE_B && !dst.is_empty() {\n\n dst[0] = code as u8;\n\n Some(1)\n\n } else if code < MAX_TWO_B && dst.len() >= 2 {\n\n dst[0] = (code >> 6 & 0x1F) as u8 | TAG_TWO_B;\n\n dst[1] = (code & 0x3F) as u8 | TAG_CONT;\n\n Some(2)\n\n } else if code < MAX_THREE_B && dst.len() >= 3 {\n\n dst[0] = (code >> 12 & 0x0F) as u8 | TAG_THREE_B;\n\n dst[1] = (code >> 6 & 0x3F) as u8 | TAG_CONT;\n\n dst[2] = (code & 0x3F) as u8 | TAG_CONT;\n\n Some(3)\n\n } else if dst.len() >= 4 {\n\n dst[0] = (code >> 18 & 0x07) as u8 | TAG_FOUR_B;\n\n dst[1] = (code >> 12 & 0x3F) as u8 | TAG_CONT;\n\n dst[2] = (code >> 6 & 0x3F) as u8 | TAG_CONT;\n\n dst[3] = (code & 0x3F) as u8 | TAG_CONT;\n\n Some(4)\n\n } else {\n\n None\n\n }\n\n}\n\n\n\n/// Converts a sequence of bytes into a char, if possible. 
Also returns the byte len of the char.\n", "file_path": "src/util/char_utf8.rs", "rank": 3, "score": 77004.14852115023 }, { "content": "/// A type that can output a sequence of tokens\n\npub trait Tokenizer {\n\n /// Returns the next token read from the input.\n\n fn read(&mut self, tok: &mut Token) -> io::Result<bool>;\n\n\n\n /// Returns the tokenizer output as an iterator.\n\n fn into_iter(self) -> Iter<Self>\n\n where Self: Sized\n\n {\n\n Iter {\n\n tokenizer: self,\n\n err: false,\n\n }\n\n }\n\n}\n\n\n\n/// Iterator over a tokenizer's output.\n\npub struct Iter<Tknzr> {\n\n tokenizer: Tknzr,\n\n err: bool,\n\n}\n", "file_path": "src/tokenizers.rs", "rank": 4, "score": 71347.95745575921 }, { "content": "/// An extension trait for positionally intersecting two types. A positional intersection is\n\n/// broadly defined as an intersection in which each element returned is close to an element\n\n/// not in its own set.\n\npub trait PositionalIntersect {\n\n /// The return type of the positional intersection. Typically this will be the same type\n\n /// as the inputs, but for some cases it needs to be different, e.g. 
when the inputs are slices\n\n /// then the output will be an owned vec.\n\n type Intersection;\n\n\n\n /// Intersect positionally, returning an Intersection\n\n /// whose terms are present at position X in self's postings list for document D\n\n /// and position X + delta (for some delta) in the input's postings list for document D.\n\n fn intersect_positionally(&self, &Self) -> Self::Intersection;\n\n}\n\n\n\nimpl PositionalIntersect for [Position] {\n\n type Intersection = Vec<Position>;\n\n\n\n fn intersect_positionally(&self, other: &[Position]) -> Vec<Position> {\n\n let mut intersection = vec![];\n\n let mut this = self.iter().cloned();\n\n let mut other = other.iter().cloned();\n\n let mut lval = this.next();\n", "file_path": "src/postings.rs", "rank": 5, "score": 68902.13546619384 }, { "content": "/// An extension trait for slices of `PostingsMap`s\n\n/// that enables computing their intersection.\n\npub trait PostingsIntersect {\n\n /// Computes the map containing the intersection of the the maps in self\n\n fn intersect_postings(self) -> PostingsMap;\n\n}\n\n\n\nimpl<'a> PostingsIntersect for &'a [PostingsMap] {\n\n fn intersect_postings(self) -> PostingsMap {\n\n match self {\n\n [] => PostingsMap::new(),\n\n [ref posting] => posting.clone(),\n\n [ref posting0, rest..] 
=> {\n\n self.intersection()\n\n .map(|doc_id| {\n\n let mut positions = posting0[doc_id].clone();\n\n for posting in rest {\n\n positions.merge_coalesce(posting[doc_id].iter().cloned());\n\n }\n\n (doc_id.clone(), positions)\n\n })\n\n .collect()\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/postings.rs", "rank": 6, "score": 68899.42020711186 }, { "content": "#[inline]\n\npub fn utf8_char_width(b: u8) -> usize {\n\n UTF8_CHAR_WIDTH[b as usize] as usize\n\n}\n", "file_path": "src/util/char_utf8.rs", "rank": 7, "score": 65990.71699740624 }, { "content": "/// An extension trait for slices of BTreeMaps that enables\n\n/// computing intersections\n\npub trait BTreeMapExt {\n\n /// The type of the map's keys.\n\n type Key;\n\n\n\n /// The type of the map's keys iterator.\n\n type Iter: Iterator<Item=Self::Key>;\n\n\n\n /// Visits the values representing the intersection, in ascending order.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```\n\n /// use std::collections::BTreeMap;\n\n /// use inverted_index::util::BTreeMapExt;\n\n ///\n\n /// let mut a = BTreeMap::new();\n\n /// a.insert(1, ());\n\n /// a.insert(2, ());\n\n ///\n\n /// let mut b = BTreeMap::new();\n", "file_path": "src/util/btree_map_ext.rs", "rank": 8, "score": 59874.7688555127 }, { "content": "#[inline]\n\npub fn decode_utf8(src: &[u8]) -> Option<(usize, char)> {\n\n if src.len() == 0 {\n\n return None;\n\n }\n\n match utf8_char_width(src[0]) {\n\n 0 => None,\n\n 1 => Some((1, src[0] as char)),\n\n n if src.len() < n => None,\n\n n => {\n\n match str::from_utf8(&src[0..n]) {\n\n Ok(s) => Some((n, s.chars().next().unwrap())),\n\n Err(_) => None,\n\n }\n\n }\n\n }\n\n}\n\n\n\n// https://tools.ietf.org/html/rfc3629\n\nstatic UTF8_CHAR_WIDTH: [u8; 256] = [\n\n1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,\n", "file_path": "src/util/char_utf8.rs", "rank": 10, "score": 59472.07424029314 }, { "content": "/// Creates a lowercase-ngrams tokenizer by chaining two filters.\n\npub fn lowercase_ngrams<B>(bytes: B)\n\n -> 
LowercaseFilter<NgramsFilter<EnglishUtf8<io::Cursor<Vec<u8>>>>>\n\n where B: Into<Vec<u8>>\n\n{\n\n LowercaseFilter::after_tokenizer(NgramsFilter::from_bytes(bytes))\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use std::io;\n\n use super::{Tokenizer, Token, EnglishUtf8};\n\n\n\n fn collect<T: Tokenizer>(tokenizer: T) -> Vec<Token> {\n\n tokenizer.into_iter().collect::<Result<Vec<_>, _>>().unwrap()\n\n }\n\n\n\n #[test]\n\n fn tiny_buffer() {\n\n let bytes = &b\"Hi, Dave! How are you?\"[..];\n\n let buf = io::BufReader::with_capacity(1, bytes);\n\n let toks = collect(EnglishUtf8::new(buf));\n\n assert_eq!(toks,\n\n vec![Token::new(\"Hi\", (0, 2), 0),\n\n Token::new(\"Dave\", (4, 8), 1),\n\n Token::new(\"How\", (10, 13), 2),\n\n Token::new(\"are\", (14, 17), 3),\n\n Token::new(\"you\", (18, 21), 4)]);\n\n }\n\n}\n", "file_path": "src/tokenizers.rs", "rank": 20, "score": 52756.18176251506 }, { "content": "#[test]\n\nfn test_intersection_last_min() {\n\n let mut map1 = BTreeMap::new();\n\n map1.insert(2, ());\n\n map1.insert(3, ());\n\n map1.insert(4, ());\n\n map1.insert(5, ());\n\n let mut map2 = BTreeMap::new();\n\n map2.insert(2, ());\n\n map2.insert(3, ());\n\n map2.insert(4, ());\n\n let mut map3 = BTreeMap::new();\n\n map3.insert(1, ());\n\n map3.insert(2, ());\n\n map3.insert(3, ());\n\n let maps = vec![map1, map2, map3];\n\n let maps = &*maps;\n\n let intersection: Vec<_> = maps.intersection().collect();\n\n assert_eq!(intersection, vec![&2, &3]);\n\n}\n", "file_path": "src/util/btree_map_ext.rs", "rank": 21, "score": 28925.12525150395 }, { "content": "#[test]\n\nfn test_intersection_first_min() {\n\n let mut map1 = BTreeMap::new();\n\n map1.insert(1, ());\n\n map1.insert(2, ());\n\n map1.insert(3, ());\n\n map1.insert(4, ());\n\n let mut map2 = BTreeMap::new();\n\n map2.insert(2, ());\n\n map2.insert(3, ());\n\n map2.insert(4, ());\n\n let mut map3 = BTreeMap::new();\n\n map3.insert(1, ());\n\n map3.insert(2, ());\n\n map3.insert(3, ());\n\n let maps = 
vec![map1, map2, map3];\n\n let maps = &*maps;\n\n let intersection: Vec<_> = maps.intersection().collect();\n\n assert_eq!(intersection, vec![&2, &3]);\n\n}\n\n\n", "file_path": "src/util/btree_map_ext.rs", "rank": 22, "score": 28925.12525150395 }, { "content": "use std::collections::Bound::{Included, Excluded, Unbounded};\n\nuse std::collections::BTreeMap;\n\nuse std::hash::Hasher;\n\n\n\nuse itertools::Itertools;\n\n\n\nuse Query::*;\n\nuse super::*;\n\nuse util::*;\n\n\n\n/// A basic implementation of an `Index`, the inverted index is a data structure that maps\n\n/// from words to postings.\n\n#[derive(Clone, Debug, Default, Eq, Hash, Ord, PartialEq, PartialOrd, \n\n RustcEncodable, RustcDecodable)]\n\npub struct InvertedIndex {\n\n // Maps terms to their postings\n\n index: BTreeMap<String, PostingsMap>,\n\n // Maps doc ids to their docs\n\n docs: BTreeMap<usize, Document>,\n\n}\n", "file_path": "src/index.rs", "rank": 23, "score": 23750.676283832097 }, { "content": " use Query::*;\n\n use Document;\n\n use InvertedIndex;\n\n use Position;\n\n use SearchResult;\n\n use std::collections::BTreeMap;\n\n\n\n #[test]\n\n fn ngrams() {\n\n let mut index = InvertedIndex::new();\n\n let doc1 = Document::new(1, \"learn to program in rust today\");\n\n let doc2 = Document::new(2, \"what did you today do\");\n\n index.index(doc1.clone());\n\n index.index(doc2.clone());\n\n let search_results = index.search(\"to\");\n\n let expected: BTreeMap<_, _> =\n\n [(doc1.id.clone(),\n\n vec![Position::new((6, 8), 1), Position::new((25, 27), 5)]),\n\n (doc2.id.clone(), vec![Position::new((13, 15), 3)])]\n\n .iter()\n", "file_path": "src/index.rs", "rank": 24, "score": 23749.52078109046 }, { "content": " assert_eq!(search_results[0].positions, vec![Position::new((0, 2), 0)]);\n\n }\n\n\n\n #[test]\n\n fn and() {\n\n let mut index = InvertedIndex::new();\n\n let doc1 = Document::new(1, \"learn to program in rust today\");\n\n let doc2 = Document::new(2, \"what did you today 
do\");\n\n let doc3 = Document::new(3, \"what did you do yesterday\");\n\n index.index(doc1.clone());\n\n index.index(doc2.clone());\n\n index.index(doc3.clone());\n\n let search_results = index.query(&And(&[Match(\"today\"), Match(\"you\")]));\n\n let expected: BTreeMap<_, _> = [(doc2,\n\n vec![Position::new((9, 12), 2),\n\n Position::new((13, 18), 3)])]\n\n .iter()\n\n .cloned()\n\n .collect();\n\n assert_eq!(search_results.len(), expected.len());\n", "file_path": "src/index.rs", "rank": 25, "score": 23748.40727133787 }, { "content": "\n\nimpl InvertedIndex {\n\n /// Constructs a new, empty InvertedIndex\n\n pub fn new() -> InvertedIndex {\n\n InvertedIndex {\n\n index: BTreeMap::new(),\n\n docs: BTreeMap::new(),\n\n }\n\n }\n\n\n\n /// Inserts the document.\n\n /// Insertings a document involves tokenizing the document's content\n\n /// and inserting each token into the index, pointing to the document and its position in the\n\n /// document.\n\n pub fn index(&mut self, doc: Document) {\n\n let previous_version = self.docs.insert(doc.id, doc.clone());\n\n if let Some(previous_version) = previous_version {\n\n let previous_analyzed = lowercase_ngrams(previous_version.content)\n\n .into_iter()\n\n .map(Result::unwrap);\n", "file_path": "src/index.rs", "rank": 26, "score": 23748.398794738318 }, { "content": " };\n\n self.index\n\n .range(min, max)\n\n .map(|(_k, v)| v)\n\n .flat_map(|map| map)\n\n .collect::<MergePostingsMap>()\n\n .0\n\n\n\n }\n\n\n\n fn query_rec(&self, query: &Query) -> PostingsMap {\n\n match *query {\n\n Match(query) => self.postings(query),\n\n And(queries) => {\n\n let postings: Vec<_> = queries.iter().map(|q| self.query_rec(q)).collect();\n\n postings.intersect_postings()\n\n }\n\n Or(queries) => queries.into_iter()\n\n .map(|q| self.query_rec(q))\n\n .flat_map(|map| map)\n", "file_path": "src/index.rs", "rank": 27, "score": 23748.283206190157 }, { "content": " .search_coalesce(0, position);\n\n }\n\n }\n\n\n\n /// Performs a search to the 
specification of the given query\n\n pub fn query(&self, query: &Query) -> Vec<SearchResult> {\n\n let postings = self.query_rec(query);\n\n self.compute_results(postings)\n\n }\n\n\n\n /// A helper method for performing a Match query\n\n pub fn search(&self, query: &str) -> Vec<SearchResult> {\n\n self.query(&Match(query))\n\n }\n\n\n\n fn postings(&self, query: &str) -> PostingsMap {\n\n LowercaseFilter::from_bytes(query)\n\n .into_iter()\n\n .map(Result::unwrap)\n\n .unique()\n", "file_path": "src/index.rs", "rank": 28, "score": 23748.058845874304 }, { "content": " index.index(doc1.clone());\n\n let expected = \"Won\\u{2019}t this split the *e*cosystem? Will *e*veryone use?\";\n\n let search_results = index.search(\"e\");\n\n assert_eq!(1, search_results.len());\n\n assert_eq!(search_results[0].highlight(\"*\", \"*\"), expected);\n\n }\n\n\n\n #[test]\n\n fn unicode() {\n\n let mut index = InvertedIndex::new();\n\n let doc = Document::new(0, \"嗨, 您好\");\n\n index.index(doc.clone());\n\n let to_search = \"您\";\n\n let search_results = index.search(to_search);\n\n let &SearchResult { ref doc, ref positions, .. 
} = search_results.iter().next().unwrap();\n\n let Position{offsets:(begin, end), ..} = positions[0];\n\n assert_eq!(&doc.content()[begin..end], to_search);\n\n }\n\n\n\n #[test]\n", "file_path": "src/index.rs", "rank": 29, "score": 23747.85865228243 }, { "content": " // \"beat\" should be first, since it's a closer match\n\n assert_eq!(search_results[0].doc.id, doc.id);\n\n }\n\n\n\n #[test]\n\n fn duplicate_term() {\n\n let mut index = InvertedIndex::new();\n\n let doc = Document::new(0, \"beat\");\n\n index.index(doc.clone());\n\n let search_results = index.search(\"be be\");\n\n assert_eq!(search_results.len(), 1);\n\n }\n\n\n\n #[test]\n\n fn duplicate_term2() {\n\n let mut index = InvertedIndex::new();\n\n let doc = Document::new(0, \"beat\");\n\n index.index(doc.clone());\n\n let search_results = index.search(\"be b\");\n\n assert_eq!(search_results.len(), 1);\n", "file_path": "src/index.rs", "rank": 30, "score": 23747.724130742074 }, { "content": " for search_result in &search_results {\n\n assert_eq!(&search_result.positions, &expected[search_result.doc])\n\n }\n\n }\n\n\n\n #[test]\n\n fn and_or() {\n\n let mut index = InvertedIndex::new();\n\n let doc1 = Document::new(1, \"learn to program in rust today\");\n\n let doc2 = Document::new(2, \"what did you today do\");\n\n let doc3 = Document::new(3, \"what did you do yesterday\");\n\n index.index(doc1.clone());\n\n index.index(doc2.clone());\n\n index.index(doc3.clone());\n\n let search_results = index.query(&Or(&[Match(\"you\"),\n\n And(&[Match(\"today\"), Match(\"you\")])]));\n\n let expected: BTreeMap<_, _> = [(doc2.id,\n\n vec![Position::new((9, 12), 2),\n\n Position::new((13, 18), 3)]),\n\n (doc3.id, vec![Position::new((9, 12), 2)])]\n", "file_path": "src/index.rs", "rank": 31, "score": 23747.663758561383 }, { "content": " fn prefix() {\n\n let mut index = InvertedIndex::new();\n\n let doc1 = Document::new(1, \"is is is\");\n\n index.index(doc1.clone());\n\n let expected: BTreeMap<_, _> = 
[(doc1.id.clone(),\n\n vec![Position::new((0, 2), 0),\n\n Position::new((3, 5), 1),\n\n Position::new((6, 8), 2)])]\n\n .iter()\n\n .cloned()\n\n .collect();\n\n let search_results = index.query(&Prefix(\"i\"));\n\n assert_eq!(search_results.len(), expected.len());\n\n for search_result in &search_results {\n\n assert_eq!(&search_result.positions, &expected[&search_result.doc.id]);\n\n }\n\n }\n\n\n\n #[test]\n\n fn char_len_change() {\n\n let mut index = InvertedIndex::new();\n\n let s: String = \"İİ\".into();\n\n let doc1 = Document::new(1, s.clone());\n\n index.index(doc1.clone());\n\n assert_eq!(index.index[\"i̇i̇\"][&1][0].offsets.1, 4);\n\n }\n\n}\n", "file_path": "src/index.rs", "rank": 32, "score": 23747.0513196723 }, { "content": " .iter()\n\n .cloned()\n\n .collect();\n\n assert_eq!(search_results.len(), expected.len());\n\n for search_result in &search_results {\n\n assert_eq!(&search_result.positions, &expected[&search_result.doc.id])\n\n }\n\n }\n\n\n\n #[test]\n\n fn phrase() {\n\n let mut index = InvertedIndex::new();\n\n let doc1 = Document::new(1, \"learn to program in rust today\");\n\n index.index(doc1.clone());\n\n let search_results = index.query(&Phrase(\"learn to program\"));\n\n let expected: BTreeMap<_, _> = [(doc1.id.clone(),\n\n vec![Position::new((0, 5), 0),\n\n Position::new((6, 8), 1),\n\n Position::new((9, 16), 2)])]\n\n .iter()\n", "file_path": "src/index.rs", "rank": 33, "score": 23746.94531595024 }, { "content": " fn update_doc() {\n\n let mut index = InvertedIndex::new();\n\n let doc = Document::new(0, \"abc åäö\");\n\n index.index(doc);\n\n let doc = Document::new(0, \"different\");\n\n index.index(doc);\n\n let search_results = index.search(\"å\");\n\n assert!(search_results.is_empty());\n\n assert_eq!(index.docs.len(), 1);\n\n }\n\n\n\n #[test]\n\n fn ranking() {\n\n let mut index = InvertedIndex::new();\n\n let doc = Document::new(0, \"beat\");\n\n index.index(doc.clone());\n\n let doc2 = Document::new(1, \"beast\");\n\n 
index.index(doc2);\n\n let search_results = index.search(\"be\");\n\n assert_eq!(index.docs.len(), 2);\n", "file_path": "src/index.rs", "rank": 34, "score": 23746.65351962811 }, { "content": " #[test]\n\n fn phrase2() {\n\n let mut index = InvertedIndex::new();\n\n let doc1 = Document::new(1, \"is is is\");\n\n index.index(doc1.clone());\n\n let expected: BTreeMap<_, _> = [(doc1.id.clone(),\n\n vec![Position::new((0, 1), 0),\n\n Position::new((3, 4), 1),\n\n Position::new((6, 7), 2)])]\n\n .iter()\n\n .cloned()\n\n .collect();\n\n let search_results = index.query(&Phrase(\"i i\"));\n\n assert_eq!(search_results.len(), expected.len());\n\n for search_result in &search_results {\n\n assert_eq!(&search_result.positions, &expected[&search_result.doc.id]);\n\n }\n\n }\n\n\n\n #[test]\n", "file_path": "src/index.rs", "rank": 35, "score": 23746.51454665118 }, { "content": " .cloned()\n\n .collect();\n\n assert_eq!(search_results.len(), expected.len());\n\n for search_result in &search_results {\n\n assert_eq!(&search_result.positions, &expected[&search_result.doc.id])\n\n }\n\n assert_eq!(\"learn <span class=highlight>to</span> program in rust <span \\\n\n class=highlight>to</span>day\",\n\n search_results.iter()\n\n .find(|search_result| search_result.doc.id == doc1.id)\n\n .unwrap()\n\n .highlight(\"<span class=highlight>\", \"</span>\"));\n\n\n\n }\n\n\n\n #[test]\n\n fn highlight() {\n\n let mut index = InvertedIndex::new();\n\n let doc1 = Document::new(2,\n\n \"Won\\u{2019}t this split the ecosystem? Will everyone use?\");\n", "file_path": "src/index.rs", "rank": 36, "score": 23746.438650292377 }, { "content": " for Token { token, .. 
} in previous_analyzed {\n\n let is_empty = {\n\n let docs_for_ngram = self.index.get_mut(&token).unwrap();\n\n docs_for_ngram.remove(&doc.id);\n\n docs_for_ngram.is_empty()\n\n };\n\n if is_empty {\n\n self.index.remove(&token);\n\n }\n\n }\n\n }\n\n\n\n let analyzed = lowercase_ngrams(doc.content).into_iter().map(Result::unwrap);\n\n\n\n for Token { token, position } in analyzed {\n\n self.index\n\n .entry(token)\n\n .or_insert_with(BTreeMap::new)\n\n .entry(doc.id)\n\n .or_insert_with(Vec::new)\n", "file_path": "src/index.rs", "rank": 37, "score": 23746.434712874052 }, { "content": " assert_eq!(search_results[0].positions, vec![Position::new((0, 2), 0)]);\n\n }\n\n\n\n #[test]\n\n fn lowercase_search() {\n\n let mut index = InvertedIndex::new();\n\n let doc = Document::new(0, \"BeAt\");\n\n index.index(doc.clone());\n\n let search_results = index.search(\"bE\");\n\n assert_eq!(search_results.len(), 1);\n\n assert_eq!(search_results[0].positions, vec![Position::new((0, 2), 0)]);\n\n }\n\n\n\n #[test]\n\n fn lowercase_index() {\n\n let mut index = InvertedIndex::new();\n\n let doc = Document::new(0, \"BeAt\");\n\n index.index(doc.clone());\n\n let search_results = index.search(\"be\");\n\n assert_eq!(search_results.len(), 1);\n", "file_path": "src/index.rs", "rank": 38, "score": 23746.406376807194 }, { "content": " .flat_map(|token| self.index.get(&token.token))\n\n .flat_map(|map| map)\n\n .collect::<MergePostingsMap>()\n\n .0\n\n\n\n }\n\n\n\n fn phrase(&self, phrase: &str) -> PostingsMap {\n\n let terms: Vec<_> = LowercaseFilter::from_bytes(phrase)\n\n .into_iter()\n\n .map(Result::unwrap)\n\n .map(|token| token.token)\n\n .collect();\n\n let postings: Vec<_> = terms.windows(2)\n\n .map(|adjacent_terms| {\n\n let term0 = &adjacent_terms[0];\n\n let term1 = &adjacent_terms[1];\n\n if let (Some(posting0), Some(posting1)) =\n\n (self.index.get(term0), self.index.get(term1)) {\n\n posting0.intersect_positionally(posting1)\n", "file_path": "src/index.rs", "rank": 
39, "score": 23745.963531879668 }, { "content": " .collect::<MergePostingsMap>()\n\n .0,\n\n Phrase(phrase) => self.phrase(phrase),\n\n Prefix(prefix) => self.prefix(prefix),\n\n }\n\n }\n\n\n\n fn compute_results(&self, postings: PostingsMap) -> Vec<SearchResult> {\n\n let mut results: Vec<_> = postings.into_iter()\n\n .map(|(doc_id, positions)| {\n\n SearchResult::new(&self.docs[&doc_id], positions)\n\n })\n\n .collect();\n\n results.sort_by(|result1, result2| result2.score.partial_cmp(&result1.score).unwrap());\n\n results\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n", "file_path": "src/index.rs", "rank": 40, "score": 23745.23432122984 }, { "content": " .cloned()\n\n .collect();\n\n assert_eq!(search_results.len(), expected.len());\n\n for search_result in &search_results {\n\n assert_eq!(&search_result.positions, &expected[&search_result.doc.id]);\n\n }\n\n let search_results = index.query(&Phrase(\"lear t pro\"));\n\n let expected: BTreeMap<_, _> = [(doc1.id,\n\n vec![Position::new((0, 4), 0),\n\n Position::new((6, 7), 1),\n\n Position::new((9, 12), 2)])]\n\n .iter()\n\n .cloned()\n\n .collect();\n\n assert_eq!(search_results.len(), expected.len());\n\n for search_result in &search_results {\n\n assert_eq!(&search_result.positions, &expected[&search_result.doc.id]);\n\n }\n\n }\n\n\n", "file_path": "src/index.rs", "rank": 41, "score": 23743.481604536944 }, { "content": " } else {\n\n PostingsMap::new()\n\n }\n\n })\n\n .collect();\n\n postings.intersect_postings()\n\n }\n\n\n\n fn prefix(&self, prefix: &str) -> PostingsMap {\n\n if prefix.is_empty() {\n\n return PostingsMap::new();\n\n }\n\n\n\n let min = Included(prefix);\n\n let mut max: String = prefix.into();\n\n let max = if let Some(next_char) = max.pop().unwrap().successor() {\n\n max.push(next_char);\n\n Excluded(&max)\n\n } else {\n\n Unbounded\n", "file_path": "src/index.rs", "rank": 42, "score": 23742.708177488268 }, { "content": "# inverted_index\n\nThis library provides an in-memory (subject to 
change) `InvertedIndex` that indexes documents \n\nto make them searchable. Below are a few details about its features. For more examples, see \n\nthe tests.\n\n\n\n## Indexing\n\n```\n\nlet mut index = InvertedIndex::new();\n\nindex.index(Document::new(1, \"learn to program in rust today\"));\n\n```\n\n\n\nIndexing is the process of inserting a document into the `InvertedIndex` to make it searchable.\n\nThe general process is:\n\n\n\n1. Tokenize the document's text, typically by splitting the text on word boundaries.\n\n2. Insert each token into the index with the original document as its payload.\n\n3. Optionally store additional metadata along with each document, such as positional \n\n information.\n\n\n\n## Searching\n\n```\n\nlet results = index.search(\"prog\");\n\n```\n\n\n\nSearches returns a set of search results. Each search result consists of a matching document, \n\nthe positions within the document that matched the query, and the document's search score.\n\n\n\nSearches can be performed via the `query` method using the composable `Query` enum, which \n\ncurrently has four variants:\n\n\n\n* `Match` - The simplest query. Takes a string argument and returns any documents that match \n\n the string. `index.search(str)` is shorthand for `index.query(Match(str))`.\n\n* `Phrase` - An exact-match query. Takes a string argument and returns any documents that \n\n contain the exact string. n.b. 
the `InvertedIndex` may return false positives in \n\n some cases.\n\n* `And` - Composes a number of queries into a single query that restricts the results to the\n\n documents that are returned for each of the sub-queries.\n\n* `Or` - Composes a number of queries into a single query that returns all the documents that\n\n are returned for any of the sub-queries.\n\n\n", "file_path": "README.md", "rank": 50, "score": 13607.99020114044 }, { "content": "## Scoring\n\nThe returned search results are ordered based on document relevance to the search query, sorted\n\ndescending. Currently, relevance for each document is computed based on the length of matching \n\ncontent divided by the square root of the document length. This helps to ensure that longer \n\ndocuments don't receive too unfair of an advantage over shorter documents.\n\n\n\n## Highlighting\n\nSearch results include the positions in the document that matched the query. There is a helper\n\nmethod defined on the `SearchResult` struct to highlight the matching content. 
It accepts \n\n`before` and `after` string arguments to wrap the matching sections of the document in \n\nhighlights.\n\n```\n\nfor search_result in &results {\n\n println!(\"{:?}\", search_result.highlight(\"<b>\", \"</b>\"));\n\n}\n\n```\n", "file_path": "README.md", "rank": 51, "score": 13603.049175087024 }, { "content": "/// Contains extension traits related to BTreeMaps.\n\npub mod btree_map_ext;\n\n/// Contains the Coalesce trait, for performing coalescence on collections and its items.\n\npub mod coalesce;\n\n/// Contains the Successor trait, which is the same thing as `std::iter::Step`, except it's\n\n/// implemented for chars.\n\npub mod successor;\n\n\n\n/// Utility functions for encoding and decoding utf-8 to and from bytes.\n\npub mod char_utf8;\n\n\n\npub use self::btree_map_ext::{BTreeMapExt, Intersection};\n\npub use self::coalesce::{Coalesce, Merge, MergeCoalesceMap};\n\npub use self::successor::Successor;\n", "file_path": "src/util/mod.rs", "rank": 52, "score": 13.758755274147079 }, { "content": "use std::collections::btree_map::{BTreeMap, Keys};\n\n\n\n/// A lazy iterator producing elements in the set intersection (in-order).\n\n#[derive(Clone)]\n\npub struct Intersection<K, Iter: Iterator<Item = K>> {\n\n iters: Vec<Iter>,\n\n}\n\n\n\nimpl<K: Ord, V: Iterator<Item=K>> Iterator for Intersection<K, V> {\n\n type Item = K;\n\n\n\n fn next(&mut self) -> Option<K> {\n\n let mut maximum = match self.iters.first_mut().and_then(Iterator::next) {\n\n Some(k) => k,\n\n _ => return None,\n\n };\n\n\n\n // Where the maximum came from\n\n let mut skip_nth = 0;\n\n\n", "file_path": "src/util/btree_map_ext.rs", "rank": 53, "score": 12.30506693796854 }, { "content": "use std::cmp::Ordering;\n\nuse std::collections::BTreeMap;\n\n\n\nuse tokenizers::Position;\n\nuse util::*;\n\n\n\nimpl Merge for Position {\n\n fn merge(self, other: Position) -> Option<Position> {\n\n if self.position == other.position {\n\n self.offsets.merge(other.offsets).map(|offsets| 
Position::new(offsets, self.position))\n\n } else {\n\n None\n\n }\n\n }\n\n}\n\n\n\n/// A postings map (doc id => positions) for a single term.\n\n/// Records which Documents contain the term, and at which locations in the documents.\n\npub type PostingsMap = BTreeMap<usize, Vec<Position>>;\n\n\n\n/// A MergeCoalesceMap for postings.\n\npub type MergePostingsMap = MergeCoalesceMap<usize, Vec<Position>>;\n\n\n\n/// An extension trait for slices of `PostingsMap`s\n\n/// that enables computing their intersection.\n", "file_path": "src/postings.rs", "rank": 54, "score": 11.662454417752688 }, { "content": " // Keep trying to...\n\n loop {\n\n let mut retry_with = None;\n\n\n\n // ...match all iters front element\n\n // with the chosen maximum\n\n for (i, iter) in self.iters.iter_mut().enumerate() {\n\n if i == skip_nth {\n\n continue;\n\n }\n\n\n\n match iter.find(|x| x >= &maximum) {\n\n Some(val) => if val > maximum {\n\n retry_with = Some(val);\n\n skip_nth = i;\n\n break;\n\n },\n\n\n\n // Intersection is empty\n\n None => return None,\n", "file_path": "src/util/btree_map_ext.rs", "rank": 55, "score": 9.598133507202604 }, { "content": "mod postings;\n\nmod query;\n\nmod search_result;\n\nmod tokenizers;\n\n\n\npub use index::InvertedIndex;\n\npub use document::Document;\n\npub use search_result::SearchResult;\n\npub use postings::{MergePostingsMap, PostingsMap, PostingsIntersect, PositionalIntersect};\n\npub use query::Query;\n\npub use tokenizers::{EnglishUtf8, NgramsFilter, LowercaseFilter, Position, Tokenizer, Token,\n\n lowercase_ngrams};\n", "file_path": "src/lib.rs", "rank": 56, "score": 9.56858891361108 }, { "content": " score: positions.iter()\n\n .map(|&Position { offsets: (begin, end), .. 
}| end - begin)\n\n .sum::<usize>() as f32 /\n\n (doc.content().len() as f32).sqrt(),\n\n doc: doc,\n\n positions: positions,\n\n }\n\n }\n\n\n\n /// Returns the document\n\n pub fn doc(&self) -> &Document {\n\n &self.doc\n\n }\n\n\n\n /// Returns the highlighted indices.\n\n ///\n\n /// Each `(usize, usize)` indicates the start and end of a term in the document's content\n\n /// that should be highlighted.\n\n pub fn positions(&self) -> &[Position] {\n\n &self.positions\n", "file_path": "src/search_result.rs", "rank": 57, "score": 8.627292737304721 }, { "content": " token: token.into(),\n\n position: Position::new(offsets, position),\n\n }\n\n }\n\n\n\n /// Creates an empty token with capacity reserved for 5 bytes;\n\n pub fn empty() -> Token {\n\n Token::new(String::with_capacity(5), (0, 0), 0)\n\n }\n\n}\n\n\n\n/// Information about the position of a single term within a document\n\n#[derive(Copy, Clone, Debug, Hash, Eq, Ord, PartialEq, PartialOrd, RustcDecodable, RustcEncodable)]\n\npub struct Position {\n\n /// Pair of byte indexes into the document at the beginning (inclusive) and end (exclusive) of \n\n /// the term.\n\n pub offsets: (usize, usize),\n\n /// The token position of the term, i.e., the number of tokens that occur before it in the doc.\n\n /// For example, for the sentence \"I have to go to the store\",\n\n /// the term \"to\" has positions [2, 4].\n", "file_path": "src/tokenizers.rs", "rank": 58, "score": 8.408055544042632 }, { "content": "/// A Query organizes a search of an inverted index.\n\n/// It is recursively hierarchical, allowing flexibility\n\n/// in exactly how a search is specified\n\n#[derive(Copy, Clone, Debug, Eq, Hash, Ord, PartialEq, PartialOrd, RustcEncodable)]\n\npub enum Query<'a> {\n\n /// The simplest query, represents a search using\n\n /// the given string\n\n Match(&'a str),\n\n\n\n /// A query requesting the intersection of the documents \n\n /// returned in each sub-query\n\n And(&'a [Query<'a>]),\n\n\n\n /// A query 
requesting the union of the documents returned\n\n /// in each sub-query\n\n Or(&'a [Query<'a>]),\n\n\n\n /// An exact-match query. The given phrase must appear in all documents returned.\n\n /// False positives may occur.\n\n Phrase(&'a str),\n\n\n\n /// A prefix query that returns all documents containing terms with the given prefix.\n\n /// Note that, unlike `Match` and `Phrase`, this query is not tokenized before searching\n\n /// the index. Thus, Prefix(\"hi bob\") is likely to match zero documents, since indexed\n\n /// documents typically have their content tokenized upon spaces.\n\n Prefix(&'a str),\n\n}\n", "file_path": "src/query.rs", "rank": 59, "score": 8.233095110658958 }, { "content": "// Original authorship BurntSushi\n\n\n\nuse std::io;\n\n\n\nuse util::char_utf8::decode_utf8;\n\n\n\n/// A token represents a single atomic unit of information present in a document.\n\n#[derive(Clone, Debug, Hash, Eq, Ord, PartialEq, PartialOrd, RustcDecodable, RustcEncodable)]\n\npub struct Token {\n\n /// A single parsed input from the document, possibly after undergoing some series of\n\n /// transformations.\n\n pub token: String,\n\n /// Information about the position of the token within the document.\n\n pub position: Position,\n\n}\n\n\n\nimpl Token {\n\n /// Creates a new token from a string, offsets, and position.\n\n pub fn new<S: Into<String>>(token: S, offsets: (usize, usize), position: usize) -> Token {\n\n Token {\n", "file_path": "src/tokenizers.rs", "rank": 60, "score": 8.094041575880935 }, { "content": "//!\n\n//! ```\n\n//! use inverted_index::{Document, InvertedIndex};\n\n//! let mut index = InvertedIndex::new();\n\n//! index.index(Document::new(1, \"learn to program in rust today\"));\n\n//! let results = index.search(\"prog\");\n\n//! for search_result in &results {\n\n//! println!(\"{:?}\", search_result.highlight(\"<b>\", \"</b>\"));\n\n//! }\n\n//! 
```\n\n\n\nextern crate itertools;\n\nextern crate rustc_serialize;\n\nextern crate core;\n\n\n\n/// Contains utility methods used in the rest of the crate.\n\npub mod util;\n\n\n\nmod document;\n\nmod index;\n", "file_path": "src/lib.rs", "rank": 61, "score": 7.981371422089143 }, { "content": "//! 1. Tokenize the document's text, typically by splitting the text on word boundaries.\n\n//! 2. Insert each token into the index with the original document as its payload.\n\n//! 3. Optionally store additional metadata along with each document, such as positional\n\n//! information.\n\n//!\n\n//! ## Searching\n\n//! ```\n\n//! use inverted_index::{Document, InvertedIndex};\n\n//! let mut index = InvertedIndex::new();\n\n//! index.index(Document::new(1, \"learn to program in rust today\"));\n\n//! let results = index.search(\"prog\");\n\n//! ```\n\n//!\n\n//! Searches returns a set of search results. Each search result consists of a matching document,\n\n//! the positions within the document that matched the query, and the document's search score.\n\n//!\n\n//! Searches can be performed via the `query` method using the composable `Query` enum, which\n\n//! currently has four variants:\n\n//!\n\n//! * `Match` - The simplest query. 
Takes a string argument and returns any documents that match\n", "file_path": "src/lib.rs", "rank": 62, "score": 7.9378368227166725 }, { "content": "#[cfg(test)]\n\nmod test {\n\n use std::iter;\n\n use super::super::{MergePostingsMap, Position, PostingsMap};\n\n\n\n #[test]\n\n fn test_merge() {\n\n let postings = [iter::once((1, vec![Position::new((0, 1), 0), Position::new((2, 3), 1)]))\n\n .collect::<PostingsMap>(),\n\n iter::once((1, vec![Position::new((4, 5), 2), Position::new((6, 7), 3)]))\n\n .collect()];\n\n assert_eq!(postings.iter().flat_map(|map| map).collect::<MergePostingsMap>().0,\n\n iter::once((1,\n\n vec![Position::new((0, 1), 0),\n\n Position::new((2, 3), 1),\n\n Position::new((4, 5), 2),\n\n Position::new((6, 7), 3)]))\n\n .collect());\n\n }\n\n}\n", "file_path": "src/postings.rs", "rank": 63, "score": 7.211133098165886 }, { "content": "#![feature(plugin, unboxed_closures, core, iter_arith, custom_attribute, slice_patterns,\n\n collections_bound, btree_range, associated_type_defaults)]\n\n#![plugin(clippy)]\n\n#![deny(missing_docs)]\n\n\n\n//! # inverted_index\n\n//! This library provides an in-memory (subject to change) `InvertedIndex` that indexes documents\n\n//! to make them searchable. Below are a few details about its features. For more examples, see\n\n//! the tests.\n\n//!\n\n//! ## Indexing\n\n//! ```\n\n//! use inverted_index::{Document, InvertedIndex};\n\n//! let mut index = InvertedIndex::new();\n\n//! index.index(Document::new(1, \"learn to program in rust today\"));\n\n//! ```\n\n//!\n\n//! Indexing is the process of inserting a document into the `InvertedIndex` to make it searchable.\n\n//! The general process is:\n\n//!\n", "file_path": "src/lib.rs", "rank": 64, "score": 6.778690266020261 }, { "content": " }\n\n\n\n /// Returns the search result's score.\n\n ///\n\n /// Score is computed by the product of the summed length of the matching terms and the inverse\n\n /// square root of the length of the document. 
Taking the square root of the document's length\n\n /// helps to combat bias toward short content.\n\n pub fn score(&self) -> f32 {\n\n self.score\n\n }\n\n\n\n /// Returns the search result's content, surrounding all highlighted terms with `before`\n\n /// and `after` \n\n pub fn highlight(&self, before: &str, after: &str) -> String {\n\n let mut begin_idx = 0;\n\n let mut parts = String::new();\n\n for &Position{offsets:(begin, end), ..} in &self.positions {\n\n parts.push_str(&self.doc.content()[begin_idx..begin]);\n\n parts.push_str(before);\n\n parts.push_str(&self.doc.content()[begin..end]);\n\n parts.push_str(after);\n\n begin_idx = end;\n\n }\n\n parts.push_str(&self.doc.content()[begin_idx..]);\n\n parts\n\n }\n\n}\n", "file_path": "src/search_result.rs", "rank": 65, "score": 6.7780416959311935 }, { "content": " Some(next) => {\n\n *tok = next;\n\n Ok(true)\n\n }\n\n None => {\n\n match self.tokenizer.read(tok) {\n\n done @ Ok(false) | done @ Err(_) => done,\n\n Ok(true) => {\n\n let start = tok.position.offsets.0;\n\n let chars: Vec<_> = tok.token\n\n .char_indices()\n\n .map(|(offset, c)| (start + offset, c))\n\n .collect();\n\n self.next.extend((1..chars.len() + 1).rev().map(|to| {\n\n let token: String = chars[..to]\n\n .iter()\n\n .map(|&(_, c)| c)\n\n .collect();\n\n let (last_idx, last_char) = chars[to - 1];\n\n let finish = last_idx + last_char.len_utf8();\n", "file_path": "src/tokenizers.rs", "rank": 66, "score": 6.716315686278643 }, { "content": "/// A Document contains an id and content.\n\n#[derive(Clone, Debug, Eq, Ord, PartialEq, PartialOrd, Hash, RustcEncodable, RustcDecodable)]\n\npub struct Document {\n\n /// The id of the document\n\n pub id: usize,\n\n /// The document's content\n\n pub content: String,\n\n}\n\n\n\nimpl Document {\n\n /// Construct a new Document from an id and content.\n\n /// Both two arguments can be anything that can be turned into a String.\n\n pub fn new<T>(id: usize, content: T) -> Document\n\n where T: 
Into<String>\n\n {\n\n Document {\n\n id: id,\n\n content: content.into(),\n\n }\n\n }\n", "file_path": "src/document.rs", "rank": 67, "score": 6.683977871517853 }, { "content": "\n\nimpl<Tknzr: Tokenizer> Iterator for Iter<Tknzr> {\n\n type Item = io::Result<Token>;\n\n\n\n fn next(&mut self) -> Option<io::Result<Token>> {\n\n if self.err {\n\n return None;\n\n }\n\n let mut tok = Token::empty();\n\n match self.tokenizer.read(&mut tok) {\n\n Ok(true) => Some(Ok(tok)),\n\n Ok(false) => None,\n\n Err(err) => {\n\n self.err = true;\n\n Some(Err(err))\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/tokenizers.rs", "rank": 68, "score": 6.431481724573159 }, { "content": " /// b.insert(2, ());\n\n /// b.insert(3, ());\n\n ///\n\n /// let maps = &[a, b];\n\n /// let intersection: Vec<_> = maps.intersection().cloned().collect();\n\n /// assert_eq!(intersection, [2]);\n\n /// ```\n\n fn intersection(self) -> Intersection<Self::Key, Self::Iter>;\n\n}\n\n\n\nimpl<'a, K: Ord, V> BTreeMapExt for &'a [BTreeMap<K, V>] {\n\n type Key = &'a K;\n\n type Iter = Keys<'a, K, V>;\n\n fn intersection(self) -> Intersection<&'a K, Keys<'a, K, V>> {\n\n Intersection { iters: self.iter().map(|map| map.keys()).collect() }\n\n }\n\n}\n\n\n\nimpl<'a, K: Ord, V> BTreeMapExt for &'a [&'a BTreeMap<K, V>] {\n\n type Key = &'a K;\n\n type Iter = Keys<'a, K, V>;\n\n fn intersection(self) -> Intersection<&'a K, Keys<'a, K, V>> {\n\n Intersection { iters: self.iter().map(|map| map.keys()).collect() }\n\n }\n\n}\n\n\n", "file_path": "src/util/btree_map_ext.rs", "rank": 69, "score": 6.244127087510323 }, { "content": "use super::{Document, Position};\n\n\n\n/// A SearchResult is the representation of a Document returned for a specific set of search\n\n/// terms. It is unique upon the document and the vec of highlight indices. 
It also contains a\n\n/// search score for use in ranking against the other search results\n\n#[derive(Clone, Debug, RustcEncodable)]\n\npub struct SearchResult<'a> {\n\n /// The document returned for the search\n\n pub doc: &'a Document,\n\n /// The positions of the terms in the document that matched the search\n\n pub positions: Vec<Position>,\n\n /// The search score, for use in ranking documents\n\n pub score: f32,\n\n}\n\n\n\nimpl<'a> SearchResult<'a> {\n\n /// Constructs a new SearchResult from the given Document and term positions.\n\n /// Computes the score using the positions and the document length\n\n pub fn new(doc: &'a Document, positions: Vec<Position>) -> SearchResult<'a> {\n\n SearchResult {\n", "file_path": "src/search_result.rs", "rank": 70, "score": 5.919227339666229 }, { "content": " pub position: usize,\n\n}\n\n\n\nimpl Position {\n\n /// Creates a new Position struct with the given offsets and position.\n\n pub fn new(offsets: (usize, usize), position: usize) -> Position {\n\n Position {\n\n offsets: offsets,\n\n position: position,\n\n }\n\n }\n\n}\n\n\n\n/// A type that can output a sequence of tokens\n", "file_path": "src/tokenizers.rs", "rank": 71, "score": 5.891121815581386 }, { "content": "use std::char;\n\n\n\n/// A trait for types whose values have well-defined successors.\n", "file_path": "src/util/successor.rs", "rank": 72, "score": 5.390168604525653 }, { "content": "\n\n /// Returns a reference to the document's id\n\n pub fn id(&self) -> usize {\n\n self.id\n\n }\n\n\n\n /// Returns a reference to the document's content\n\n pub fn content(&self) -> &str {\n\n &self.content\n\n }\n\n}\n", "file_path": "src/document.rs", "rank": 73, "score": 5.30325859835833 }, { "content": " }\n\n }\n\n\n\n match retry_with {\n\n Some(new_maximum) => maximum = new_maximum,\n\n None => return Some(maximum),\n\n }\n\n }\n\n }\n\n}\n\n\n\n/// An extension trait for slices of BTreeMaps that enables\n\n/// computing intersections\n", "file_path": 
"src/util/btree_map_ext.rs", "rank": 74, "score": 4.476035202209571 }, { "content": " pub fn from_bytes<B>(bytes: B) -> EnglishUtf8<io::Cursor<Vec<u8>>>\n\n where B: Into<Vec<u8>>\n\n {\n\n EnglishUtf8::new(io::Cursor::new(bytes.into()))\n\n }\n\n\n\n /// Reset the backing buffer to position 0.\n\n pub fn reset(&mut self) {\n\n self.rdr.set_position(0);\n\n }\n\n}\n\n\n\nimpl<Buf: io::BufRead> Tokenizer for EnglishUtf8<Buf> {\n\n fn read(&mut self, tok: &mut Token) -> io::Result<bool> {\n\n let mut consumed = 0;\n\n tok.token.clear();\n\n 'LOOP: loop {\n\n self.rdr.consume(consumed);\n\n consumed = 0;\n\n let buf = try!(self.rdr.fill_buf());\n", "file_path": "src/tokenizers.rs", "rank": 75, "score": 4.393554747424564 }, { "content": "//! the string. `index.search(str)` is shorthand for `index.query(Match(str))`.\n\n//! * `Phrase` - An exact-match query. Takes a string argument and returns any documents that\n\n//! contain the exact string. n.b. the `InvertedIndex` may return false positives in\n\n//! some cases.\n\n//! * `And` - Composes a number of queries into a single query that restricts the results to the\n\n//! documents that are returned for each of the sub-queries.\n\n//! * `Or` - Composes a number of queries into a single query that returns all the documents that\n\n//! are returned for any of the sub-queries.\n\n//!\n\n//! ## Scoring\n\n//! The returned search results are ordered based on document relevance to the search query, sorted\n\n//! descending. Currently, relevance for each document is computed based on the length of matching\n\n//! content divided by the square root of the document length. This helps to ensure that longer\n\n//! documents don't receive too unfair of an advantage over shorter documents.\n\n//!\n\n//! ## Highlighting\n\n//! Search results include the positions in the document that matched the query. There is a helper\n\n//! method defined on the `SearchResult` struct to highlight the matching content. It accepts\n\n//! 
`before` and `after` string arguments to wrap the matching sections of the document in\n\n//! highlights.\n", "file_path": "src/lib.rs", "rank": 76, "score": 4.345590055119142 }, { "content": " }\n\n }\n\n}\n\n\n\nimpl NgramsFilter<EnglishUtf8<io::Cursor<Vec<u8>>>> {\n\n /// Creates a new NgramsFilter with a backing English UTF-8 tokenizer backed by the bytes.\n\n pub fn from_bytes<B>(bytes: B) -> NgramsFilter<EnglishUtf8<io::Cursor<Vec<u8>>>>\n\n where B: Into<Vec<u8>>\n\n {\n\n NgramsFilter {\n\n tokenizer: EnglishUtf8::from_bytes(bytes),\n\n next: vec![],\n\n }\n\n }\n\n\n\n}\n\n\n\nimpl<Tknzr: Tokenizer> Tokenizer for NgramsFilter<Tknzr> {\n\n fn read(&mut self, tok: &mut Token) -> io::Result<bool> {\n\n match self.next.pop() {\n", "file_path": "src/tokenizers.rs", "rank": 77, "score": 4.26781669818245 }, { "content": "impl<Tknzr: Tokenizer> Tokenizer for LowercaseFilter<Tknzr> {\n\n fn read(&mut self, tok: &mut Token) -> io::Result<bool> {\n\n match self.tokenizer.read(tok) {\n\n done @ Ok(false) | done @ Err(_) => done,\n\n done @ Ok(true) => {\n\n tok.token = tok.token.to_lowercase();\n\n done\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/tokenizers.rs", "rank": 78, "score": 4.043345530025341 }, { "content": "/// A tokenizer of english documents encoded in UTF-8.\n\npub struct EnglishUtf8<Buf> {\n\n rdr: Buf,\n\n offset: usize,\n\n num_tokens: usize,\n\n}\n\n\n\nimpl<Buf: io::BufRead> EnglishUtf8<Buf> {\n\n /// Creates a new tokenizer backed by the given buffer.\n\n pub fn new(rdr: Buf) -> EnglishUtf8<Buf> {\n\n EnglishUtf8 {\n\n rdr: rdr,\n\n offset: 0,\n\n num_tokens: 0,\n\n }\n\n }\n\n}\n\n\n\nimpl EnglishUtf8<io::Cursor<Vec<u8>>> {\n\n /// Construct an EnglishUtf8 tokenizer backed by a byte buffer.\n", "file_path": "src/tokenizers.rs", "rank": 79, "score": 3.975931280965552 }, { "content": " Token::new(token, (start, finish), tok.position.position)\n\n }));\n\n *tok = self.next.pop().unwrap();\n\n Ok(true)\n\n }\n\n }\n\n }\n\n }\n\n }\n\n}\n\n\n\n/// 
An analyzer that tokenizes and lowercases its input\n\npub struct LowercaseFilter<Tknzr: Tokenizer> {\n\n tokenizer: Tknzr,\n\n}\n\n\n\nimpl<Tknzr: Tokenizer> LowercaseFilter<Tknzr> {\n\n /// Creates a new LowercaseFilter with the specified backing tokenizer.\n\n pub fn after_tokenizer(tokenizer: Tknzr) -> LowercaseFilter<Tknzr> {\n\n LowercaseFilter { tokenizer: tokenizer }\n", "file_path": "src/tokenizers.rs", "rank": 80, "score": 3.5811507995097536 }, { "content": " let mut rval = other.next();\n\n loop {\n\n if let (Some(l), Some(r)) = (lval, rval) {\n\n match l.position.cmp(&r.position) {\n\n Ordering::Less => {\n\n if l.position + 1 == r.position {\n\n if !intersection.is_empty() {\n\n if intersection[intersection.len() - 1] != l {\n\n intersection.push(l);\n\n }\n\n } else {\n\n intersection.push(l);\n\n }\n\n intersection.push(r);\n\n rval = other.next();\n\n }\n\n lval = this.next();\n\n }\n\n Ordering::Greater | Ordering::Equal => rval = other.next(),\n\n }\n", "file_path": "src/postings.rs", "rank": 81, "score": 3.295023577764986 }, { "content": " self.rdr.consume(consumed);\n\n tok.position.position = self.num_tokens;\n\n self.num_tokens += 1;\n\n Ok(true)\n\n }\n\n}\n\n\n\n/// An analyzer that tokenizes its input and returns each subslice of each token that starts from\n\n/// the first char.\n\npub struct NgramsFilter<Tknzr: Tokenizer> {\n\n tokenizer: Tknzr,\n\n next: Vec<Token>,\n\n}\n\n\n\nimpl<Buf: io::BufRead> NgramsFilter<EnglishUtf8<Buf>> {\n\n /// Creates a new NgramsFilter with a backing English UTF-8 tokenizer backed by the buffer.\n\n pub fn new(buf: Buf) -> NgramsFilter<EnglishUtf8<Buf>> {\n\n NgramsFilter {\n\n tokenizer: EnglishUtf8::new(buf),\n\n next: vec![],\n", "file_path": "src/tokenizers.rs", "rank": 82, "score": 3.016285464457463 }, { "content": " } else {\n\n return intersection;\n\n }\n\n }\n\n }\n\n}\n\n\n\nimpl PositionalIntersect for PostingsMap {\n\n type Intersection = PostingsMap;\n\n fn intersect_positionally(&self, other: 
&Self) -> PostingsMap {\n\n let maps = &[self, other];\n\n maps.intersection()\n\n .map(|doc_id| {\n\n (doc_id.clone(),\n\n self[doc_id].intersect_positionally(&other[doc_id]))\n\n })\n\n .collect()\n\n }\n\n}\n\n\n", "file_path": "src/postings.rs", "rank": 83, "score": 2.0255509014875828 }, { "content": " if buf.is_empty() {\n\n if tok.token.is_empty() {\n\n return Ok(false);\n\n } else {\n\n break 'LOOP;\n\n }\n\n }\n\n while consumed < buf.len() {\n\n let bytes = &buf[consumed..];\n\n let (n, c) = match decode_utf8(bytes) {\n\n None => {\n\n consumed += 1;\n\n self.offset += 1;\n\n continue;\n\n }\n\n Some((n, c)) => {\n\n consumed += n;\n\n (n, c)\n\n }\n\n };\n", "file_path": "src/tokenizers.rs", "rank": 84, "score": 1.9688440402483791 }, { "content": " }\n\n}\n\n\n\nimpl<Buf: io::BufRead> LowercaseFilter<EnglishUtf8<Buf>> {\n\n /// Creates a new LowercaseFilter with a backing English UTF-8 tokenizer backed by the buffer.\n\n pub fn new(buf: Buf) -> LowercaseFilter<EnglishUtf8<Buf>> {\n\n LowercaseFilter::after_tokenizer(EnglishUtf8::new(buf))\n\n }\n\n}\n\n\n\nimpl LowercaseFilter<EnglishUtf8<io::Cursor<Vec<u8>>>> {\n\n /// Creates a new LowercaseFilter with a backing English UTF-8 tokenizer backed by the bytes.\n\n pub fn from_bytes<B>(bytes: B) -> LowercaseFilter<EnglishUtf8<io::Cursor<Vec<u8>>>>\n\n where B: Into<Vec<u8>>\n\n {\n\n LowercaseFilter::after_tokenizer(EnglishUtf8::from_bytes(bytes))\n\n }\n\n\n\n}\n\n\n", "file_path": "src/tokenizers.rs", "rank": 85, "score": 1.6765497219948746 }, { "content": "// Original authorship burntsushi.\n\n#![cfg_attr(rustfmt, rustfmt_skip)]\n\nuse std::str;\n\n\n\n// UTF-8 ranges and tags for encoding characters\n\nconst TAG_CONT: u8 = 0b1000_0000;\n\nconst TAG_TWO_B: u8 = 0b1100_0000;\n\nconst TAG_THREE_B: u8 = 0b1110_0000;\n\nconst TAG_FOUR_B: u8 = 0b1111_0000;\n\nconst MAX_ONE_B: u32 = 0x80;\n\nconst MAX_TWO_B: u32 = 0x800;\n\nconst MAX_THREE_B: u32 = 0x10000;\n\n\n\n/// Write a utf-8 char as a sequence of 
bytes.\n\n#[inline]\n", "file_path": "src/util/char_utf8.rs", "rank": 86, "score": 1.3159546010406244 } ]
Rust
src/vm/mod.rs
pmk21/rsqlite
2b2b50d64c05293d8bd6d8543cef6347f8d8b961
use crate::buffer::InputBuffer; use crate::constants::{EMAIL_SIZE, TABLE_MAX_ROWS, USERNAME_SIZE}; use crate::table::{Row, Table}; use std::str::FromStr; pub mod statement; use statement::{Statement, StatementType}; pub enum ExecuteResult { Success, TableFull, } pub enum MetaCommandResult { UnrecognizedCommand, } pub enum PrepareResult { Success, UnrecognizedStatement, SyntaxError, StringTooLong, NegativeID, } pub fn do_meta_command(input_buffer: &InputBuffer, table: &mut Table) -> MetaCommandResult { if input_buffer.buffer == ".exit" { table.db_close(); std::process::exit(0); } else { MetaCommandResult::UnrecognizedCommand } } fn prepare_insert(args: &[&str], statement: &mut Statement) -> PrepareResult { statement.row_to_insert.id = match FromStr::from_str(args[1]) { Ok(uint) => uint, Err(_) => return PrepareResult::NegativeID, }; let ubytes = args[2].as_bytes(); let ulen = ubytes.len(); if ulen > USERNAME_SIZE { return PrepareResult::StringTooLong; } let mut username_bytes = [0u8; USERNAME_SIZE]; username_bytes[0..ulen].copy_from_slice(args[2].as_bytes()); statement.row_to_insert.username = username_bytes; let ebytes = args[3].as_bytes(); let elen = ebytes.len(); if elen > EMAIL_SIZE { return PrepareResult::StringTooLong; } let mut email_bytes = [0u8; EMAIL_SIZE]; email_bytes[0..elen].copy_from_slice(args[3].as_bytes()); statement.row_to_insert.email = email_bytes; PrepareResult::Success } pub fn prepare_statement(input_buffer: &InputBuffer, statement: &mut Statement) -> PrepareResult { if &input_buffer.buffer[0..6] == "insert" { statement.stmt_type = StatementType::Insert; let args = input_buffer.buffer.split(' ').collect::<Vec<&str>>(); if args.len() < 4 { return PrepareResult::SyntaxError; } else { return prepare_insert(&args, statement); } } if &input_buffer.buffer[0..6] == "select" { statement.stmt_type = StatementType::Select; return PrepareResult::Success; } PrepareResult::UnrecognizedStatement } pub fn execute_statement(statement: &Statement, table: &mut 
Table) -> ExecuteResult { match statement.stmt_type { StatementType::Insert => execute_insert(statement, table), StatementType::Select => execute_select(table), StatementType::Empty => { println!("Empty statement"); ExecuteResult::Success } } } fn execute_insert(statement: &Statement, table: &mut Table) -> ExecuteResult { if table.num_rows >= TABLE_MAX_ROWS { return ExecuteResult::TableFull; } let row = Row { id: statement.row_to_insert.id, username: statement.row_to_insert.username, email: statement.row_to_insert.email, }; let (page_num, _) = table.row_slot(table.num_rows); table.serialize_row(row, page_num); table.num_rows += 1; ExecuteResult::Success } fn execute_select(table: &mut Table) -> ExecuteResult { for i in 0..table.num_rows { let (page_num, byte_offset) = table.row_slot(i); &table.deserialize_row(page_num, byte_offset).print_row(); } ExecuteResult::Success }
use crate::buffer::InputBuffer; use crate::constants::{EMAIL_SIZE, TABLE_MAX_ROWS, USERNAME_SIZE}; use crate::table::{Row, Table}; use std::str::FromStr; pub mod statement; use statement::{Statement, StatementType}; pub enum ExecuteResult { Success, TableFull, } pub enum MetaCommandResult { UnrecognizedCommand, } pub enum PrepareResult { Success, UnrecognizedStatement, SyntaxError, StringTooLong, NegativeID, } pub fn do_meta_command(input_buffer: &InputBuffer, table: &mut Table) -> MetaCommandResult { if input_buffer.buffer == ".exit" { table.db_close(); std::process::exit(0); } else { MetaCommandResult::UnrecognizedCommand } } fn prepare_insert(args: &[&str], statement: &mut Statement) -> PrepareResult { statement.row_to_insert.id = match FromStr::from_str(args[1]) { Ok(uint) => uint, Err(_) => return PrepareResult::NegativeID, }; let ubytes = args[2].as_bytes(); let ulen = ubytes.len(); if ulen > USERNAME_SIZE { return PrepareResult::StringTooLong; } let mut username_bytes = [0u8; USERNAME_SIZE]; username_bytes[0..ulen].copy_from_slice(args[2].as_bytes()); statement.row_to_insert.username = username_bytes; let ebytes = args[3].as_bytes(); let elen = ebytes.len(); if elen > EMAIL_SIZE { return PrepareResult::StringTooLong; } let mut email_bytes = [0u8; EMAIL_SIZE]; email_bytes[0..elen].copy_from_slice(args[3].as_bytes()); statement.row_to_insert.email = email_bytes; PrepareResult::Success } pub fn prepare_statement(input_buffer: &InputBuffer, statement: &mut Statement) -> PrepareResult { if &input_buffer.buffer[0..6] == "insert" { statement.stmt_type = StatementType::Insert; let args = input_buffer.buffer.split(' ').collect::<Vec<&str>>(); if args.len() < 4 { return PrepareResult::SyntaxError; } else { return prepare_insert(&args, statement); } } if &input_buffer.buffer[0..6] == "select" { statement.stmt_type = StatementType::Select; return PrepareResult::Success; } PrepareResult::UnrecognizedStatement } pub fn execute_statement(statement: &Statement, table: &mut 
Table) -> ExecuteResult { match statement.stmt_type { StatementType::Insert => execute_insert(statement, table), StatementType::Select => execute_select(table), StatementType::Empty => { println!("Empty statement"); ExecuteResult::Success } } } fn execute_insert(statement: &Statement, table: &mut Table) -> ExecuteResult { if table.num_rows >= TABLE_MAX_ROWS { return ExecuteResult::TableFull; } let row = Row { id: statement.row_to_insert.id, username: statement.row_to_insert.username, email: statement.row_to_insert.email, }; let (page_num, _) = table.row_slot(table.num_rows); table.serialize_row(row, page_num); table.num_rows += 1; ExecuteResult::Success }
fn execute_select(table: &mut Table) -> ExecuteResult { for i in 0..table.num_rows { let (page_num, byte_offset) = table.row_slot(i); &table.deserialize_row(page_num, byte_offset).print_row(); } ExecuteResult::Success }
function_block-full_function
[ { "content": "//! # Table\n\n//!\n\n//! Interface to implement the structure of a table\n\n\n\nuse crate::constants::{\n\n EMAIL_OFFSET, EMAIL_SIZE, ID_OFFSET, ID_SIZE, ROWS_PER_PAGE, ROW_SIZE, USERNAME_OFFSET,\n\n USERNAME_SIZE,\n\n};\n\n\n\npub mod pager;\n\nuse pager::Pager;\n\n\n\n/// Structure to store the data present in the table as\n\n/// well as the number of rows present currently\n\npub struct Table {\n\n pub num_rows: u32,\n\n pub pager: Pager,\n\n}\n\n\n\nimpl Table {\n", "file_path": "src/table/mod.rs", "rank": 6, "score": 54549.1413375262 }, { "content": " let id = u32::from_ne_bytes(id_byte_arr);\n\n let mut username = [0u8; USERNAME_SIZE];\n\n username.copy_from_slice(username_bytes);\n\n let mut email = [0u8; EMAIL_SIZE];\n\n email.copy_from_slice(email_bytes);\n\n Row {\n\n id,\n\n username,\n\n email,\n\n }\n\n }\n\n\n\n /// Calculate the page and byte offset where a row must be present.\n\n /// Also load the required page.\n\n ///\n\n /// # Arguments\n\n ///\n\n /// * `table` - A mutable reference to `Table` struct\n\n /// * `row_num` - The index of the row in the table\n\n pub fn row_slot(&mut self, row_num: u32) -> (u32, u32) {\n", "file_path": "src/table/mod.rs", "rank": 7, "score": 54547.827238787766 }, { "content": " username: [0u8; USERNAME_SIZE],\n\n email: [0u8; EMAIL_SIZE],\n\n }\n\n }\n\n\n\n /// Helper function to print a `Row`\n\n ///\n\n /// # Arguments\n\n ///\n\n /// * `row` - A non-mutable reference to a `Row` struct\n\n pub fn print_row(&self) {\n\n println!(\n\n \"({}, {}, {})\",\n\n self.id,\n\n std::str::from_utf8(&self.username)\n\n .unwrap()\n\n .trim_end_matches(char::from(0)),\n\n std::str::from_utf8(&self.email)\n\n .unwrap()\n\n .trim_end_matches(char::from(0))\n\n );\n\n }\n\n}\n", "file_path": "src/table/mod.rs", "rank": 8, "score": 54545.66562701434 }, { "content": " let page_num = row_num / ROWS_PER_PAGE;\n\n let row_offset = row_num % ROWS_PER_PAGE;\n\n let byte_offset = row_offset * ROW_SIZE;\n\n 
self.pager.get_page(page_num);\n\n (page_num, byte_offset)\n\n }\n\n}\n\n\n\n/// A struct to hold data present in a row\n\npub struct Row {\n\n pub id: u32,\n\n pub username: [u8; USERNAME_SIZE],\n\n pub email: [u8; EMAIL_SIZE],\n\n}\n\n\n\nimpl Row {\n\n /// Returns an empty `Row`\n\n pub fn new() -> Self {\n\n Row {\n\n id: 0,\n", "file_path": "src/table/mod.rs", "rank": 9, "score": 54543.42019048983 }, { "content": " }\n\n }\n\n\n\n if self.pager.file.sync_data().is_err() {\n\n println!(\"Error closing db file.\");\n\n std::process::exit(1);\n\n }\n\n }\n\n\n\n /// Store all the data fields into a page\n\n ///\n\n /// # Arguments\n\n ///\n\n /// * `row` - The `Row` struct containing data to be stored\n\n /// * `page_num` - The corresponding page number where the data must be stored\n\n pub fn serialize_row(&mut self, row: Row, page_num: u32) {\n\n let id_bytes = row.id.to_ne_bytes();\n\n let username_bytes = row.username;\n\n let email_bytes = row.email;\n\n self.pager.pages[page_num as usize].extend_from_slice(&id_bytes);\n", "file_path": "src/table/mod.rs", "rank": 10, "score": 54543.01167340267 }, { "content": " self.pager.pages[page_num as usize].extend_from_slice(&username_bytes);\n\n self.pager.pages[page_num as usize].extend_from_slice(&email_bytes);\n\n }\n\n\n\n /// Retrieve a row from a given page and byte offset\n\n ///\n\n /// # Arguments\n\n ///\n\n /// * `page_num` - The corresponding page number where the row data is present\n\n /// * `byte_offset` - The offset in the page where the row data starts\n\n pub fn deserialize_row(&self, page_num: u32, byte_offset: u32) -> Row {\n\n let offset = byte_offset as usize;\n\n let mut id_byte_arr = [0; 4];\n\n let id_bytes_slice = &self.pager.pages[page_num as usize]\n\n [(offset + ID_OFFSET)..(offset + ID_OFFSET + ID_SIZE)];\n\n let username_bytes = &self.pager.pages[page_num as usize]\n\n [(offset + USERNAME_OFFSET)..(offset + USERNAME_OFFSET + USERNAME_SIZE)];\n\n let email_bytes = 
&self.pager.pages[page_num as usize]\n\n [(offset + EMAIL_OFFSET)..(offset + EMAIL_OFFSET + EMAIL_SIZE)];\n\n id_byte_arr.copy_from_slice(id_bytes_slice);\n", "file_path": "src/table/mod.rs", "rank": 11, "score": 54541.59914833559 }, { "content": " /// Opens a file to load the database from,\n\n /// if the file is not present, a new file is created\n\n ///\n\n /// # Arguments\n\n ///\n\n /// * `filename` - A string slice holding the file name\n\n ///\n\n /// # Example\n\n ///\n\n /// ```\n\n /// use crate table::Table;\n\n /// let table = Table::db_open(\"test.db\");\n\n /// ```\n\n ///\n\n /// # Panics\n\n ///\n\n /// Function might panic if there is some problem in creating or opening a file\n\n pub fn db_open(filename: &str) -> Self {\n\n let pager = Pager::open(filename);\n\n let num_rows = pager.file_length as u32 / ROW_SIZE;\n", "file_path": "src/table/mod.rs", "rank": 12, "score": 54541.53067042165 }, { "content": " Table { pager, num_rows }\n\n }\n\n\n\n /// Safely closes the database and writes all the data to the file on the disk\n\n pub fn db_close(&mut self) {\n\n let num_full_pages = self.num_rows / ROWS_PER_PAGE;\n\n\n\n for i in 0..num_full_pages {\n\n if self.pager.pages[i as usize].is_empty() {\n\n continue;\n\n }\n\n self.pager.flush(i);\n\n }\n\n\n\n // There may be a partial page to write to the end of the file\n\n let num_add_rows = self.num_rows % ROWS_PER_PAGE;\n\n if num_add_rows > 0 {\n\n let page_num = num_full_pages;\n\n if !self.pager.pages[page_num as usize].is_empty() {\n\n self.pager.flush(page_num);\n", "file_path": "src/table/mod.rs", "rank": 13, "score": 54538.65912974107 }, { "content": "fn clear_db_file(filename: &str) {\n\n std::process::Command::new(\"rm\")\n\n .arg(\"-rf\")\n\n .arg(filename)\n\n .output()\n\n .expect(\"Failed to execute.\");\n\n}\n\n\n", "file_path": "tests/cli_test.rs", "rank": 14, "score": 48552.12162208645 }, { "content": "#[test]\n\nfn insert_negative_id() -> Result<(), Box<dyn std::error::Error>> {\n\n 
clear_db_file(\"test.db\");\n\n let mut cmd = Command::cargo_bin(\"rsqlite\").unwrap();\n\n let expected_op: Vec<&str> = vec![\"db > ID must be positive.\", \"db > Executed.\", \"db > \"];\n\n\n\n let assert = cmd\n\n .arg(\"test.db\")\n\n .write_stdin(\"insert -1 test [email protected]\\nselect\\n.exit\\n\")\n\n .assert();\n\n\n\n let output_str = String::from_utf8(assert.success().get_output().stdout.clone()).unwrap();\n\n let op: Vec<&str> = output_str.split('\\n').collect();\n\n\n\n assert_eq!(op, expected_op);\n\n clear_db_file(\"test.db\");\n\n Ok(())\n\n}\n\n\n", "file_path": "tests/cli_test.rs", "rank": 15, "score": 42325.195334356154 }, { "content": "#[test]\n\nfn insert_more_than_max_rows() -> Result<(), Box<dyn std::error::Error>> {\n\n clear_db_file(\"test.db\");\n\n let mut cmd = Command::cargo_bin(\"rsqlite\").unwrap();\n\n let mut cmd_str = String::new();\n\n\n\n for i in 1..1402 {\n\n cmd_str.push_str(&format!(\"insert {} user{} person{}@example.com\\n\", i, i, i));\n\n }\n\n\n\n cmd_str.push_str(\".exit\\n\");\n\n\n\n let assert = cmd.arg(\"test.db\").write_stdin(cmd_str).assert();\n\n let output_str = String::from_utf8(assert.success().get_output().stdout.clone()).unwrap();\n\n let op: Vec<&str> = output_str.split('\\n').collect();\n\n\n\n assert_eq!(op[op.len() - 2], \"db > Error: Table full.\");\n\n clear_db_file(\"test.db\");\n\n Ok(())\n\n}\n\n\n", "file_path": "tests/cli_test.rs", "rank": 16, "score": 42227.70167714521 }, { "content": "#[test]\n\nfn insert_single_row() -> Result<(), Box<dyn std::error::Error>> {\n\n clear_db_file(\"test.db\");\n\n let mut cmd = Command::cargo_bin(\"rsqlite\").unwrap();\n\n cmd.arg(\"test.db\")\n\n .write_stdin(\"insert 1 alice [email protected]\\n.exit\\n\")\n\n .assert()\n\n .success()\n\n .stdout(predicate::eq(\"db > Executed.\\ndb > \"));\n\n clear_db_file(\"test.db\");\n\n Ok(())\n\n}\n\n\n", "file_path": "tests/cli_test.rs", "rank": 17, "score": 42227.70167714521 }, { "content": " /// # Arguments\n\n 
///\n\n /// * `filename` - A string slice holding the file name\n\n ///\n\n /// # Example\n\n ///\n\n /// ```\n\n /// use crate table::pager::Pager;\n\n /// let pager = Pager::open(\"test.db\");\n\n /// ```\n\n ///\n\n /// # Panics\n\n ///\n\n /// Function might panic if there is some problem in creating or opening a file\n\n pub fn open(filename: &str) -> Self {\n\n let mut file = OpenOptions::new()\n\n .write(true)\n\n .read(true)\n\n .create(true)\n\n .open(filename)\n", "file_path": "src/table/pager.rs", "rank": 18, "score": 36088.4596729814 }, { "content": " .unwrap();\n\n let file_length = file.seek(SeekFrom::End(0)).unwrap();\n\n\n\n Pager {\n\n file,\n\n file_length,\n\n pages: vec![vec![]; TABLE_MAX_PAGES as usize],\n\n }\n\n }\n\n\n\n /// Gets the page corresponding to the `page_num`\n\n /// \n\n /// # Arguments\n\n /// \n\n /// * `page_num` - The index of the page to be loaded\n\n pub fn get_page(&mut self, page_num: u32) {\n\n if page_num > TABLE_MAX_PAGES {\n\n println!(\n\n \"Tried to fetch page number out of bounds. {} > {}\",\n\n page_num, TABLE_MAX_PAGES\n", "file_path": "src/table/pager.rs", "rank": 19, "score": 36087.167709775706 }, { "content": "//! # Pager\n\n//! \n\n//! 
Interface to load, hold and store pages into a file\n\n\n\nuse crate::constants::{PAGE_SIZE, TABLE_MAX_PAGES};\n\nuse std::fs::{File, OpenOptions};\n\nuse std::io::{Read, Seek, SeekFrom, Write};\n\n\n\n/// A struct to hold all the pages and file metadata\n\npub struct Pager {\n\n pub file: File,\n\n pub file_length: u64,\n\n pub pages: Vec<Vec<u8>>,\n\n}\n\n\n\nimpl Pager {\n\n /// Opens a file to load the pages from,\n\n /// if the file is not present, a new file is created.\n\n /// Finally returns a `Pager` struct with relevant data\n\n ///\n", "file_path": "src/table/pager.rs", "rank": 20, "score": 36087.1029563399 }, { "content": " /// \n\n /// # Arguments\n\n /// \n\n /// * `page_num` - The index of the page to be written to the disk\n\n pub fn flush(&mut self, page_num: u32) {\n\n if self.pages[page_num as usize].is_empty() {\n\n println!(\"Tried to flush null page\");\n\n std::process::exit(1);\n\n }\n\n\n\n if self\n\n .file\n\n .seek(SeekFrom::Start((page_num * PAGE_SIZE) as u64))\n\n .is_err()\n\n {\n\n println!(\"Error seeking.\");\n\n std::process::exit(1);\n\n }\n\n\n\n let drained_vec: Vec<u8> = self.pages[page_num as usize].drain(..).collect();\n\n self.pages[page_num as usize].shrink_to_fit();\n\n\n\n if self.file.write_all(drained_vec.as_ref()).is_err() {\n\n println!(\"Error writing.\");\n\n std::process::exit(1);\n\n }\n\n }\n\n}\n", "file_path": "src/table/pager.rs", "rank": 21, "score": 36085.06908209655 }, { "content": " std::process::exit(1);\n\n }\n\n let buf_size: usize = if ((page_num * PAGE_SIZE) as u64) <= self.file_length {\n\n (self.file_length - (page_num * PAGE_SIZE) as u64) as usize\n\n } else {\n\n PAGE_SIZE as usize\n\n };\n\n\n\n let mut page: Vec<u8> = vec![0; buf_size];\n\n\n\n if self.file.read_exact(page.as_mut_slice()).is_err() {\n\n println!(\"Error reading file. 
{}\", page.len());\n\n std::process::exit(1);\n\n }\n\n self.pages[page_num as usize].extend_from_slice(page.as_slice());\n\n }\n\n }\n\n }\n\n\n\n /// Writes the page with given page number to the file on disk\n", "file_path": "src/table/pager.rs", "rank": 22, "score": 36084.586478884594 }, { "content": " );\n\n std::process::exit(1);\n\n }\n\n\n\n if self.pages[page_num as usize].is_empty() {\n\n // Cache miss. Load from file\n\n let mut num_pages = self.file_length / PAGE_SIZE as u64;\n\n\n\n // We might save a partial page at the end of the file\n\n if self.file_length % PAGE_SIZE as u64 > 0 {\n\n num_pages += 1;\n\n }\n\n\n\n if page_num as u64 <= num_pages {\n\n if self\n\n .file\n\n .seek(SeekFrom::Start((page_num * PAGE_SIZE) as u64))\n\n .is_err()\n\n {\n\n println!(\"Error seeking file.\");\n", "file_path": "src/table/pager.rs", "rank": 23, "score": 36083.71928780815 }, { "content": "#[test]\n\nfn insert_large_fields() -> Result<(), Box<dyn std::error::Error>> {\n\n clear_db_file(\"test.db\");\n\n let mut cmd = Command::cargo_bin(\"rsqlite\").unwrap();\n\n let long_username = \"a\".repeat(33);\n\n let long_email = \"a\".repeat(256);\n\n let expected_op: Vec<&str> = vec![\"db > String is too long.\", \"db > Executed.\", \"db > \"];\n\n\n\n let assert = cmd\n\n .arg(\"test.db\")\n\n .write_stdin(format!(\n\n \"insert 1 {} {}\\nselect\\n.exit\\n\",\n\n long_username, long_email\n\n ))\n\n .assert();\n\n\n\n let output_str = String::from_utf8(assert.success().get_output().stdout.clone()).unwrap();\n\n let op: Vec<&str> = output_str.split('\\n').collect();\n\n\n\n assert_eq!(op, expected_op);\n\n clear_db_file(\"test.db\");\n\n Ok(())\n\n}\n\n\n", "file_path": "tests/cli_test.rs", "rank": 24, "score": 28460.323745345995 }, { "content": "#[test]\n\nfn insert_max_length_fields() -> Result<(), Box<dyn std::error::Error>> {\n\n clear_db_file(\"test.db\");\n\n let mut cmd = Command::cargo_bin(\"rsqlite\").unwrap();\n\n let long_username = \"a\".repeat(32);\n\n let 
long_email = \"a\".repeat(255);\n\n let op_str = &format!(\"db > (1, {}, {})\", long_username, long_email);\n\n let expected_op: Vec<&str> = vec![\"db > Executed.\", op_str, \"Executed.\", \"db > \"];\n\n\n\n let assert = cmd\n\n .arg(\"test.db\")\n\n .write_stdin(format!(\n\n \"insert 1 {} {}\\nselect\\n.exit\\n\",\n\n long_username, long_email\n\n ))\n\n .assert();\n\n\n\n let output_str = String::from_utf8(assert.success().get_output().stdout.clone()).unwrap();\n\n let op: Vec<&str> = output_str.split('\\n').collect();\n\n\n\n assert_eq!(op, expected_op);\n\n clear_db_file(\"test.db\");\n\n Ok(())\n\n}\n\n\n", "file_path": "tests/cli_test.rs", "rank": 25, "score": 27537.58431873915 }, { "content": "fn main() {\n\n let args: Vec<String> = env::args().collect();\n\n\n\n if args.len() < 2 {\n\n println!(\"Must supply database filename.\");\n\n println!(\"cargo run <filename>\");\n\n std::process::exit(1);\n\n }\n\n\n\n let filename = &args[1];\n\n\n\n let mut input_buffer = InputBuffer::new();\n\n let mut table = Table::db_open(filename);\n\n\n\n loop {\n\n print_prompt();\n\n input_buffer.read_input();\n\n\n\n if input_buffer.buffer.is_empty() {\n\n continue;\n", "file_path": "src/main.rs", "rank": 26, "score": 23041.91541338208 }, { "content": "/// Prints basic prompt onto stdout\n\nfn print_prompt() {\n\n print!(\"db > \");\n\n io::stdout().flush().expect(\"Could not flush stdout\");\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 27, "score": 22051.816532062465 }, { "content": "//! # Statement\n\n//!\n\n//! 
An abstract interface for handling SQL statements\n\n\n\nuse crate::table::Row;\n\n\n\n/// Enum to indicate the type of SQL statement\n\npub enum StatementType {\n\n Insert,\n\n Select,\n\n Empty,\n\n}\n\n\n\n/// Struct that holds the type of SQL statement and relevant data\n\npub struct Statement {\n\n pub stmt_type: StatementType,\n\n pub row_to_insert: Row,\n\n}\n\n\n\nimpl Statement {\n", "file_path": "src/vm/statement.rs", "rank": 28, "score": 20162.488218565286 }, { "content": " /// Returns a `Statement` struct with an empty statement\n\n /// and empty row\n\n pub fn new() -> Self {\n\n Statement {\n\n stmt_type: StatementType::Empty,\n\n row_to_insert: Row::new(),\n\n }\n\n }\n\n}\n", "file_path": "src/vm/statement.rs", "rank": 29, "score": 20157.339185139615 }, { "content": "#[test]\n\nfn check_persistence() -> Result<(), Box<dyn std::error::Error>> {\n\n clear_db_file(\"test.db\");\n\n let mut cmd = Command::cargo_bin(\"rsqlite\").unwrap();\n\n let expected_op1: Vec<&str> = vec![\"db > Executed.\", \"db > \"];\n\n\n\n let assert = cmd\n\n .arg(\"test.db\")\n\n .write_stdin(\"insert 1 user1 [email protected]\\n.exit\\n\")\n\n .assert();\n\n\n\n let output_str = String::from_utf8(assert.success().get_output().stdout.clone()).unwrap();\n\n let op1: Vec<&str> = output_str.split('\\n').collect();\n\n\n\n assert_eq!(op1, expected_op1);\n\n\n\n let expected_op2: Vec<&str> = vec![\"db > (1, user1, [email protected])\", \"Executed.\", \"db > \"];\n\n\n\n let assert = cmd.arg(\"test.db\").write_stdin(\"select\\n.exit\\n\").assert();\n\n\n\n println!(\"{:#?}\", assert);\n\n\n\n let output_str = String::from_utf8(assert.success().get_output().stdout.clone()).unwrap();\n\n let op2: Vec<&str> = output_str.split('\\n').collect();\n\n\n\n assert_eq!(op2, expected_op2);\n\n clear_db_file(\"test.db\");\n\n Ok(())\n\n}\n", "file_path": "tests/cli_test.rs", "rank": 32, "score": 15649.78010282469 }, { "content": "//! 
This file specifies the various constants used across the files.\n\n\n\n/// Set page size to be used internally.\n\n/// 4KB is the most common page size\n\npub const PAGE_SIZE: u32 = 4096;\n\n\n\n/// Maximum amount of pages a table can hold or store at a time\n\npub const TABLE_MAX_PAGES: u32 = 100;\n\n\n\n/// Number of rows that can fit into a page\n\npub const ROWS_PER_PAGE: u32 = PAGE_SIZE / ROW_SIZE;\n\n\n\n/// Maximum rows a table can hold or store\n\npub const TABLE_MAX_ROWS: u32 = ROWS_PER_PAGE * TABLE_MAX_PAGES;\n\n\n\n/// Size of the id field in bytes\n\npub const ID_SIZE: usize = 4;\n\n\n\n/// Size of the username field in bytes\n\npub const USERNAME_SIZE: usize = 32;\n", "file_path": "src/constants.rs", "rank": 33, "score": 12.253771973235185 }, { "content": " continue;\n\n }\n\n PrepareResult::SyntaxError => {\n\n println!(\"Syntax error. Could not parse statement.\");\n\n continue;\n\n }\n\n PrepareResult::StringTooLong => {\n\n println!(\"String is too long.\");\n\n continue;\n\n }\n\n PrepareResult::NegativeID => {\n\n println!(\"ID must be positive.\");\n\n continue;\n\n }\n\n }\n\n\n\n match execute_statement(&statement, &mut table) {\n\n ExecuteResult::Success => {\n\n println!(\"Executed.\");\n\n }\n\n ExecuteResult::TableFull => {\n\n println!(\"Error: Table full.\");\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/main.rs", "rank": 34, "score": 11.300434035409445 }, { "content": "use std::env;\n\nuse std::io::{self, Write};\n\n\n\nmod buffer;\n\nmod constants;\n\nmod table;\n\nmod vm;\n\n\n\nuse buffer::InputBuffer;\n\nuse table::Table;\n\nuse vm::statement::Statement;\n\nuse vm::{\n\n do_meta_command, execute_statement, prepare_statement, ExecuteResult, MetaCommandResult,\n\n PrepareResult,\n\n};\n\n\n\n/// Prints basic prompt onto stdout\n", "file_path": "src/main.rs", "rank": 35, "score": 11.257789550779721 }, { "content": " }\n\n\n\n if input_buffer.buffer.starts_with('.') {\n\n match do_meta_command(&input_buffer, &mut table) {\n\n 
MetaCommandResult::UnrecognizedCommand => {\n\n println!(\"Unrecognized command '{}'.\", input_buffer.buffer);\n\n continue;\n\n }\n\n }\n\n }\n\n\n\n let mut statement: Statement = Statement::new();\n\n\n\n match prepare_statement(&input_buffer, &mut statement) {\n\n PrepareResult::Success => (),\n\n PrepareResult::UnrecognizedStatement => {\n\n println!(\n\n \"Unrecognized keyword at the start of '{}'.\",\n\n input_buffer.buffer\n\n );\n", "file_path": "src/main.rs", "rank": 36, "score": 9.74040440886228 }, { "content": "\n\n/// Size of the email field in bytes\n\npub const EMAIL_SIZE: usize = 255;\n\n\n\n// Since all the fields are converted to bytes and stored in a single byte array.\n\n// The three fields have to be deserialized from different offsets in the byte array.\n\n// The following constants specify those offsets.\n\n\n\n/// The offset in the byte array where bytes of the id field start\n\npub const ID_OFFSET: usize = 0;\n\n\n\n/// The offset in the byte array where bytes of the username field start\n\npub const USERNAME_OFFSET: usize = ID_OFFSET + ID_SIZE;\n\n\n\n/// The offset in the byte array where bytes of the email field start\n\npub const EMAIL_OFFSET: usize = USERNAME_OFFSET + USERNAME_SIZE;\n\n\n\n/// Total amount in bytes that a row will occupy in memory\n\npub const ROW_SIZE: u32 = (ID_SIZE + USERNAME_SIZE + EMAIL_SIZE) as u32;\n", "file_path": "src/constants.rs", "rank": 37, "score": 9.400496811480895 }, { "content": "//! # Buffer\n\n//! \n\n//! 
A small interface to read user input from stdin.\n\n\n\nuse std::io::{self, BufRead};\n\n\n\n/// Structure to hold the user input\n\npub struct InputBuffer {\n\n pub buffer: String,\n\n}\n\n\n\nimpl InputBuffer {\n\n /// Returns a new InputBuffer which contains an empty buffer\n\n /// \n\n /// # Example\n\n /// \n\n /// ```\n\n /// use crate::buffer::InputBuffer;\n\n /// let input_buffer = InputBuffer::new();\n\n /// ```\n", "file_path": "src/buffer.rs", "rank": 38, "score": 4.8094586216245165 }, { "content": "# RSQLite\n\n\n\n![](https://github.com/pmk21/rsqlite/workflows/rsqlite/badge.svg)\n\n\n\nA simple SQLite clone in Rust. This is basically a translation of the C code present on [this](https://cstack.github.io/db_tutorial/) brilliant tutorial into Rust(not fully idiomatic). This code contains implementation only upto Part 5 in the tutorial.\n\n\n\nThis a very simple database and is a small project I took up to gain experience with Rust.\n\n\n\n## Requirements\n\n\n\nHaving [`rustup`](https://www.rust-lang.org/tools/install) and [`cargo`](https://doc.rust-lang.org/cargo/getting-started/installation.html) should be enough to get this up and running.\n\n\n\n## Usage\n\n\n\n* In the base directory of the repository type the command `$cargo run <filename>`, the database will be stored in the given file and will also load values(if present) from the given file.\n\n\n\n* Once the program is up and running a prompt `db >` will appear, there you can execute database commands.\n\n\n\n* Supported commands are(which are only a few!) -\n\n\n\n * `.exit` - To exit the program.\n\n \n\n * `insert <id> <username> <email>` - Inserts the given values into the database. 
The values are persisted on the disk.\n\n \n\n * `select` - Displays all the rows present in the database.\n\n\n\n## Documentation\n\n\n\nDocumentation of the various modules and functions can be seen by typing `$cargo doc --open` in the base directory of the repository.\n\n\n\n## Tests\n\n\n\nA few simple tests can be run with `$cargo test -- --test-threads=1`.\n\n\n\n## License\n\n\n\nThis project is licensed under the MIT License.\n\n\n\nMIT © Prithvi MK\n", "file_path": "README.md", "rank": 39, "score": 4.557925165839162 }, { "content": " pub fn new() -> Self {\n\n InputBuffer {\n\n buffer: String::new(),\n\n }\n\n }\n\n\n\n /// Reads user input from stdin\n\n /// \n\n /// # Example\n\n /// \n\n /// ```\n\n /// use crate::buffer::InputBuffer;\n\n /// let input_buffer = InputBuffer::new();\n\n /// // Input from stdin present in input_buffer.buffer\n\n /// input_buffer.read_input();\n\n /// ```\n\n pub fn read_input(&mut self) {\n\n self.buffer.clear();\n\n let stdin = io::stdin();\n\n stdin\n\n .lock()\n\n .read_line(&mut self.buffer)\n\n .expect(\"Could not read from stdin\");\n\n // TODO: Find better way to remove newline character\n\n self.buffer.pop();\n\n }\n\n}\n", "file_path": "src/buffer.rs", "rank": 40, "score": 3.985005530892894 }, { "content": "use assert_cmd::Command;\n\nuse predicates::prelude::*;\n\n\n", "file_path": "tests/cli_test.rs", "rank": 41, "score": 2.702575110640584 } ]
Rust
src/main.rs
joxcat/nextcloud-api
5cd949c5dbbe5dc32089ba25ea53221a7b0de52b
mod app; mod library; #[macro_use] extern crate log; #[macro_use] extern crate simple_error; use crate::library::serde_is_valid_and_contain; use badlog::init_from_env; use library::{ create_user, is_env, private_decrypt, public_encrypt, set_var, var, ConvertTo, GenericValue, QueryCreateUser, QueryGetToken, Response, ResponseTypes, Tomb, UserCookies, }; const ENDPOINT: &[u8] = include_bytes!("../keys/endpoints.pub"); const PRIVATE: &[u8] = include_bytes!("../keys/nextcloud.prv"); fn main() -> Result<(), Box<dyn std::error::Error>> { openssl_probe::init_ssl_cert_env_vars(); let endpoint_pub = openssl::rsa::Rsa::public_key_from_pem(ENDPOINT)?; let self_private = openssl::rsa::Rsa::private_key_from_pem(PRIVATE)?; let mut app_base = app::build_cli(); let app = app_base.clone().get_matches(); let (sub, command) = match app.subcommand_matches("create") { Some(_) => (app.subcommand_matches("create"), "create_user"), None => match app.subcommand_matches("cookies") { Some(_) => (app.subcommand_matches("cookies"), "get_token"), None => { app_base.print_help()?; println!("\n"); std::process::exit(-1); } }, }; let sub = sub.unwrap(); match sub.value_of("log-level") { Some(x) => set_var("LOG_LEVEL", x.to_uppercase()), None => { is_env("LOG_LEVEL", &|_| (), &|env| set_var(env, "INFO")); } } init_from_env("LOG_LEVEL"); match sub.value_of("timeout") { Some(x) => set_var("HEADLESS_TIMEOUT", x), None => { is_env("HEADLESS_TIMEOUT", &|_| {}, &|env| set_var(env, "3000")); } } info!("Headless timeout: {}", var("HEADLESS_TIMEOUT")?); is_env("BASE_NC_URL", &|_| {}, &|env| { set_var(env, "https://files.hume.cloud") }); let username_uncrypt = String::from_utf8(base64::decode( &private_decrypt( &self_private, serde_json::from_str::<Tomb>( String::from_utf8(base64::decode(&sub.value_of("username").unwrap())?)?.as_str(), )?, )? 
.value, )?)?; let password_uncrypt = String::from_utf8(base64::decode( &private_decrypt( &self_private, serde_json::from_str::<Tomb>( String::from_utf8(base64::decode(&sub.value_of("password").unwrap())?)?.as_str(), )?, )? .value, )?)?; let text = &match command { "create_user" => format!( r#"{{"command":"{}","username":"{}","password":"{}"}}"#, command, username_uncrypt, password_uncrypt ), "get_token" => format!( r#"{{"command":"{}","username":"{}","password":"{}","response_type":"{}"}}"#, command, username_uncrypt, password_uncrypt, sub.value_of("response-type").unwrap() ), _ => std::process::exit(-1), }[..]; let result = if serde_is_valid_and_contain::<QueryCreateUser>(text, "command", "create_user") { let msg = serde_json::from_str::<QueryCreateUser>(text).unwrap(); is_env( "NC_ADMIN_USERNAME", &|env| info!("{} is set", env), &|env| { info!("{} is not set", env); let response = public_encrypt( &endpoint_pub, serde_json::to_string(&Response::<GenericValue<ResponseTypes>> { status_code: 503, error_msg: Some( "The server is unavailable to handle this request right now", ), error_details: Some("Error! Missing some env variables"), value: None, }) .unwrap() .as_bytes(), ) .unwrap(); let response = base64::encode(&serde_json::to_string(&response).unwrap()); println!("{}", response); std::process::exit(-1); }, ); is_env( "NC_ADMIN_PASSWORD", &|env| info!("{} is set", env), &|env| { info!("{} is not set", env); let response = public_encrypt( &endpoint_pub, serde_json::to_string(&Response::<GenericValue<ResponseTypes>> { status_code: 503, error_msg: Some( "The server is unavailable to handle this request right now", ), error_details: Some("Error! 
Missing some env variables"), value: None, }) .unwrap() .as_bytes(), ) .unwrap(); let response = base64::encode(&serde_json::to_string(&response).unwrap()); println!("{}", response); std::process::exit(-1); }, ); match create_user(msg) { Ok(_) => { let val: GenericValue<ResponseTypes> = Some(ResponseTypes::Boolean(true)); Response { status_code: 200, error_msg: None, error_details: None, value: val, } } Err(_) => { let val: GenericValue<ResponseTypes> = Some(ResponseTypes::Boolean(false)); Response { status_code: 500, error_msg: Some("Internal Server Error"), error_details: Some("Error! Cannot create the user"), value: val, } } } } else if serde_is_valid_and_contain::<QueryGetToken>(text, "command", "get_token") { let msg = serde_json::from_str::<QueryGetToken>(text).unwrap(); #[allow(unused_assignments)] let mut resp = (String::new(), String::new(), String::new()); let val_converter = msg.value_type; match library::get_tokens(msg) { Ok(_resp) => { resp = _resp; let val: GenericValue<ResponseTypes> = Some(ResponseTypes::Cookies(UserCookies { nc_session_id: val_converter.convert_to(resp.0.as_str()), nc_token: val_converter.convert_to(resp.1.as_str()), nc_username: val_converter.convert_to(resp.2.as_str()), })); Response { status_code: 200, error_msg: None, error_details: None, value: val, } } Err(e) => { warn!("{}", e); Response { status_code: 500, error_msg: Some("Internal Server Error"), error_details: Some("Error! Cannot get the tokens"), value: None, } } } } else { Response { status_code: 500, error_msg: Some("Internal Server Error"), error_details: Some("Error! Cannot get the tokens"), value: None, } }; let response = public_encrypt( &endpoint_pub, serde_json::to_string(&result).unwrap().as_bytes(), )?; let response = base64::encode(&serde_json::to_string(&response)?); println!("{}", response); std::process::exit(0) }
mod app; mod library; #[macro_use] extern crate log; #[macro_use] extern crate simple_error; use crate::library::serde_is_valid_and_contain; use badlog::init_from_env; use library::{ create_user, is_env, private_decrypt, public_encrypt, set_var, var, ConvertTo, GenericValue, QueryCreateUser, QueryGetToken, Response, ResponseTypes, Tomb, UserCookies, }; const ENDPOINT: &[u8] = include_bytes!("../keys/endpoints.pub"); const PRIVATE: &[u8] = include_bytes!("../keys/nextcloud.prv"); fn main() -> Result<(), Box<dyn std::error::Error>> { openssl_probe::init_ssl_cert_env_vars(); let endpoint_pub = openssl::rsa::Rsa::public_key_from_pem(ENDPOINT)?; let self_private = openssl::rsa::Rsa::private_key_from_pem(PRIVATE)?; let mut app_base = app::build_cli(); let app = app_base.clone().get_matches(); let (sub, command) = match app.subcommand_matches("create") { Some(_) => (app.subcommand_matches("create"), "create_user"), None => match app.subcommand_matches("cookies") { Some(_) => (app.subcommand_matches("cookies"), "get_token"), None => { app_base.print_help()?; println!("\n"); std::process::exit(-1); } }, }; let sub = sub.unwrap(); match sub.value_of("log-level") { Some(x) => set_var("LOG_LEVEL", x.to_uppercase()), None => { is_env("LOG_LEVEL", &|_| (), &|env| set_var(env, "INFO")); } } init_from_env("LOG_LEVEL"); match sub.value_of("timeout") { Some(x) => set_var("HEADLESS_TIMEOUT", x), None => { is_env("HEADLESS_TIMEOUT", &|_| {}, &|env| set_var(env, "3000")); } } info!("Headless timeout: {}", var("HEADLESS_TIMEOUT")?); is_env("BASE_NC_URL", &|_| {}, &|env| { set_var(env, "https://files.hume.cloud") }); let username_uncrypt = String::from_utf8(base64::decode( &private_decrypt( &self_private, serde_json::from_str::<Tomb>( String::from_utf8(base64::decode(&sub.value_of("username").unwrap())?)?.as_str(), )?, )? 
.value, )?)?; let password_uncrypt = String::from_utf8(base64::decode( &private_decrypt( &self_private, serde_json::from_str::<Tomb>( String::from_utf8(base64::decode(&sub.value_of("password").unwrap())?)?.as_str(), )?, )? .value, )?)?; let text = &match command { "create_user" => format!( r#"{{"command":"{}","username":"{}","password":"{}"}}"#, command, username_uncrypt, password_uncrypt ), "get_token" => format!( r#"{{"command":"{}","username":"{}","password":"{}","response_type":"{}"}}"#, command, username_uncrypt, password_uncrypt, sub.value_of("response-type").unwrap() ), _ => std::process::exit(-1), }[..]; let result = if serde_is_valid_and_contain::<QueryCreateUser>(text, "command", "create_user") { let msg = serde_json::from_str::<QueryCreateUser>(text).unwrap(); is_env( "NC_ADMIN_USERNAME", &|env| info!("{} is set", env), &|env| { info!("{} is not set", env); let response = public_encrypt( &endpoint_pub, serde_json::to_string(&Response::<GenericValue<ResponseTypes>> { status_code: 503, error_msg: Some( "The server is unavailable to handle this request right now", ), error_details: Some("Error! Missing some env variables"), value: None, }) .unwrap() .as_bytes(), ) .unwrap(); let response = base64::encode(&serde_json::to_string(&response).unwrap()); println!("{}", response); std::process::exit(-1); }, ); is_env( "NC_ADMIN_PASSWORD", &|env| info!("{} is set", env), &|env| { info!("{} is not set", env); let response = public_encrypt( &endpoint_pub, serde_json::to_string(&Response::<GenericValue<ResponseTypes>> { status_code: 503, error_msg: Some( "The server is unavailable to handle this request right now", ), error_details: Some("Error! 
Missing some env variables"), value: None, }) .unwrap() .as_bytes(), ) .unwrap(); let response = base64::encode(&serde_json::to_string(&response).unwrap()); println!("{}", response); std::process::exit(-1); }, ); match create_user(msg) { Ok(_) => { let val: GenericValue<ResponseTypes> = Some(ResponseTypes::Boolean(true)); Response { status_code: 200, error_msg: None, error_details: None, value: val, } } Err(_) => { let val: GenericValue<ResponseTypes> = Some(ResponseTypes::Boolean(false)); Response { status_code: 500, error_msg: Some("Internal Server Error"), error_details: Some("Error! Cannot create the user"), value: val, } } } } else if serde_is_valid_and_contain::<QueryGetToken>(text, "command", "get_token") { let msg = serde_json::from_str::<QueryGetToken>(text).unwrap(); #[allow(unused_assignments)] let mut resp = (String::new(), String::new(), String::new()); let val_converter = msg.value_type; match library::get_tokens(msg) { Ok(_resp) => { resp = _resp; let val: GenericValue<ResponseTypes> = Some(ResponseTypes::Cookies(UserCookies { nc_session_id: val_converter.convert_to(resp.0.as_str()), nc_token: val_converter.convert_to(resp.1.as_str()), nc_username: val_converter.convert_to(resp.2.as_str()), })); Response { status_code: 200, error_msg: None, error_details: None, value: val, } } Err(e) => { warn!("{}", e); Response { status_code: 50
0, error_msg: Some("Internal Server Error"), error_details: Some("Error! Cannot get the tokens"), value: None, } } } } else { Response { status_code: 500, error_msg: Some("Internal Server Error"), error_details: Some("Error! Cannot get the tokens"), value: None, } }; let response = public_encrypt( &endpoint_pub, serde_json::to_string(&result).unwrap().as_bytes(), )?; let response = base64::encode(&serde_json::to_string(&response)?); println!("{}", response); std::process::exit(0) }
function_block-function_prefixed
[ { "content": "pub fn create_user(query: QueryCreateUser) -> Result<(), Box<dyn std::error::Error>> {\n\n // Browser setup\n\n let self_private = openssl::rsa::Rsa::private_key_from_pem(PRIVATE)?;\n\n let username = var(\"NC_ADMIN_USERNAME\").unwrap();\n\n let username = String::from_utf8(base64::decode(\n\n &private_decrypt(\n\n &self_private,\n\n serde_json::from_str::<Tomb>(String::from_utf8(base64::decode(&username)?)?.as_str())?,\n\n )?\n\n .value,\n\n )?)?;\n\n let password = var(\"NC_ADMIN_PASSWORD\").unwrap();\n\n let password = String::from_utf8(base64::decode(\n\n &private_decrypt(\n\n &self_private,\n\n serde_json::from_str::<Tomb>(String::from_utf8(base64::decode(&password)?)?.as_str())?,\n\n )?\n\n .value,\n\n )?)?;\n\n let mut options = LaunchOptionsBuilder::default();\n", "file_path": "src/library/methods.rs", "rank": 1, "score": 122855.77860928538 }, { "content": "fn sym_enc(msg: String) -> Result<(String, String, String), Error> {\n\n let rk = thread_rng().gen::<[u8; 16]>();\n\n let random = thread_rng().gen::<[u8; 16]>();\n\n\n\n let ciphertext = openssl::symm::encrypt(\n\n openssl::symm::Cipher::aes_128_ctr(),\n\n &rk,\n\n Some(&random),\n\n msg.as_bytes(),\n\n )\n\n .map_err(|e| e.to_string())?;\n\n\n\n Ok((\n\n encode(&rk.to_vec()),\n\n encode(&random.to_vec()),\n\n encode(&ciphertext),\n\n ))\n\n}\n", "file_path": "src/library/common_ah_tools.rs", "rank": 2, "score": 104919.14739371212 }, { "content": "fn sym_dec(key: String, iv: String, encrypted: String) -> Result<Vec<u8>, Error> {\n\n Ok(openssl::symm::decrypt(\n\n openssl::symm::Cipher::aes_128_ctr(),\n\n &decode(&key)?,\n\n Some(&decode(&iv)?),\n\n &decode(&encrypted)?,\n\n )\n\n .map_err(|e| e.to_string())?)\n\n}\n", "file_path": "src/library/common_ah_tools.rs", "rank": 3, "score": 93395.22791285336 }, { "content": "#[allow(dead_code)]\n\n#[cfg(not(debug_assertions))]\n\nfn take_screenshot(_tab: &Tab, _path: &str) -> Result<(), Box<dyn std::error::Error>> {\n\n Ok(())\n\n}\n\n\n", 
"file_path": "src/library/common_ah_tools.rs", "rank": 4, "score": 81135.38322454228 }, { "content": "#[allow(dead_code)]\n\n#[cfg(debug_assertions)]\n\nfn take_screenshot(tab: &Tab, path: &str) -> Result<(), Box<dyn std::error::Error>> {\n\n let path = format!(\"{}.png\", path);\n\n let mut screen = tab.capture_screenshot(ScreenshotFormat::PNG, None, true)?;\n\n std::io::BufWriter::new(std::fs::File::create(&std::path::Path::new(path.as_str()))?)\n\n .write_all(screen.as_mut_slice())?;\n\n Ok(())\n\n}\n", "file_path": "src/library/common_ah_tools.rs", "rank": 5, "score": 81135.38322454228 }, { "content": "#[allow(dead_code)]\n\npub fn is_env(env: &str, exist: &dyn Fn(&str), notexist: &dyn Fn(&str)) {\n\n if var(env).is_err() || var(env).unwrap() == \"\" {\n\n if var_os(env).is_none() || var_os(env).unwrap() == \"\" {\n\n notexist(env);\n\n } else {\n\n exist(env);\n\n }\n\n } else {\n\n exist(env);\n\n }\n\n}\n\n\n", "file_path": "src/library/common_ah_tools.rs", "rank": 6, "score": 80283.30873516532 }, { "content": "pub fn private_decrypt(\n\n key: &openssl::rsa::Rsa<openssl::pkey::Private>,\n\n bytes: Tomb,\n\n) -> Result<Tomb, Error> {\n\n let mut sym = vec![0; key.size() as usize];\n\n let mut iv = vec![0; key.size() as usize];\n\n let len_sym = key.private_decrypt(\n\n &decode(&bytes.aes_key)?,\n\n &mut sym,\n\n openssl::rsa::Padding::PKCS1,\n\n )?;\n\n let len_iv = key.private_decrypt(\n\n &decode(&bytes.aes_iv)?,\n\n &mut iv,\n\n openssl::rsa::Padding::PKCS1,\n\n )?;\n\n let sym = encode(&sym[..len_sym].to_vec());\n\n let iv = encode(&iv[..len_iv].to_vec());\n\n let text = sym_dec(sym.clone(), iv.clone(), bytes.value)?;\n\n Ok(Tomb {\n\n aes_key: sym,\n\n aes_iv: iv,\n\n value: encode(&text),\n\n })\n\n}\n\n\n", "file_path": "src/library/common_ah_tools.rs", "rank": 7, "score": 75448.36169334059 }, { "content": "pub fn build_cli() -> App<'static, 'static> {\n\n let username: Arg<'static, 'static> = Arg::with_name(\"username\")\n\n .long(\"username\")\n\n 
.short(\"u\")\n\n .value_name(\"STRING\")\n\n .required(true)\n\n .help(\"GSuite authorized user username\");\n\n\n\n let password: Arg<'static, 'static> = Arg::with_name(\"password\")\n\n .long(\"password\")\n\n .short(\"p\")\n\n .value_name(\"STRING\")\n\n .required(true)\n\n .help(\"GSuite authorized user password\");\n\n\n\n let response_type: Arg<'static, 'static> = Arg::with_name(\"response-type\")\n\n .long(\"encoding\")\n\n .short(\"e\")\n\n .value_name(\"ENCODING\")\n\n .default_value(\"plaintext\")\n", "file_path": "src/app.rs", "rank": 8, "score": 62172.13566534165 }, { "content": "pub fn public_encrypt(\n\n key: &openssl::rsa::Rsa<openssl::pkey::Public>,\n\n bytes: &[u8],\n\n) -> Result<Tomb, Error> {\n\n let mut sym_crypt = vec![0; key.size() as usize];\n\n let mut iv_crypt = vec![0; key.size() as usize];\n\n let (sym_key, iv, text) = sym_enc(String::from_utf8(bytes.to_vec())?)?;\n\n key.public_encrypt(\n\n &decode(&sym_key)?,\n\n &mut sym_crypt,\n\n openssl::rsa::Padding::PKCS1,\n\n )?;\n\n key.public_encrypt(&decode(&iv)?, &mut iv_crypt, openssl::rsa::Padding::PKCS1)?;\n\n Ok(Tomb {\n\n aes_key: encode(&sym_crypt),\n\n aes_iv: encode(&iv_crypt),\n\n value: text,\n\n })\n\n}\n\n\n", "file_path": "src/library/common_ah_tools.rs", "rank": 9, "score": 58080.365986361845 }, { "content": "type Error = Box<dyn std::error::Error>;\n\n\n\n#[derive(Serialize, Deserialize, Debug)]\n\npub struct Tomb {\n\n pub aes_key: String,\n\n pub aes_iv: String,\n\n pub value: String,\n\n}\n\n\n", "file_path": "src/library/common_ah_tools.rs", "rank": 10, "score": 55524.875675277326 }, { "content": "pub trait ConvertTo {\n\n fn convert_to(&self, _content: &str) -> String {\n\n unimplemented!()\n\n }\n\n}\n\n\n\nimpl ConvertTo for Option<ResponseValueType> {\n\n fn convert_to(&self, content: &str) -> String {\n\n match self {\n\n Some(ResponseValueType::PlainText) => content.to_owned(),\n\n Some(ResponseValueType::Base64) => encode(content),\n\n 
Some(ResponseValueType::UrlEncoded) => super::urlencode(content),\n\n None => content.to_owned(),\n\n }\n\n }\n\n}\n", "file_path": "src/library/requests.rs", "rank": 11, "score": 52669.462304342946 }, { "content": "pub fn serde_is_valid_and_contain<'a, T>(json: &'a str, key: &str, val: &str) -> bool\n\nwhere\n\n T: Deserialize<'a>,\n\n{\n\n let key = key.to_owned();\n\n let val = val.to_owned();\n\n let result = serde_json::from_str::<serde_json::Map<String, serde_json::Value>>(json).unwrap();\n\n serde_json::from_str::<'a, T>(json).is_ok()\n\n && result.contains_key(key.as_str())\n\n && result\n\n .get(key.as_str())\n\n .unwrap()\n\n .eq(&serde_json::Value::String(val))\n\n}\n\n\n", "file_path": "src/library/common_ah_tools.rs", "rank": 12, "score": 49874.54137941276 }, { "content": "pub fn get_tokens(\n\n query: QueryGetToken,\n\n) -> Result<(String, String, String), Box<dyn std::error::Error>> {\n\n // Browser setup\n\n let username = query.username.to_owned();\n\n let password = query.password.to_owned();\n\n let mut options = LaunchOptionsBuilder::default();\n\n let options = options\n\n .path(Some(default_executable().unwrap()))\n\n .sandbox(true);\n\n #[cfg(target_os = \"windows\")]\n\n let options = options.headless(false);\n\n #[cfg(not(target_os = \"windows\"))]\n\n let options = options.headless(true);\n\n let options = options.build().unwrap();\n\n\n\n let browser = Browser::new(options)?;\n\n let tab: Arc<Tab> = browser.wait_for_initial_tab()?;\n\n let timeout =\n\n std::time::Duration::from_millis(var(\"HEADLESS_TIMEOUT\").unwrap().parse::<u64>()?);\n", "file_path": "src/library/methods.rs", "rank": 13, "score": 42934.81949878733 }, { "content": "mod cookies;\n\npub mod traits {\n\n pub use super::cookies::CookiesManagement;\n\n}\n\nmod common_ah_tools;\n\npub use common_ah_tools::{\n\n is_env, private_decrypt, public_encrypt, serde_is_valid_and_contain, set_var, sleep, urlencode,\n\n var, Tomb,\n\n};\n\nmod requests;\n\npub use requests::{ConvertTo, 
QueryCreateUser, QueryGetToken, ResponseValueType};\n\nmod responses;\n\npub use responses::{GenericValue, Response, ResponseTypes, UserCookies};\n\nmod methods;\n\npub use methods::{create_user, get_tokens};\n", "file_path": "src/library/mod.rs", "rank": 14, "score": 38683.672696362904 }, { "content": "use base64::encode;\n\nuse serde::{Deserialize, Serialize};\n\n\n\n#[derive(Debug, Copy, Clone, Serialize, Deserialize)]\n\n// {\"command\":\"create_user\",\"username\":\"johan\",\"password\":\"bonjourjohan\"}\n\npub struct QueryCreateUser<'a> {\n\n pub command: &'a str,\n\n pub username: &'a str,\n\n pub password: &'a str,\n\n}\n\n\n\n#[derive(Debug, Copy, Clone, Serialize, Deserialize)]\n\n// {\"command\":\"get_token\",\"username\":\"test\",\"password\":\"test\",\"value_type\":\"plaintext\"}\n\npub struct QueryGetToken<'a> {\n\n pub command: &'a str,\n\n pub username: &'a str,\n\n pub password: &'a str,\n\n pub value_type: Option<ResponseValueType>,\n\n}\n\n\n\n#[serde(rename_all = \"lowercase\")]\n\n#[derive(Debug, Copy, Clone, Serialize, Deserialize)]\n\npub enum ResponseValueType {\n\n PlainText,\n\n Base64,\n\n UrlEncoded,\n\n}\n\n\n", "file_path": "src/library/requests.rs", "rank": 15, "score": 38532.48305842578 }, { "content": "#[allow(dead_code)]\n\npub fn sleep(time: u64) {\n\n std::thread::sleep(Duration::from_millis(time));\n\n}\n", "file_path": "src/library/common_ah_tools.rs", "rank": 16, "score": 37616.98621273724 }, { "content": "#[allow(dead_code)]\n\npub fn urlencode<T: Deref<Target = str>>(url: T) -> String {\n\n let scopes = url.to_owned();\n\n scopes\n\n .replace(\":\", \"%3A\")\n\n .replace(\"/\", \"%2F\")\n\n .replace(\" \", \"%20\")\n\n .replace(\"?\", \"%3F\")\n\n .replace(\"&\", \"%26\")\n\n .replace(\"=\", \"%3D\")\n\n}\n", "file_path": "src/library/common_ah_tools.rs", "rank": 17, "score": 30508.70165703063 }, { "content": "#[allow(dead_code)]\n\npub fn urldecode<T: Deref<Target = str>>(url: T) -> String {\n\n let scopes = 
url.to_owned();\n\n scopes\n\n .replace(\"%3A\", \":\")\n\n .replace(\"%2F\", \"/\")\n\n .replace(\"%20\", \" \")\n\n .replace(\"%3F\", \"?\")\n\n .replace(\"%26\", \"&\")\n\n .replace(\"%3D\", \"=\")\n\n}\n", "file_path": "src/library/common_ah_tools.rs", "rank": 18, "score": 30508.70165703063 }, { "content": "use clap::{App, Arg, SubCommand};\n\n\n", "file_path": "src/app.rs", "rank": 19, "score": 22414.57643899526 }, { "content": " .required(false)\n\n .help(\"Verbosity level\");\n\n\n\n let timeout: Arg<'static, 'static> = Arg::with_name(\"timeout\")\n\n .long(\"timeout\")\n\n .short(\"t\")\n\n .value_name(\"INTEGER (u64)\")\n\n .default_value(\"3000\")\n\n .required(false)\n\n .help(\"Timeout for actions\");\n\n\n\n App::new(\"Drive Headless Tools\")\n\n .author(\"Arthur Hugon <[email protected]>\")\n\n .about(\"App to generate GSuite SA Certificates\")\n\n .subcommand(SubCommand::with_name(\"create\").args(&[\n\n username.clone(),\n\n password.clone(),\n\n sleeptime.clone(),\n\n loglevel.clone(),\n\n timeout.clone(),\n", "file_path": "src/app.rs", "rank": 20, "score": 22414.07190580131 }, { "content": " .possible_values(&[\"plaintext\", \"base64\"])\n\n .case_insensitive(true)\n\n .required(false)\n\n .help(\"Encoding of the response\");\n\n\n\n let sleeptime: Arg<'static, 'static> = Arg::with_name(\"sleeptime\")\n\n .long(\"sleeptime\")\n\n .short(\"s\")\n\n .value_name(\"INTEGER (u64)\")\n\n .default_value(\"500\")\n\n .required(false)\n\n .help(\"Basic time to wait between actions in ms\");\n\n\n\n let loglevel: Arg<'static, 'static> = Arg::with_name(\"log-level\")\n\n .long(\"log-level\")\n\n .short(\"v\")\n\n .value_name(\"LOG_LEVEL\")\n\n .default_value(\"warn\")\n\n .possible_values(&[\"trace\", \"debug\", \"info\", \"warn\", \"error\"])\n\n .case_insensitive(true)\n", "file_path": "src/app.rs", "rank": 21, "score": 22413.118918129465 }, { "content": " ]))\n\n .subcommand(SubCommand::with_name(\"cookies\").args(&[\n\n username,\n\n password,\n\n 
response_type,\n\n sleeptime,\n\n loglevel,\n\n timeout,\n\n ]))\n\n}\n", "file_path": "src/app.rs", "rank": 22, "score": 22411.499832741578 }, { "content": "use super::cookies::{CookiesFns, CookiesManagement};\n\nuse super::var;\n\nuse super::{private_decrypt, Tomb};\n\nuse super::{QueryCreateUser, QueryGetToken};\n\nuse crate::PRIVATE;\n\nuse headless_chrome::{\n\n browser::default_executable, browser::tab::Tab, Browser, LaunchOptionsBuilder,\n\n};\n\nuse std::sync::Arc;\n\n\n", "file_path": "src/library/methods.rs", "rank": 33, "score": 16858.90129328593 }, { "content": " let url = format!(\"{}/login\", var(\"BASE_NC_URL\")?);\n\n\n\n // Begin\n\n debug!(\"Navigate: {}\", url.as_str());\n\n tab.navigate_to(url.as_str())?;\n\n tab.wait_until_navigated()?;\n\n tab.wait_for_element_with_custom_timeout(\"input#user\", timeout)?\n\n .click()?;\n\n tab.type_str(username.as_str())?;\n\n tab.wait_for_element(\"input#password\")?.click()?;\n\n tab.type_str(password.as_str())?;\n\n tab.wait_for_element(\"input#submit\")?.click()?;\n\n tab.wait_until_navigated()?;\n\n let cookies = tab.get_all_cookies()?;\n\n let session = cookies.get_cookie_by_name(\"nc_session_id\");\n\n let token = cookies.get_cookie_by_name(\"nc_token\");\n\n let username = cookies.get_cookie_by_name(\"nc_username\");\n\n info!(\"{:?}\", cookies);\n\n if session.is_none() || token.is_none() || username.is_none() {\n\n bail!(\"ERROR NO COOKIES\")\n\n } else {\n\n Ok((\n\n session.unwrap().value,\n\n token.unwrap().value,\n\n username.unwrap().value,\n\n ))\n\n }\n\n}\n\n\n", "file_path": "src/library/methods.rs", "rank": 34, "score": 16857.67924830202 }, { "content": "use serde::{Deserialize, Serialize};\n\n\n\n#[derive(Debug, Clone, Serialize, Deserialize)]\n\npub struct Response<'a, T> {\n\n pub status_code: u16,\n\n pub error_msg: Option<&'a str>,\n\n pub error_details: Option<&'a str>,\n\n #[serde(flatten)]\n\n pub value: GenericValue<T>,\n\n}\n\n\n\n#[derive(Debug, Clone, Serialize, 
Deserialize)]\n\npub enum ResponseTypes {\n\n #[serde(rename(serialize = \"value\"))]\n\n Boolean(bool),\n\n #[serde(rename(serialize = \"value\"))]\n\n Cookies(UserCookies),\n\n}\n\n\n\npub type GenericValue<T> = Option<T>;\n\n\n\n#[derive(Debug, Clone, Serialize, Deserialize)]\n\npub struct UserCookies {\n\n pub nc_session_id: String,\n\n pub nc_token: String,\n\n pub nc_username: String,\n\n}\n", "file_path": "src/library/responses.rs", "rank": 35, "score": 16855.9745813955 }, { "content": " let options = options\n\n .path(Some(default_executable().unwrap()))\n\n .sandbox(true);\n\n #[cfg(target_os = \"windows\")]\n\n let options = options.headless(false);\n\n #[cfg(not(target_os = \"windows\"))]\n\n let options = options.headless(true);\n\n let options = options.build().unwrap();\n\n\n\n let browser = Browser::new(options)?;\n\n let tab: Arc<Tab> = browser.wait_for_initial_tab()?;\n\n let timeout =\n\n std::time::Duration::from_millis(var(\"HEADLESS_TIMEOUT\").unwrap().parse::<u64>()?);\n\n let url = format!(\"{}/login\", var(\"BASE_NC_URL\")?);\n\n\n\n // Begin\n\n debug!(\"Navigate: {}\", url.as_str());\n\n tab.navigate_to(url.as_str())?;\n\n tab.wait_until_navigated()?;\n\n tab.wait_for_element_with_custom_timeout(\"input#user\", timeout)?\n", "file_path": "src/library/methods.rs", "rank": 36, "score": 16854.490427715988 }, { "content": "use headless_chrome::protocol::Method;\n\nuse headless_chrome::Tab;\n\nuse serde::{Deserialize, Serialize};\n\n\n\n#[derive(Deserialize, Serialize, Debug, Clone)]\n\n#[serde(rename_all = \"camelCase\")]\n\npub struct Cookie {\n\n pub name: String,\n\n pub value: String,\n\n pub domain: String,\n\n pub path: String,\n\n pub expires: f64,\n\n pub http_only: bool,\n\n pub secure: bool,\n\n pub session: bool,\n\n pub same_site: Option<CookieSameSite>,\n\n}\n\n\n\n#[derive(Deserialize, Serialize, Debug, Clone)]\n\npub enum CookieSameSite {\n", "file_path": "src/library/cookies.rs", "rank": 37, "score": 16853.75066215789 }, { 
"content": " .click()?;\n\n tab.type_str(username.as_str())?;\n\n tab.wait_for_element(\"input#password\")?.click()?;\n\n tab.type_str(password.as_str())?;\n\n tab.wait_for_element(\"input#submit\")?.click()?;\n\n tab.wait_until_navigated()?;\n\n tab.navigate_to(\"https://files.hume.cloud/settings/users\")?;\n\n tab.wait_until_navigated()?;\n\n tab.wait_for_element(\"#new-user-button\")?.click()?;\n\n tab.wait_for_element(\"#newusername\")?.click()?;\n\n tab.type_str(query.username)?;\n\n tab.wait_for_element(\"#newuserpassword\")?.click()?;\n\n tab.type_str(query.password)?;\n\n tab.wait_for_element(\"#newsubmit\")?.click()?;\n\n Ok(())\n\n}\n", "file_path": "src/library/methods.rs", "rank": 38, "score": 16852.32681105797 }, { "content": " Strict,\n\n Lax,\n\n Extended,\n\n None,\n\n}\n\n#[derive(Serialize, Deserialize, Debug)]\n\n#[serde(rename_all = \"camelCase\")]\n\npub struct GetAllCookies {}\n\n/*{\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub urls: Option<Vec<String>>\n\n}*/\n\n\n\n#[derive(Deserialize, Serialize, Debug, Clone)]\n\n#[serde(rename_all = \"camelCase\")]\n\npub struct GetCookiesReturnObject {\n\n pub cookies: Vec<Cookie>,\n\n}\n\n\n\nimpl Method for GetAllCookies {\n\n const NAME: &'static str = \"Network.getAllCookies\";\n\n type ReturnObject = GetCookiesReturnObject;\n\n}\n\n\n", "file_path": "src/library/cookies.rs", "rank": 39, "score": 16851.52899641982 }, { "content": "use base64::{decode, encode};\n\n#[cfg(debug_assertions)]\n\nuse headless_chrome::protocol::page::ScreenshotFormat;\n\nuse headless_chrome::Tab;\n\nuse rand::{thread_rng, Rng};\n\nuse serde::{Deserialize, Serialize};\n\nuse std::env::var_os;\n\npub use std::env::{set_var, var};\n\n#[cfg(debug_assertions)]\n\nuse std::io::Write;\n\nuse std::ops::Deref;\n\nuse std::time::Duration;\n\n\n\n#[allow(dead_code)]\n\n#[cfg(debug_assertions)]\n", "file_path": "src/library/common_ah_tools.rs", "rank": 40, "score": 15290.546575875527 }, { "content": "pub trait 
CookiesManagement {\n\n fn get_all_cookies(&self) -> Result<Vec<Cookie>, Box<dyn std::error::Error>> {\n\n unimplemented!()\n\n }\n\n}\n\nimpl CookiesManagement for Tab {\n\n fn get_all_cookies(&self) -> Result<Vec<Cookie>, Box<dyn std::error::Error>> {\n\n Ok(self.call_method(GetAllCookies {})?.cookies)\n\n }\n\n}\n\n\n", "file_path": "src/library/cookies.rs", "rank": 41, "score": 14604.198635864015 }, { "content": "pub trait CookiesFns {\n\n fn get_cookie_by_name(&self, _name: &str) -> Option<Cookie> {\n\n unimplemented!()\n\n }\n\n}\n\nimpl CookiesFns for Vec<Cookie> {\n\n fn get_cookie_by_name(&self, name: &str) -> Option<Cookie> {\n\n match self.iter().find(|x| x.name.eq(name)) {\n\n Some(c) => Some(c.clone()),\n\n None => None,\n\n }\n\n }\n\n}\n", "file_path": "src/library/cookies.rs", "rank": 42, "score": 14604.198635864015 } ]
Rust
bencode/tests/ser.rs
adrianplavka/rbit
de7e950c894666b987617cfcdfa7218379c953f7
#[cfg(test)] mod ser_tests { extern crate bitrust_bencode; use bitrust_bencode::to_string; use serde::Serialize; #[test] fn ser_integers() { assert_eq!(r#"i0e"#, to_string(&0usize).unwrap()); assert_eq!(r#"i0e"#, to_string(&0isize).unwrap()); assert_eq!(r#"i1e"#, to_string(&1usize).unwrap()); assert_eq!(r#"i1e"#, to_string(&1isize).unwrap()); assert_eq!(r#"i123e"#, to_string(&123usize).unwrap()); assert_eq!(r#"i123e"#, to_string(&123isize).unwrap()); assert_eq!(r#"i0e"#, to_string(&-0).unwrap()); assert_eq!(r#"i-1e"#, to_string(&-1).unwrap()); assert_eq!(r#"i-123e"#, to_string(&-123).unwrap()); } #[test] fn ser_integers_bounds() { assert_eq!( format!("i{}e", std::u8::MAX), to_string(&std::u8::MAX).unwrap() ); assert_eq!( format!("i{}e", std::u16::MAX), to_string(&std::u16::MAX).unwrap() ); assert_eq!( format!("i{}e", std::u32::MAX), to_string(&std::u32::MAX).unwrap() ); assert_eq!( format!("i{}e", std::u64::MAX), to_string(&std::u64::MAX).unwrap() ); assert_eq!( format!("i{}e", std::i8::MAX), to_string(&std::i8::MAX).unwrap() ); assert_eq!( format!("i{}e", std::i16::MAX), to_string(&std::i16::MAX).unwrap() ); assert_eq!( format!("i{}e", std::i32::MAX), to_string(&std::i32::MAX).unwrap() ); assert_eq!( format!("i{}e", std::i64::MAX), to_string(&std::i64::MAX).unwrap() ); } #[test] fn ser_strings() { assert_eq!(r#"3:key"#, to_string(&"key").unwrap()); assert_eq!(r#"5:asdfg"#, to_string(&"asdfg").unwrap()); assert_eq!(r#"4:0087"#, to_string(&"0087").unwrap()); assert_eq!(r#"0:"#, to_string(&"").unwrap()); assert_eq!(r#"2: "#, to_string(&" ").unwrap()); assert_eq!(r#"6:❤️"#, to_string(&"❤️").unwrap()); assert_eq!( r#"21:!@#$%^&*()_+{}|:<>?"/"#, to_string(&"!@#$%^&*()_+{}|:<>?\"/").unwrap() ); assert_eq!( r#"28:KR�/[W+x/^nAkW��;T0"#, to_string(&r#"KR�/[W+x/^nAkW��;T0"#).unwrap() ); } #[test] fn ser_structs() { #[derive(Serialize, PartialEq, Debug)] struct IntegerTest { integer: i32, integers: Vec<i32>, } assert_eq!( r#"d7:integeri1995e8:integersli1ei2ei3eee"#, 
to_string(&IntegerTest { integer: 1995, integers: vec!(1, 2, 3) }) .unwrap() ); #[derive(Serialize, PartialEq, Debug)] struct StringTest<'a> { string: String, strings: Vec<String>, string_slice: &'a str, string_slices: Vec<&'a str>, } assert_eq!( r#"d6:string10:somestring7:stringsl1:a1:b1:ce12:string_slice100:longstringlongstringlongstringlongstringlongstringlongstringlongstringlongstringlongstringlongstring13:string_slicesl1:d1:e1:f1:gee"#, to_string(&StringTest { string: String::from("somestring"), strings: vec!(String::from("a"), String::from("b"), String::from("c")), string_slice: "longstring".repeat(10).as_str(), string_slices: vec!("d", "e", "f", "g") }) .unwrap() ); #[derive(Serialize, PartialEq, Debug)] struct InnerMixedStructTest<'a> { string: &'a str, } #[derive(Serialize, PartialEq, Debug)] struct MixedStructTest<'a> { integer: usize, negative_integer: i32, #[serde(borrow)] inner_struct: InnerMixedStructTest<'a>, } assert_eq!( r#"d7:integeri3000e16:negative_integeri-89343451e12:inner_structd6:string4:asdfee"#, to_string(&MixedStructTest { integer: 3000, negative_integer: -89343451, inner_struct: InnerMixedStructTest { string: "asdf" } }) .unwrap() ); } }
#[cfg(test)] mod ser_tests { extern crate bitrust_bencode; use bitrust_bencode::to_string; use serde::Serialize; #[test] fn ser_integers() { assert_eq!(r#"i0e"#, to_string(&0usize).unwrap()); assert_eq!(r#"i0e"#, to_string(&0isize).unwrap()); assert_eq!(r#"i1e"#, to_string(&1usize).unwrap()); assert_eq!(r#"i1e"#, to_string(&1isize).unwrap()); assert_eq!(r#"i123e"#, to_string(&123usize).unwrap()); assert_eq!(r#"i123e"#, to_string(&123isize).unwrap()); assert_eq!(r#"i0e"#, to_string(&-0).unwrap()); assert_eq!(r#"i-1e"#, to_string(&-1).unwrap()); assert_eq!(r#"i-123e"#, to_string(&-123).unwrap()); } #[test] fn ser_integers_bounds() { assert_eq!( format!("i{}e", std::u8::MAX), to_string(&std::u8::MAX).unwrap() ); assert_eq!( format!("i{}e", std::u16::MAX), to_string(&std::u16::MAX).unwrap() ); assert_eq!( format!("i{}e", std::u32::MAX), to_string(&std::u32::MAX).unwrap() ); assert_eq!( format!("i{}e", std::u64::MAX), to_string(&std::u64::MAX).unwrap() ); assert_e
#[test] fn ser_strings() { assert_eq!(r#"3:key"#, to_string(&"key").unwrap()); assert_eq!(r#"5:asdfg"#, to_string(&"asdfg").unwrap()); assert_eq!(r#"4:0087"#, to_string(&"0087").unwrap()); assert_eq!(r#"0:"#, to_string(&"").unwrap()); assert_eq!(r#"2: "#, to_string(&" ").unwrap()); assert_eq!(r#"6:❤️"#, to_string(&"❤️").unwrap()); assert_eq!( r#"21:!@#$%^&*()_+{}|:<>?"/"#, to_string(&"!@#$%^&*()_+{}|:<>?\"/").unwrap() ); assert_eq!( r#"28:KR�/[W+x/^nAkW��;T0"#, to_string(&r#"KR�/[W+x/^nAkW��;T0"#).unwrap() ); } #[test] fn ser_structs() { #[derive(Serialize, PartialEq, Debug)] struct IntegerTest { integer: i32, integers: Vec<i32>, } assert_eq!( r#"d7:integeri1995e8:integersli1ei2ei3eee"#, to_string(&IntegerTest { integer: 1995, integers: vec!(1, 2, 3) }) .unwrap() ); #[derive(Serialize, PartialEq, Debug)] struct StringTest<'a> { string: String, strings: Vec<String>, string_slice: &'a str, string_slices: Vec<&'a str>, } assert_eq!( r#"d6:string10:somestring7:stringsl1:a1:b1:ce12:string_slice100:longstringlongstringlongstringlongstringlongstringlongstringlongstringlongstringlongstringlongstring13:string_slicesl1:d1:e1:f1:gee"#, to_string(&StringTest { string: String::from("somestring"), strings: vec!(String::from("a"), String::from("b"), String::from("c")), string_slice: "longstring".repeat(10).as_str(), string_slices: vec!("d", "e", "f", "g") }) .unwrap() ); #[derive(Serialize, PartialEq, Debug)] struct InnerMixedStructTest<'a> { string: &'a str, } #[derive(Serialize, PartialEq, Debug)] struct MixedStructTest<'a> { integer: usize, negative_integer: i32, #[serde(borrow)] inner_struct: InnerMixedStructTest<'a>, } assert_eq!( r#"d7:integeri3000e16:negative_integeri-89343451e12:inner_structd6:string4:asdfee"#, to_string(&MixedStructTest { integer: 3000, negative_integer: -89343451, inner_struct: InnerMixedStructTest { string: "asdf" } }) .unwrap() ); } }
q!( format!("i{}e", std::i8::MAX), to_string(&std::i8::MAX).unwrap() ); assert_eq!( format!("i{}e", std::i16::MAX), to_string(&std::i16::MAX).unwrap() ); assert_eq!( format!("i{}e", std::i32::MAX), to_string(&std::i32::MAX).unwrap() ); assert_eq!( format!("i{}e", std::i64::MAX), to_string(&std::i64::MAX).unwrap() ); }
function_block-function_prefixed
[ { "content": "fn main() {\n\n\n\n}\n", "file_path": "core/src/main.rs", "rank": 0, "score": 34516.8374640069 }, { "content": "pub fn to_string<T>(value: &T) -> Result<String>\n\nwhere\n\n T: Serialize,\n\n{\n\n let mut serializer = Serializer {\n\n output: String::new(),\n\n };\n\n value.serialize(&mut serializer)?;\n\n\n\n Ok(serializer.output)\n\n}\n\n\n\nimpl Serializer {\n\n fn serialize_integer<T>(&mut self, integer: T) -> Result<()>\n\n where\n\n T: ToString,\n\n {\n\n self.output += \"i\";\n\n self.output += &integer.to_string();\n\n self.output += \"e\";\n", "file_path": "bencode/src/ser.rs", "rank": 1, "score": 24448.60008080259 }, { "content": "pub fn from_slice<'a, T>(v: &'a [u8]) -> Result<T>\n\nwhere\n\n T: Deserialize<'a>,\n\n{\n\n from_trait(read::SliceRead::new(v))\n\n}\n\n\n\n//////////////////////////////////////////////////////////////////////////////\n\n\n\nimpl<'de, R> Deserializer<R>\n\nwhere\n\n R: read::Read<'de>,\n\n{\n\n /// Parse a Bencode's signed integer value.\n\n fn parse_signed<T>(&mut self) -> Result<T>\n\n where\n\n T: Neg<Output = T> + CheckedAdd + CheckedMul + From<i8>,\n\n {\n\n let mut integer = T::from(0);\n\n let mut is_first_loop = true;\n", "file_path": "bencode/src/de.rs", "rank": 2, "score": 24442.852002637315 }, { "content": "pub fn from_str<'a, T>(s: &'a str) -> Result<T>\n\nwhere\n\n T: Deserialize<'a>,\n\n{\n\n from_trait(read::StrRead::new(s))\n\n}\n\n\n", "file_path": "bencode/src/de.rs", "rank": 3, "score": 24442.852002637315 }, { "content": "#[cfg(test)]\n\nmod de_tests {\n\n extern crate bitrust_bencode;\n\n use bitrust_bencode::{from_slice, from_str, Error};\n\n use serde::Deserialize;\n\n\n\n #[test]\n\n fn de_integers() {\n\n // Happy paths.\n\n assert_eq!(0usize, from_str(r#\"i0e\"#).unwrap());\n\n assert_eq!(0isize, from_str(r#\"i0e\"#).unwrap());\n\n assert_eq!(1usize, from_str(r#\"i1e\"#).unwrap());\n\n assert_eq!(1isize, from_str(r#\"i1e\"#).unwrap());\n\n assert_eq!(123usize, 
from_str(r#\"i123e\"#).unwrap());\n\n assert_eq!(123isize, from_str(r#\"i123e\"#).unwrap());\n\n assert_eq!(-0, from_str(r#\"i0e\"#).unwrap());\n\n assert_eq!(-1, from_str(r#\"i-1e\"#).unwrap());\n\n assert_eq!(-123, from_str(r#\"i-123e\"#).unwrap());\n\n\n\n // Unhappy paths.\n", "file_path": "bencode/tests/de.rs", "rank": 5, "score": 23657.206579294874 }, { "content": " }\n\n\n\n assert_eq!(\n\n MixedStructTest {\n\n integer: 3000,\n\n negative_integer: -89343451,\n\n inner_struct: InnerMixedStructTest { string: \"asdf\" }\n\n },\n\n from_str::<MixedStructTest>(\n\n r#\"d7:integeri3000e16:negative_integeri-89343451e12:inner_structd6:string4:asdfee\"#\n\n )\n\n .unwrap()\n\n );\n\n }\n\n\n\n #[test]\n\n fn de_structs_file() {\n\n use std::env;\n\n use std::fs;\n\n use std::path::Path;\n", "file_path": "bencode/tests/de.rs", "rank": 6, "score": 23654.821688296313 }, { "content": " IntegerTest {\n\n integer: 1995,\n\n integers: vec!(1, 2, 3)\n\n },\n\n from_str::<IntegerTest>(r#\"d7:integeri1995e8:integersli1ei2ei3eee\"#).unwrap()\n\n );\n\n\n\n #[derive(Deserialize, PartialEq, Debug)]\n\n struct StringTest<'a> {\n\n string: String,\n\n strings: Vec<String>,\n\n string_slice: &'a str,\n\n string_slices: Vec<&'a str>,\n\n }\n\n\n\n assert_eq!(\n\n StringTest {\n\n string: String::from(\"somestring\"),\n\n strings: vec!(String::from(\"a\"), String::from(\"b\"), String::from(\"c\")),\n\n string_slice: \"longstring\".repeat(10).as_str(),\n", "file_path": "bencode/tests/de.rs", "rank": 8, "score": 23651.89964462639 }, { "content": " string_slices: vec!(\"d\", \"e\", \"f\", \"g\")\n\n },\n\n from_str::<StringTest>(\n\n r#\"d6:string10:somestring7:stringsl1:a1:b1:ce12:string_slice100:longstringlongstringlongstringlongstringlongstringlongstringlongstringlongstringlongstringlongstring13:string_slicesl1:d1:e1:f1:gee\"#\n\n )\n\n .unwrap()\n\n );\n\n\n\n #[derive(Deserialize, PartialEq, Debug)]\n\n struct InnerMixedStructTest<'a> {\n\n string: &'a str,\n\n }\n\n\n\n 
#[derive(Deserialize, PartialEq, Debug)]\n\n struct MixedStructTest<'a> {\n\n integer: usize,\n\n negative_integer: i32,\n\n\n\n #[serde(borrow)]\n\n inner_struct: InnerMixedStructTest<'a>,\n", "file_path": "bencode/tests/de.rs", "rank": 9, "score": 23651.7964166321 }, { "content": "\n\n // Check for an invalid conversion from byte slice to an UTF-8 `&str`.\n\n // This sequence would translate to: `6:He?llo`.\n\n //\n\n // This sequence has an invalid code point 0xf0, therefore it should fail.\n\n assert_eq!(\n\n Err(Error::InvalidUnicodeCodePoint),\n\n from_slice::<&str>(&[0x36, 0x3a, 0x48, 0x65, 0xf0, 0x6c, 0x6c, 0x6f])\n\n );\n\n }\n\n\n\n #[test]\n\n fn de_structs() {\n\n #[derive(Deserialize, PartialEq, Debug)]\n\n struct IntegerTest {\n\n integer: i32,\n\n integers: Vec<i32>,\n\n }\n\n\n\n assert_eq!(\n", "file_path": "bencode/tests/de.rs", "rank": 13, "score": 23651.13436498489 }, { "content": "\n\n let mut dir = env::current_dir().unwrap();\n\n dir.push(Path::new(\n\n \"tests/data/ubuntu-19.10-desktop-amd64.iso.torrent\",\n\n ));\n\n let f = &fs::read(dir).unwrap();\n\n\n\n // Expecting a valid deserialization, therefore shouldn't throw any errors.\n\n from_slice::<TorrentMetainfo>(f).unwrap();\n\n }\n\n}\n", "file_path": "bencode/tests/de.rs", "rank": 14, "score": 23650.824754998648 }, { "content": " Err(Error::ExpectedUnsignedInteger),\n\n from_str::<usize>(r#\"li123ee\"#)\n\n );\n\n assert_eq!(\n\n Err(Error::ExpectedUnsignedInteger),\n\n from_str::<usize>(r#\"d1:ai323ee\"#)\n\n );\n\n assert_eq!(\n\n Err(Error::TrailingCharacters),\n\n from_str::<usize>(r#\"i123etrailing\"#)\n\n );\n\n }\n\n\n\n #[test]\n\n fn de_integers_bounds() {\n\n // Happy paths.\n\n assert_eq!(\n\n std::u8::MAX,\n\n from_str(format!(\"i{}e\", std::u8::MAX).as_str()).unwrap()\n\n );\n", "file_path": "bencode/tests/de.rs", "rank": 15, "score": 23650.499632207677 }, { "content": " assert_eq!(\n\n Err(Error::ExpectedStringIntegerLength),\n\n from_str::<&str>(r#\"d1:ae\"#)\n\n );\n\n 
assert_eq!(\n\n Err(Error::TrailingCharacters),\n\n from_str::<&str>(r#\"3:keytrailing\"#)\n\n );\n\n }\n\n\n\n #[test]\n\n fn de_floats() {\n\n // Happy paths.\n\n assert_eq!(4.32, from_str::<f32>(r#\"4:4.32\"#).unwrap());\n\n assert_eq!(134.64, from_str::<f64>(r#\"6:134.64\"#).unwrap());\n\n assert_eq!(-134.64, from_str::<f64>(r#\"7:-134.64\"#).unwrap());\n\n assert_eq!(-0.0, from_str::<f64>(r#\"4:-0.0\"#).unwrap());\n\n assert_eq!(-5032.0, from_str::<f64>(r#\"5:-5032\"#).unwrap());\n\n assert_eq!(0.0, from_str::<f64>(r#\"0:\"#).unwrap());\n\n\n", "file_path": "bencode/tests/de.rs", "rank": 17, "score": 23650.335738587655 }, { "content": " // Unhappy paths.\n\n assert_eq!(Err(Error::ExpectedFloat), from_str::<f64>(r#\"7:invalid\"#));\n\n assert_eq!(Err(Error::ExpectedFloat), from_str::<f64>(r#\"3:-0a\"#));\n\n }\n\n\n\n #[test]\n\n fn de_bytes() {\n\n // Happy paths.\n\n\n\n // Check for a valid conversion from byte slice.\n\n // This sequence would translate to: `6:He?llo`.\n\n //\n\n // Since this conversion is raw & doesn't translate to UTF-8, it should\n\n // unwrap without an error (even though there is an invalid code point).\n\n assert_eq!(\n\n &[0x48, 0x65, 0xf0, 0x6c, 0x6c, 0x6f],\n\n from_slice::<&[u8]>(&[0x36, 0x3a, 0x48, 0x65, 0xf0, 0x6c, 0x6c, 0x6f]).unwrap()\n\n );\n\n\n\n // Unhappy paths.\n", "file_path": "bencode/tests/de.rs", "rank": 18, "score": 23650.18880589545 }, { "content": " from_str::<i64>(format!(\"i{}0e\", std::i64::MAX).as_str())\n\n );\n\n }\n\n\n\n #[test]\n\n fn de_strings() {\n\n // Happy paths.\n\n assert_eq!(\"key\", from_str::<&str>(r#\"3:key\"#).unwrap());\n\n assert_eq!(\"asdfg\", from_str::<&str>(r#\"5:asdfg\"#).unwrap());\n\n assert_eq!(\"0087\", from_str::<&str>(r#\"4:0087\"#).unwrap());\n\n assert_eq!(\"\", from_str::<&str>(r#\"0:\"#).unwrap());\n\n assert_eq!(\" \", from_str::<&str>(r#\"2: \"#).unwrap());\n\n assert_eq!(\"❤️\", from_str::<&str>(r#\"6:❤️\"#).unwrap());\n\n assert_eq!(\n\n \"!@#$%^&*()_+{}|:<>?\\\"/\",\n\n 
from_str::<&str>(r#\"21:!@#$%^&*()_+{}|:<>?\"/\"#).unwrap()\n\n );\n\n assert_eq!(\n\n r#\"KR\u0010�/[W+x/^n\u001eA\u0010kW��;T0\"#,\n\n from_str::<&str>(r#\"28:KR\u0010�/[W+x/^n\u001eA\u0010kW��;T0\"#).unwrap()\n", "file_path": "bencode/tests/de.rs", "rank": 19, "score": 23650.153548080205 }, { "content": "\n\n #[derive(Deserialize, PartialEq, Debug)]\n\n struct TorrentInfo<'a> {\n\n length: usize,\n\n\n\n name: &'a str,\n\n\n\n #[serde(rename(deserialize = \"piece length\"))]\n\n piece_length: usize,\n\n\n\n pieces: &'a [u8],\n\n }\n\n\n\n #[derive(Deserialize, PartialEq, Debug)]\n\n struct TorrentMetainfo<'a> {\n\n #[serde(borrow)]\n\n announce: &'a str,\n\n\n\n info: TorrentInfo<'a>,\n\n }\n", "file_path": "bencode/tests/de.rs", "rank": 20, "score": 23648.819386105897 }, { "content": " from_str::<u32>(format!(\"i{}0e\", std::u32::MAX).as_str())\n\n );\n\n assert_eq!(\n\n Err(Error::IntegerOverflow),\n\n from_str::<u64>(format!(\"i{}0e\", std::u64::MAX).as_str())\n\n );\n\n assert_eq!(\n\n Err(Error::IntegerOverflow),\n\n from_str::<i8>(format!(\"i{}0e\", std::i8::MAX).as_str())\n\n );\n\n assert_eq!(\n\n Err(Error::IntegerOverflow),\n\n from_str::<i16>(format!(\"i{}0e\", std::i16::MAX).as_str())\n\n );\n\n assert_eq!(\n\n Err(Error::IntegerOverflow),\n\n from_str::<i32>(format!(\"i{}0e\", std::i32::MAX).as_str())\n\n );\n\n assert_eq!(\n\n Err(Error::IntegerOverflow),\n", "file_path": "bencode/tests/de.rs", "rank": 21, "score": 23648.819386105897 }, { "content": " );\n\n assert_eq!(Err(Error::EOF), from_str::<usize>(r#\"i123\"#));\n\n assert_eq!(\n\n Err(Error::ExpectedUnsignedInteger),\n\n from_str::<usize>(r#\"i123.456e\"#)\n\n );\n\n assert_eq!(\n\n Err(Error::ExpectedUnsignedInteger),\n\n from_str::<usize>(r#\"i007e\"#)\n\n );\n\n assert_eq!(Err(Error::ExpectedInteger), from_str::<isize>(r#\"i007e\"#));\n\n assert_eq!(\n\n Err(Error::ExpectedInteger),\n\n from_str::<isize>(r#\"i-1.034e\"#)\n\n );\n\n assert_eq!(\n\n Err(Error::ExpectedUnsignedInteger),\n\n 
from_str::<usize>(r#\"4:asdf\"#)\n\n );\n\n assert_eq!(\n", "file_path": "bencode/tests/de.rs", "rank": 22, "score": 23648.819386105897 }, { "content": " );\n\n\n\n // Unhappy paths.\n\n assert_eq!(Err(Error::EOF), from_str::<&str>(r#\"4:EOF\"#));\n\n assert_eq!(\n\n Err(Error::ExpectedStringIntegerLength),\n\n from_str::<&str>(r#\"string\"#)\n\n );\n\n assert_eq!(\n\n Err(Error::ExpectedStringIntegerLength),\n\n from_str::<&str>(r#\"nointeger:value\"#)\n\n );\n\n assert_eq!(\n\n Err(Error::ExpectedStringIntegerLength),\n\n from_str::<&str>(r#\"i123e\"#)\n\n );\n\n assert_eq!(\n\n Err(Error::ExpectedStringIntegerLength),\n\n from_str::<&str>(r#\"l2:abe\"#)\n\n );\n", "file_path": "bencode/tests/de.rs", "rank": 23, "score": 23648.819386105897 }, { "content": " assert_eq!(\n\n Err(Error::ExpectedUnsignedInteger),\n\n from_str::<usize>(r#\"ie\"#)\n\n );\n\n assert_eq!(Err(Error::ExpectedInteger), from_str::<isize>(r#\"i-0e\"#));\n\n assert_eq!(\n\n Err(Error::ExpectedUnsignedInteger),\n\n from_str::<usize>(r#\"i1-23e\"#)\n\n );\n\n assert_eq!(\n\n Err(Error::ExpectedUnsignedInteger),\n\n from_str::<usize>(r#\"iasdfe\"#)\n\n );\n\n assert_eq!(\n\n Err(Error::ExpectedUnsignedInteger),\n\n from_str::<usize>(r#\"i e\"#)\n\n );\n\n assert_eq!(\n\n Err(Error::ExpectedUnsignedInteger),\n\n from_str::<u8>(r#\"i-100e\"#)\n", "file_path": "bencode/tests/de.rs", "rank": 24, "score": 23648.819386105897 }, { "content": " assert_eq!(\n\n std::u16::MAX,\n\n from_str(format!(\"i{}e\", std::u16::MAX).as_str()).unwrap()\n\n );\n\n assert_eq!(\n\n std::u32::MAX,\n\n from_str(format!(\"i{}e\", std::u32::MAX).as_str()).unwrap()\n\n );\n\n assert_eq!(\n\n std::u64::MAX,\n\n from_str(format!(\"i{}e\", std::u64::MAX).as_str()).unwrap()\n\n );\n\n assert_eq!(\n\n std::i8::MAX,\n\n from_str(format!(\"i{}e\", std::i8::MAX).as_str()).unwrap()\n\n );\n\n assert_eq!(\n\n std::i16::MAX,\n\n from_str(format!(\"i{}e\", std::i16::MAX).as_str()).unwrap()\n\n );\n", "file_path": "bencode/tests/de.rs", 
"rank": 25, "score": 23648.819386105897 }, { "content": " assert_eq!(\n\n std::i32::MAX,\n\n from_str(format!(\"i{}e\", std::i32::MAX).as_str()).unwrap()\n\n );\n\n assert_eq!(\n\n std::i64::MAX,\n\n from_str(format!(\"i{}e\", std::i64::MAX).as_str()).unwrap()\n\n );\n\n\n\n // Unhappy paths.\n\n assert_eq!(\n\n Err(Error::IntegerOverflow),\n\n from_str::<u8>(format!(\"i{}0e\", std::u8::MAX).as_str())\n\n );\n\n assert_eq!(\n\n Err(Error::IntegerOverflow),\n\n from_str::<u16>(format!(\"i{}0e\", std::u16::MAX).as_str())\n\n );\n\n assert_eq!(\n\n Err(Error::IntegerOverflow),\n", "file_path": "bencode/tests/de.rs", "rank": 26, "score": 23648.819386105897 }, { "content": "fn from_trait<'de, R, T>(read: R) -> Result<T>\n\nwhere\n\n R: Read<'de>,\n\n T: de::Deserialize<'de>,\n\n{\n\n let mut de = Deserializer::new(read);\n\n let value = de::Deserialize::deserialize(&mut de)?;\n\n\n\n if de.read.end() {\n\n Ok(value)\n\n } else {\n\n Err(Error::TrailingCharacters)\n\n }\n\n}\n\n\n", "file_path": "bencode/src/de.rs", "rank": 28, "score": 23336.264564613753 }, { "content": "extern crate num_traits;\n\nextern crate serde;\n\n\n\n#[doc(inline)]\n\npub use self::de::{from_slice, from_str, Deserializer};\n\n\n\n#[doc(inline)]\n\npub use self::error::{Error, Result};\n\n\n\n#[doc(inline)]\n\npub use self::ser::{to_string, Serializer};\n\n\n\npub mod de;\n\npub mod error;\n\npub mod ser;\n\n\n\nmod read;\n", "file_path": "bencode/src/lib.rs", "rank": 29, "score": 11.70803027323596 }, { "content": "use crate::error::{Error, Result};\n\n\n\n/// Trait used by the deserializer for iterating over input. This is manually\n\n/// \"specialized\" for iterating over &[u8].\n\n///\n\n/// This trait is sealed and cannot be implemented for types outside of this\n\n/// crate.\n", "file_path": "bencode/src/read.rs", "rank": 30, "score": 6.19790223865424 }, { "content": "//! 
Bencode deserialization using Serde library.\n\n\n\nuse std::ops::Neg;\n\nuse std::str::{self, FromStr};\n\n\n\nuse num_traits::{CheckedAdd, CheckedMul, Float};\n\nuse serde::de::{self, DeserializeSeed, Visitor};\n\nuse serde::Deserialize;\n\n\n\nuse crate::error::{Error, Result};\n\nuse crate::read::{self};\n\npub use crate::read::{Read, SliceRead, StrRead};\n\n\n\n/// A structure that deserializes Bencode into Rust values.\n\npub struct Deserializer<R> {\n\n read: R,\n\n}\n\n\n\nimpl<'de, R> Deserializer<R>\n\nwhere\n", "file_path": "bencode/src/de.rs", "rank": 31, "score": 6.178130695050813 }, { "content": "//! Bencode serialization using Serde library.\n\n\n\nuse std::str;\n\nuse std::string::ToString;\n\n\n\nuse serde::ser::{self, Serialize};\n\n\n\nuse crate::error::{Error, Result};\n\n\n\npub struct Serializer {\n\n /// This string starts empty and values are appended as it's\n\n /// being serialized.\n\n output: String,\n\n}\n\n\n", "file_path": "bencode/src/ser.rs", "rank": 32, "score": 5.557748635396659 }, { "content": "/// Trait used by the deserializer for iterating over input. 
This is manually\n\n/// \"specialized\" for iterating over &[u8].\n\n///\n\n/// This trait is sealed and cannot be implemented for types outside of this\n\n/// crate.\n\npub trait Read<'de>: private::Sealed {\n\n /// Peek at the current byte in the input, without consuming it.\n\n #[doc(hidden)]\n\n fn peek_byte(&self) -> Result<u8>;\n\n\n\n /// Peek at the n-th byte in the input from the current index,\n\n /// without consuming it.\n\n #[doc(hidden)]\n\n fn peek_byte_nth(&self, n: usize) -> Result<u8>;\n\n\n\n /// Consumes the next byte in the input.\n\n #[doc(hidden)]\n\n fn next_byte(&mut self) -> Result<u8>;\n\n\n\n /// Consumes next bytes in the input until the length of inclusive end.\n\n #[doc(hidden)]\n\n fn next_bytes(&mut self, end: usize) -> Result<&'de [u8]>;\n\n\n\n // Check, if input is at end.\n\n #[doc(hidden)]\n", "file_path": "bencode/src/read.rs", "rank": 33, "score": 5.1030275199803325 }, { "content": "//! Bencode errors and result type for serialization & deserialization.\n\n\n\nuse std::error;\n\nuse std::fmt::{self, Debug, Display};\n\nuse std::io;\n\nuse std::result;\n\n\n\nuse serde::de;\n\nuse serde::ser;\n\n\n\n/// This type represents all possible errors that can occur during Bencode\n\n/// serialization & deserialization.\n\n#[derive(PartialEq)]\n\npub enum Error {\n\n /// Catch-all for deserialization & serialization error messages.\n\n Message(Box<str>),\n\n\n\n /// ExpectedInteger occurs, when a signed integer was expected at the position\n\n /// during deserialization.\n\n ExpectedInteger,\n", "file_path": "bencode/src/error.rs", "rank": 34, "score": 3.2940137838579435 }, { "content": "# Bitrust - Bencode\n\n\n\n**Bencode** is a specific data format, used in BitTorrent protocol.\n\n\n\nThis package should provide seamless Bencode serialization & deserialization of various data types, using Serde library.\n\n\n\n**Current state:**\n\n\n\n- [x] deserialization\n\n- [x] serialization\n", "file_path": "bencode/README.md", "rank": 35, 
"score": 2.853303486025416 }, { "content": " /// Note that newtype variant (and all of the other variant serialization\n\n /// methods) refer exclusively to the \"externally tagged\" enum\n\n /// representation.\n\n ///\n\n /// Serialize this to Bencode in externally tagged form as\n\n /// `d<length>:<key><length>:<value>e`.\n\n fn serialize_newtype_variant<T>(\n\n self,\n\n _name: &'static str,\n\n _variant_index: u32,\n\n variant: &'static str,\n\n value: &T,\n\n ) -> Result<()>\n\n where\n\n T: ?Sized + Serialize,\n\n {\n\n self.output += \"d\";\n\n variant.serialize(&mut *self)?;\n\n value.serialize(&mut *self)?;\n\n self.output += \"e\";\n", "file_path": "bencode/src/ser.rs", "rank": 36, "score": 2.4916186269485427 }, { "content": " /// formats typically use the index of the variant and human-readable formats\n\n /// typically use the name.\n\n fn serialize_unit_variant(\n\n self,\n\n _name: &'static str,\n\n _variant_index: u32,\n\n variant: &'static str,\n\n ) -> Result<()> {\n\n self.serialize_str(variant)\n\n }\n\n\n\n /// As is done here, serializers are encouraged to treat newtype structs as\n\n /// insignificant wrappers around the data they contain.\n\n fn serialize_newtype_struct<T>(self, _name: &'static str, value: &T) -> Result<()>\n\n where\n\n T: ?Sized + Serialize,\n\n {\n\n value.serialize(self)\n\n }\n\n\n", "file_path": "bencode/src/ser.rs", "rank": 37, "score": 2.30093697869971 }, { "content": " R: read::Read<'de>,\n\n{\n\n /// Create a Bencode deserializer from one of the possible bitrust_bencode\n\n /// input sources.\n\n ///\n\n /// Typically it is more convenient to use one of these methods instead:\n\n /// - Deserializer::from_str\n\n /// - Deserializer::from_slice\n\n ///\n\n /// Or using exported functions:\n\n /// - bitrust_bencode::from_str\n\n /// - bitrust_benocde::from_slice\n\n pub fn new(read: R) -> Self {\n\n Deserializer { read }\n\n }\n\n}\n\n\n\nimpl<'de, 'a> Deserializer<read::SliceRead<'a>> {\n\n /// Creates a Bencode 
deserializer from a `&[u8]`.\n\n pub fn from_slice(bytes: &'a [u8]) -> Self {\n", "file_path": "bencode/src/de.rs", "rank": 38, "score": 2.267175523132891 }, { "content": " self.deserialize_any(visitor)\n\n }\n\n\n\n /// Method definitions for various unsigned deserializations.\n\n ///\n\n /// Every definition guarantees, that it will use the appropriate type when\n\n /// deserializing, saving it from using bigger, unnecessary unsigned types.\n\n ///\n\n /// This can overflow, if an input has a bigger size than expected type.\n\n ///\n\n /// Can return errors when deserializing unsigned types.\n\n fn_deserialize_unsigned!(deserialize_u8, visit_u8, u8);\n\n fn_deserialize_unsigned!(deserialize_u16, visit_u16, u16);\n\n fn_deserialize_unsigned!(deserialize_u32, visit_u32, u32);\n\n fn_deserialize_unsigned!(deserialize_u64, visit_u64, u64);\n\n serde::serde_if_integer128! {\n\n fn_deserialize_unsigned!(deserialize_u128, visit_u128, u128);\n\n }\n\n\n\n /// Method definitions for various signed deserializations.\n", "file_path": "bencode/src/de.rs", "rank": 39, "score": 2.002705723134671 }, { "content": " ///\n\n /// Every definition guarantees, that it will use the appropriate type when\n\n /// deserializing, saving it from using bigger, unnecessary signed types.\n\n ///\n\n /// This can overflow, if an input has a bigger size than expected type.\n\n ///\n\n /// Can return errors when deserializing signed types.\n\n fn_deserialize_signed!(deserialize_i8, visit_i8, i8);\n\n fn_deserialize_signed!(deserialize_i16, visit_i16, i16);\n\n fn_deserialize_signed!(deserialize_i32, visit_i32, i32);\n\n fn_deserialize_signed!(deserialize_i64, visit_i64, i64);\n\n serde::serde_if_integer128! {\n\n fn_deserialize_signed!(deserialize_i128, visit_i128, i128);\n\n }\n\n\n\n fn deserialize_str<V>(self, visitor: V) -> Result<V::Value>\n\n where\n\n V: Visitor<'de>,\n\n {\n\n match self.read.peek_byte()? 
{\n", "file_path": "bencode/src/de.rs", "rank": 40, "score": 1.964511794457548 }, { "content": " fn end(&self) -> bool;\n\n}\n\n\n\n/// Bencode input source that reads from a slice of bytes.\n\npub struct SliceRead<'a> {\n\n /// This slice starts full and values are trimmed as it's\n\n /// being read from.\n\n pub slice: &'a [u8],\n\n}\n\n\n\n/// Bencode input source that reads from an UTF-8 string.\n\npub struct StrRead<'a> {\n\n delegate: SliceRead<'a>,\n\n}\n\n\n\n// Prevent users from implementing the Read trait.\n\nmod private {\n", "file_path": "bencode/src/read.rs", "rank": 41, "score": 1.9237258885780153 }, { "content": "\n\nCopyright 2018 Adrian Plavka\n\n\n\nPermission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the \"Software\"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:\n\n\n\nThe above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.\n\n\n", "file_path": "bencode/LICENSE.md", "rank": 42, "score": 1.8361165690729653 }, { "content": "\n\nCopyright 2018 Adrian Plavka\n\n\n\nPermission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the \"Software\"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:\n\n\n\nThe above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.\n\n\n", "file_path": "LICENSE.md", "rank": 
43, "score": 1.8361165690729653 }, { "content": " self,\n\n _name: &'static str,\n\n len: usize,\n\n ) -> Result<Self::SerializeTupleStruct> {\n\n self.serialize_seq(Some(len))\n\n }\n\n\n\n /// Tuple variants are represented in Bencode as `d<length>:<key>l<data>ee`.\n\n /// This method is only responsible for the externally tagged representation.\n\n fn serialize_tuple_variant(\n\n self,\n\n _name: &'static str,\n\n _variant_index: u32,\n\n variant: &'static str,\n\n _len: usize,\n\n ) -> Result<Self::SerializeTupleVariant> {\n\n self.output += \"d\";\n\n variant.serialize(&mut *self)?;\n\n self.output += \"l\";\n\n Ok(self)\n", "file_path": "bencode/src/ser.rs", "rank": 44, "score": 1.639083111887329 }, { "content": " }\n\n\n\n fn serialize_map(self, _len: Option<usize>) -> Result<Self::SerializeMap> {\n\n self.output += \"d\";\n\n Ok(self)\n\n }\n\n\n\n /// Structs look just like maps in Bencode. In particular, Bencode requires that we\n\n /// serialize the field names of the struct. 
Other formats may be able to\n\n /// omit the field names when serializing structs because the corresponding\n\n /// Deserialize implementation is required to know what the keys are without\n\n /// looking at the serialized data.\n\n fn serialize_struct(self, _name: &'static str, len: usize) -> Result<Self::SerializeStruct> {\n\n self.serialize_map(Some(len))\n\n }\n\n\n\n /// Struct variants are represented in Bencode as `d<length>:<key>d<key>:<value>...ee`.\n\n /// This is the externally tagged representation.\n\n fn serialize_struct_variant(\n\n self,\n", "file_path": "bencode/src/ser.rs", "rank": 45, "score": 1.3836869874394013 }, { "content": "impl<'de, 'a, R: Read<'de>> de::Deserializer<'de> for &'a mut Deserializer<R> {\n\n type Error = Error;\n\n\n\n /// Look at the input data to decide, what Serde data model type to deserialize as.\n\n /// It will infer a Bencode type based on starting characters, useful when no\n\n /// type was provided to `from_*` deserialization functions.\n\n ///\n\n /// Integers will be always deserialized to unsigned or signed type, depending on\n\n /// a knowledge, if unparsed integer starts with a '-':\n\n /// - If it doesn't, the type of integer will be `u64`.\n\n /// - If it does, the type of integer will be `i64`.\n\n ///\n\n /// Not all data formats are able to support this operation & will result in an\n\n /// UnknownType error.\n\n ///\n\n /// Can return errors when deserializing types.\n\n fn deserialize_any<V>(self, visitor: V) -> Result<V::Value>\n\n where\n\n V: Visitor<'de>,\n\n {\n", "file_path": "bencode/src/de.rs", "rank": 46, "score": 1.2927846605110052 }, { "content": " }\n\n}\n\n\n\n/// Some `Serialize` types are not able to hold a key and value in memory at the\n\n/// same time so `SerializeMap` implementations are required to support\n\n/// `serialize_key` and `serialize_value` individually.\n\nimpl<'a> ser::SerializeMap for &'a mut Serializer {\n\n type Ok = ();\n\n type Error = Error;\n\n\n\n /// The Serde data 
model allows map keys to be any serializable type. Bencode\n\n /// only allows string keys so the implementation below will produce invalid\n\n /// Bencode if the key serializes as something other than a string.\n\n ///\n\n /// A real Bencode serializer would need to validate that map keys are strings.\n\n /// This can be done by using a different Serializer to serialize the key\n\n /// (instead of `&mut **self`) and having that other serializer only\n\n /// implement `serialize_str` and return an error on any other data type.\n\n fn serialize_key<T>(&mut self, key: &T) -> Result<()>\n\n where\n", "file_path": "bencode/src/ser.rs", "rank": 47, "score": 1.2611299334445634 } ]
Rust
crates/notation_bevy/src/play/play_plugin.rs
theAdamColton/notation
270e8592127c8e60ff33b12b040ab55dba5c92a3
use std::sync::Arc; use notation_bevy_utils::prelude::{DoLayoutEvent, GridData, LayoutData, ShapeOp, ColorBackground}; use notation_midi::prelude::PlayControlEvent; use notation_model::prelude::{ LaneEntry, PlayState, PlayingState, Position, Tab, TickResult, }; use bevy::prelude::*; use crate::bar::bar_beat::BarBeatData; use crate::bar::bar_view::BarView; use crate::chord::chord_color_background::ChordColorBackground; use crate::chord::chord_playing::ChordPlaying; use crate::prelude::{ BarPlaying, EntryPlaying, NotationAssetsStates, NotationSettings, NotationTheme, TabBars, TabState, }; use crate::settings::layout_settings::LayoutMode; use crate::tab::tab_events::TabBarsResizedEvent; use crate::tab::tab_state::TabPlayStateChanged; use crate::ui::layout::NotationLayout; use super::bar_indicator::{BarIndicatorData}; use super::play_button::PlayButton; use super::play_panel::PlayPanel; use super::pos_indicator::{PosIndicatorData}; pub type PlayPanelDoLayoutEvent = DoLayoutEvent<NotationLayout<'static>, PlayPanel>; pub struct PlayPlugin; impl Plugin for PlayPlugin { fn build(&self, app: &mut AppBuilder) { PlayPanelDoLayoutEvent::setup(app); app.add_system_set( SystemSet::on_update(NotationAssetsStates::Loaded) .with_system(PlayPanel::do_layout.system()) .with_system(PlayPanel::on_play_control_evt.system()) .with_system(PlayButton::on_layout_changed.system()) .with_system(on_bar_playing_changed.system()) .with_system(on_tab_play_state_changed.system()) .with_system(on_play_control_evt.system()) .with_system(on_tab_resized.system()), ); } } impl PlayPlugin { pub fn spawn_indicators( commands: &mut Commands, theme: &NotationTheme, entity: Entity, tab: &Arc<Tab>, ) { let bar_data = BarIndicatorData::new(tab.clone()); bar_data.create(commands, &theme, entity); let pos_data = PosIndicatorData::new(tab.bar_units()); pos_data.create(commands, &theme, entity); } } fn update_indicators( commands: &mut Commands, theme: &NotationTheme, settings: &mut NotationSettings, 
chord_color_background_query: &mut Query<(Entity, &mut ColorBackground), With<ChordColorBackground>>, bar_indicator_query: &mut Query<(Entity, &mut BarIndicatorData), With<BarIndicatorData>>, pos_indicator_query: &mut Query<(Entity, &mut PosIndicatorData), With<PosIndicatorData>>, tab_bars_query: &mut Query<( Entity, &mut Transform, &Arc<TabBars>, &LayoutData, &Arc<GridData>, )>, bar_playing: &BarPlaying, bar_layout: LayoutData, ) { let bar_props = bar_playing.bar_props; let mut in_bar_pos = None; for (entity, mut data) in pos_indicator_query.iter_mut() { data.bar_props = bar_props; data.bar_layout = bar_layout; data.update(commands, &theme, entity); settings .layout .focus_bar(commands, theme, tab_bars_query, &data); in_bar_pos = Some(data.bar_position.in_bar_pos); } for (entity, mut data) in bar_indicator_query.iter_mut() { data.bar_props = bar_props; data.bar_layout = bar_layout; data.update_data(commands, theme, entity, bar_props, bar_layout, in_bar_pos); ChordColorBackground::update_color(commands, theme, chord_color_background_query, data.chord); } } fn on_tab_resized( mut evts: EventReader<TabBarsResizedEvent>, mut commands: Commands, theme: Res<NotationTheme>, mut settings: ResMut<NotationSettings>, mut query: Query<(Entity, &BarPlaying, &Arc<BarView>, &LayoutData)>, mut chord_color_background_query: Query<(Entity, &mut ColorBackground), With<ChordColorBackground>>, mut bar_indicator_query: Query<(Entity, &mut BarIndicatorData), With<BarIndicatorData>>, mut pos_indicator_query: Query<(Entity, &mut PosIndicatorData), With<PosIndicatorData>>, mut tab_bars_query: Query<( Entity, &mut Transform, &Arc<TabBars>, &LayoutData, &Arc<GridData>, )>, ) { if theme._bypass_systems { return; } let mut bars = None; for evt in evts.iter() { bars = Some(&evt.0); } if let Some(_bars) = bars { let mut first_playing_layout = None; let mut current_playing_layout = None; for (_entity, playing, view, layout) in query.iter_mut() { if view.bar_props.bar_ordinal == 0 { 
first_playing_layout = Some((playing, layout.clone())); } if playing.value == PlayingState::Current { current_playing_layout = Some((playing, layout.clone())); break; } } let playing_layout = if current_playing_layout.is_none() { first_playing_layout } else { current_playing_layout }; if let Some((playing, layout)) = playing_layout { update_indicators( &mut commands, &theme, &mut settings, &mut chord_color_background_query, &mut bar_indicator_query, &mut pos_indicator_query, &mut tab_bars_query, playing, layout.clone(), ); } } } fn on_bar_playing_changed( mut commands: Commands, theme: Res<NotationTheme>, mut settings: ResMut<NotationSettings>, mut query: Query<(Entity, &BarPlaying, &Arc<BarView>, &LayoutData), Changed<BarPlaying>>, mut chord_color_background_query: Query<(Entity, &mut ColorBackground), With<ChordColorBackground>>, mut bar_indicator_query: Query<(Entity, &mut BarIndicatorData), With<BarIndicatorData>>, mut pos_indicator_query: Query<(Entity, &mut PosIndicatorData), With<PosIndicatorData>>, mut tab_bars_query: Query<( Entity, &mut Transform, &Arc<TabBars>, &LayoutData, &Arc<GridData>, )>, ) { if theme._bypass_systems { return; } for (_entity, playing, _view, layout) in query.iter_mut() { if playing.value == PlayingState::Current { update_indicators( &mut commands, &theme, &mut settings, &mut chord_color_background_query, &mut bar_indicator_query, &mut pos_indicator_query, &mut tab_bars_query, playing, layout.clone(), ); break; } } } fn on_tab_play_state_changed( mut commands: Commands, theme: Res<NotationTheme>, mut settings: ResMut<NotationSettings>, mut query: Query<(Entity, &TabState), Added<TabPlayStateChanged>>, mut pos_indicator_query: Query<(Entity, &mut PosIndicatorData), With<PosIndicatorData>>, mut bar_playing_query: Query<(Entity, &mut BarPlaying), With<BarPlaying>>, mut entry_playing_query: Query< (Entity, &Arc<LaneEntry>, &mut EntryPlaying), With<EntryPlaying>, >, mut tab_bars_query: Query<( Entity, &mut Transform, &Arc<TabBars>, 
&LayoutData, &Arc<GridData>, )>, ) { if theme._bypass_systems { return; } for (state_entity, tab_state) in query.iter_mut() { TabState::clear_play_state_changed(&mut commands, state_entity); if let Some(pos_data) = PosIndicatorData::update_pos( &mut commands, &theme, &mut pos_indicator_query, tab_state.play_control.position, ) { settings .layout .focus_bar(&mut commands, &theme, &mut tab_bars_query, &pos_data); } if !tab_state.play_control.play_state.is_playing() { let playing_bar_ordinal = tab_state.play_control.position.bar.bar_ordinal; BarPlaying::update(&mut bar_playing_query, tab_state, playing_bar_ordinal); EntryPlaying::update(&mut entry_playing_query, tab_state); } } } fn on_tick( commands: &mut Commands, theme: &NotationTheme, settings: &mut NotationSettings, chord_color_background_query: &mut Query<(Entity, &mut ColorBackground), With<ChordColorBackground>>, bar_indicator_query: &mut Query<(Entity, &mut BarIndicatorData), With<BarIndicatorData>>, pos_indicator_query: &mut Query<(Entity, &mut PosIndicatorData), With<PosIndicatorData>>, bar_playing_query: &mut Query<(Entity, &mut BarPlaying), With<BarPlaying>>, entry_playing_query: &mut Query< (Entity, &Arc<LaneEntry>, &mut EntryPlaying), With<EntryPlaying>, >, chord_playing_query: &mut Query<(Entity, &mut ChordPlaying), With<ChordPlaying>>, tab_bars_query: &mut Query<( Entity, &mut Transform, &Arc<TabBars>, &LayoutData, &Arc<GridData>, )>, state_entity: Entity, tab_state: &mut TabState, new_position: &Position, tick_result: &TickResult, ) { tab_state.set_position(*new_position); let TickResult { changed: _changed, end_passed, stopped, jumped, } = tick_result; if *stopped { tab_state.set_play_state(commands, state_entity, PlayState::Stopped); } let playing_bar_ordinal = new_position.bar.bar_ordinal; BarPlaying::update(bar_playing_query, tab_state, playing_bar_ordinal); EntryPlaying::update_with_pos( entry_playing_query, tab_state, new_position, *end_passed, *jumped, ); let chord_changed = 
ChordPlaying::update(chord_playing_query, tab_state, new_position); if let Some(pos_data) = PosIndicatorData::update_pos(commands, theme, pos_indicator_query, *new_position) { if settings.layout.mode == LayoutMode::Line && pos_data.is_synced() { settings .layout .focus_bar(commands, theme, tab_bars_query, &pos_data); } if chord_changed > 0 { if let Some(bar_data) = BarIndicatorData::update_pos(commands, theme, bar_indicator_query, pos_data.bar_props, pos_data.bar_position.in_bar_pos) { ChordColorBackground::update_color(commands, theme, chord_color_background_query, bar_data.chord); } } } } fn on_play_control_evt( mut commands: Commands, theme: Res<NotationTheme>, mut settings: ResMut<NotationSettings>, mut evts: EventReader<PlayControlEvent>, mut tab_state_query: Query<(Entity, &mut TabState)>, mut chord_color_background_query: Query<(Entity, &mut ColorBackground), With<ChordColorBackground>>, mut bar_indicator_query: Query<(Entity, &mut BarIndicatorData), With<BarIndicatorData>>, mut pos_indicator_query: Query<(Entity, &mut PosIndicatorData), With<PosIndicatorData>>, mut bar_playing_query: Query<(Entity, &mut BarPlaying), With<BarPlaying>>, mut entry_playing_query: Query< (Entity, &Arc<LaneEntry>, &mut EntryPlaying), With<EntryPlaying>, >, mut chord_playing_query: Query<(Entity, &mut ChordPlaying), With<ChordPlaying>>, mut tab_bars_query: Query<( Entity, &mut Transform, &Arc<TabBars>, &LayoutData, &Arc<GridData>, )>, mut beat_query: Query<(Entity, &mut BarBeatData)>, ) { if theme._bypass_systems { return; } for evt in evts.iter() { for (state_entity, mut tab_state) in tab_state_query.iter_mut() { if !tab_state.under_control { continue; } match evt { PlayControlEvent::OnTick { position, tick_result, } => on_tick( &mut commands, &theme, &mut settings, &mut chord_color_background_query, &mut bar_indicator_query, &mut pos_indicator_query, &mut bar_playing_query, &mut entry_playing_query, &mut chord_playing_query, &mut tab_bars_query, state_entity, &mut tab_state, 
position, tick_result, ), PlayControlEvent::OnPlayState(play_state) => { tab_state.set_play_state(&mut commands, state_entity, *play_state); } PlayControlEvent::OnSpeedFactor(play_speed) => { tab_state.set_speed_factor(*play_speed); } PlayControlEvent::OnBeginEnd(begin_bar_ordinal, end_bar_ordinal) => { tab_state.set_begin_end(*begin_bar_ordinal, *end_bar_ordinal); BarBeatData::update_all(&mut commands, &theme, &tab_state, &mut beat_query); } PlayControlEvent::OnShouldLoop(should_loop) => { tab_state.set_should_loop(*should_loop); } } } } }
use std::sync::Arc; use notation_bevy_utils::prelude::{DoLayoutEvent, GridData, LayoutData, ShapeOp, ColorBackground}; use notation_midi::prelude::PlayControlEvent; use notation_model::prelude::{ LaneEntry, PlayState, PlayingState, Position, Tab, TickResult, }; use bevy::prelude::*; use crate::bar::bar_beat::BarBeatData; use crate::bar::bar_view::BarView; use crate::chord::chord_color_background::ChordColorBackground; use crate::chord::chord_playing::ChordPlaying; use crate::prelude::{ BarPlaying, EntryPlaying, NotationAssetsStates, NotationSettings, NotationTheme, TabBars, TabState, }; use crate::settings::layout_settings::LayoutMode; use crate::tab::tab_events::TabBarsResizedEvent; use crate::tab::tab_state::TabPlayStateChanged; use crate::ui::layout::NotationLayout; use super::bar_indicator::{BarIndicatorData}; use super::play_button::PlayButton; use super::play_panel::PlayPanel; use super::pos_indicator::{PosIndicatorData}; pub type PlayPanelDoLayoutEvent = DoLayoutEvent<NotationLayout<'static>, PlayPanel>; pub struct PlayPlugin; impl Plugin for PlayPlugin { fn build(&self, app: &mut AppBuilder) { PlayPanelDoLayoutEvent::setup(app); app.add_system_set( SystemSet::on_update(NotationAssetsStates::Loaded) .with_system(PlayPanel::do_layout.system()) .with_system(PlayPanel::on_play_control_evt.system()) .with_system(PlayButton::on_layout_changed.system()) .with_system(on_bar_playing_changed.system()) .with_system(on_tab_play_state_changed.system()) .with_system(on_play_control_evt.system()) .with_system(on_tab_resized.system()), ); } } impl PlayPlugin { pub fn spawn_indicators( commands: &mut Commands, theme: &NotationTheme, entity: Entity, tab: &Arc<Tab>, ) { let bar_data = BarIndicatorData::new(tab.clone()); bar_data.create(commands, &theme, entity); let pos_data = PosIndicatorData::new(tab.bar_units()); pos_data.create(commands, &theme, entity); } } fn update_indicators( commands: &mut Commands, theme: &NotationTheme, settings: &mut NotationSettings, 
chord_color_background_query: &mut Query<(Entity, &mut ColorBackground), With<ChordColorBackground>>, bar_indicator_query: &mut Query<(Entity, &mut BarIndicatorData), With<BarIndicatorD
fn on_tab_resized( mut evts: EventReader<TabBarsResizedEvent>, mut commands: Commands, theme: Res<NotationTheme>, mut settings: ResMut<NotationSettings>, mut query: Query<(Entity, &BarPlaying, &Arc<BarView>, &LayoutData)>, mut chord_color_background_query: Query<(Entity, &mut ColorBackground), With<ChordColorBackground>>, mut bar_indicator_query: Query<(Entity, &mut BarIndicatorData), With<BarIndicatorData>>, mut pos_indicator_query: Query<(Entity, &mut PosIndicatorData), With<PosIndicatorData>>, mut tab_bars_query: Query<( Entity, &mut Transform, &Arc<TabBars>, &LayoutData, &Arc<GridData>, )>, ) { if theme._bypass_systems { return; } let mut bars = None; for evt in evts.iter() { bars = Some(&evt.0); } if let Some(_bars) = bars { let mut first_playing_layout = None; let mut current_playing_layout = None; for (_entity, playing, view, layout) in query.iter_mut() { if view.bar_props.bar_ordinal == 0 { first_playing_layout = Some((playing, layout.clone())); } if playing.value == PlayingState::Current { current_playing_layout = Some((playing, layout.clone())); break; } } let playing_layout = if current_playing_layout.is_none() { first_playing_layout } else { current_playing_layout }; if let Some((playing, layout)) = playing_layout { update_indicators( &mut commands, &theme, &mut settings, &mut chord_color_background_query, &mut bar_indicator_query, &mut pos_indicator_query, &mut tab_bars_query, playing, layout.clone(), ); } } } fn on_bar_playing_changed( mut commands: Commands, theme: Res<NotationTheme>, mut settings: ResMut<NotationSettings>, mut query: Query<(Entity, &BarPlaying, &Arc<BarView>, &LayoutData), Changed<BarPlaying>>, mut chord_color_background_query: Query<(Entity, &mut ColorBackground), With<ChordColorBackground>>, mut bar_indicator_query: Query<(Entity, &mut BarIndicatorData), With<BarIndicatorData>>, mut pos_indicator_query: Query<(Entity, &mut PosIndicatorData), With<PosIndicatorData>>, mut tab_bars_query: Query<( Entity, &mut Transform, 
&Arc<TabBars>, &LayoutData, &Arc<GridData>, )>, ) { if theme._bypass_systems { return; } for (_entity, playing, _view, layout) in query.iter_mut() { if playing.value == PlayingState::Current { update_indicators( &mut commands, &theme, &mut settings, &mut chord_color_background_query, &mut bar_indicator_query, &mut pos_indicator_query, &mut tab_bars_query, playing, layout.clone(), ); break; } } } fn on_tab_play_state_changed( mut commands: Commands, theme: Res<NotationTheme>, mut settings: ResMut<NotationSettings>, mut query: Query<(Entity, &TabState), Added<TabPlayStateChanged>>, mut pos_indicator_query: Query<(Entity, &mut PosIndicatorData), With<PosIndicatorData>>, mut bar_playing_query: Query<(Entity, &mut BarPlaying), With<BarPlaying>>, mut entry_playing_query: Query< (Entity, &Arc<LaneEntry>, &mut EntryPlaying), With<EntryPlaying>, >, mut tab_bars_query: Query<( Entity, &mut Transform, &Arc<TabBars>, &LayoutData, &Arc<GridData>, )>, ) { if theme._bypass_systems { return; } for (state_entity, tab_state) in query.iter_mut() { TabState::clear_play_state_changed(&mut commands, state_entity); if let Some(pos_data) = PosIndicatorData::update_pos( &mut commands, &theme, &mut pos_indicator_query, tab_state.play_control.position, ) { settings .layout .focus_bar(&mut commands, &theme, &mut tab_bars_query, &pos_data); } if !tab_state.play_control.play_state.is_playing() { let playing_bar_ordinal = tab_state.play_control.position.bar.bar_ordinal; BarPlaying::update(&mut bar_playing_query, tab_state, playing_bar_ordinal); EntryPlaying::update(&mut entry_playing_query, tab_state); } } } fn on_tick( commands: &mut Commands, theme: &NotationTheme, settings: &mut NotationSettings, chord_color_background_query: &mut Query<(Entity, &mut ColorBackground), With<ChordColorBackground>>, bar_indicator_query: &mut Query<(Entity, &mut BarIndicatorData), With<BarIndicatorData>>, pos_indicator_query: &mut Query<(Entity, &mut PosIndicatorData), With<PosIndicatorData>>, bar_playing_query: 
&mut Query<(Entity, &mut BarPlaying), With<BarPlaying>>, entry_playing_query: &mut Query< (Entity, &Arc<LaneEntry>, &mut EntryPlaying), With<EntryPlaying>, >, chord_playing_query: &mut Query<(Entity, &mut ChordPlaying), With<ChordPlaying>>, tab_bars_query: &mut Query<( Entity, &mut Transform, &Arc<TabBars>, &LayoutData, &Arc<GridData>, )>, state_entity: Entity, tab_state: &mut TabState, new_position: &Position, tick_result: &TickResult, ) { tab_state.set_position(*new_position); let TickResult { changed: _changed, end_passed, stopped, jumped, } = tick_result; if *stopped { tab_state.set_play_state(commands, state_entity, PlayState::Stopped); } let playing_bar_ordinal = new_position.bar.bar_ordinal; BarPlaying::update(bar_playing_query, tab_state, playing_bar_ordinal); EntryPlaying::update_with_pos( entry_playing_query, tab_state, new_position, *end_passed, *jumped, ); let chord_changed = ChordPlaying::update(chord_playing_query, tab_state, new_position); if let Some(pos_data) = PosIndicatorData::update_pos(commands, theme, pos_indicator_query, *new_position) { if settings.layout.mode == LayoutMode::Line && pos_data.is_synced() { settings .layout .focus_bar(commands, theme, tab_bars_query, &pos_data); } if chord_changed > 0 { if let Some(bar_data) = BarIndicatorData::update_pos(commands, theme, bar_indicator_query, pos_data.bar_props, pos_data.bar_position.in_bar_pos) { ChordColorBackground::update_color(commands, theme, chord_color_background_query, bar_data.chord); } } } } fn on_play_control_evt( mut commands: Commands, theme: Res<NotationTheme>, mut settings: ResMut<NotationSettings>, mut evts: EventReader<PlayControlEvent>, mut tab_state_query: Query<(Entity, &mut TabState)>, mut chord_color_background_query: Query<(Entity, &mut ColorBackground), With<ChordColorBackground>>, mut bar_indicator_query: Query<(Entity, &mut BarIndicatorData), With<BarIndicatorData>>, mut pos_indicator_query: Query<(Entity, &mut PosIndicatorData), With<PosIndicatorData>>, mut 
bar_playing_query: Query<(Entity, &mut BarPlaying), With<BarPlaying>>, mut entry_playing_query: Query< (Entity, &Arc<LaneEntry>, &mut EntryPlaying), With<EntryPlaying>, >, mut chord_playing_query: Query<(Entity, &mut ChordPlaying), With<ChordPlaying>>, mut tab_bars_query: Query<( Entity, &mut Transform, &Arc<TabBars>, &LayoutData, &Arc<GridData>, )>, mut beat_query: Query<(Entity, &mut BarBeatData)>, ) { if theme._bypass_systems { return; } for evt in evts.iter() { for (state_entity, mut tab_state) in tab_state_query.iter_mut() { if !tab_state.under_control { continue; } match evt { PlayControlEvent::OnTick { position, tick_result, } => on_tick( &mut commands, &theme, &mut settings, &mut chord_color_background_query, &mut bar_indicator_query, &mut pos_indicator_query, &mut bar_playing_query, &mut entry_playing_query, &mut chord_playing_query, &mut tab_bars_query, state_entity, &mut tab_state, position, tick_result, ), PlayControlEvent::OnPlayState(play_state) => { tab_state.set_play_state(&mut commands, state_entity, *play_state); } PlayControlEvent::OnSpeedFactor(play_speed) => { tab_state.set_speed_factor(*play_speed); } PlayControlEvent::OnBeginEnd(begin_bar_ordinal, end_bar_ordinal) => { tab_state.set_begin_end(*begin_bar_ordinal, *end_bar_ordinal); BarBeatData::update_all(&mut commands, &theme, &tab_state, &mut beat_query); } PlayControlEvent::OnShouldLoop(should_loop) => { tab_state.set_should_loop(*should_loop); } } } } }
ata>>, pos_indicator_query: &mut Query<(Entity, &mut PosIndicatorData), With<PosIndicatorData>>, tab_bars_query: &mut Query<( Entity, &mut Transform, &Arc<TabBars>, &LayoutData, &Arc<GridData>, )>, bar_playing: &BarPlaying, bar_layout: LayoutData, ) { let bar_props = bar_playing.bar_props; let mut in_bar_pos = None; for (entity, mut data) in pos_indicator_query.iter_mut() { data.bar_props = bar_props; data.bar_layout = bar_layout; data.update(commands, &theme, entity); settings .layout .focus_bar(commands, theme, tab_bars_query, &data); in_bar_pos = Some(data.bar_position.in_bar_pos); } for (entity, mut data) in bar_indicator_query.iter_mut() { data.bar_props = bar_props; data.bar_layout = bar_layout; data.update_data(commands, theme, entity, bar_props, bar_layout, in_bar_pos); ChordColorBackground::update_color(commands, theme, chord_color_background_query, data.chord); } }
function_block-function_prefixed
[ { "content": "pub fn add_notation_app_events(app: &mut AppBuilder) {\n\n app.add_event::<WindowResizedEvent>();\n\n app.add_event::<MouseClickedEvent>();\n\n app.add_event::<MouseDraggedEvent>();\n\n}\n", "file_path": "crates/notation_bevy/src/app/app_events.rs", "rank": 0, "score": 255115.65002068676 }, { "content": "pub fn new_tab() -> Tab {\n\n tab! {\n\n \"8bf15271-53dc-482e-8704-d62a56085ebb\"\n\n Meta: TabMeta::new(Key::G, Scale::Major, Signature::_4_4, Tempo::Bpm(60))\n\n Tracks: [\n\n {chord Chord [\n\n $duration = _1\n\n \"1\" Chord ( 1: 3 5 )\n\n \"1:246\" Chord ( 1: 2- 4+ 6- )\n\n \"6-\" Chord ( 6: 3- 5 )\n\n ]}\n\n {guitar Guitar [\n\n Fretboard\n\n $duration = _1\n\n \"Em\" Shape ( 0 2 2 0 0 0 )\n\n \"G\" Shape ( 3 2 0 0 0 0 )\n\n $duration = T_1_8\n\n \"picks\" Pick [ 6 3 2 1 2 3 ]\n\n Pick [ 6 3 2 1 2 3 ] |\n\n ]}\n", "file_path": "apps/notation_tool/src/tab/test.rs", "rank": 1, "score": 252549.6764397099 }, { "content": "pub fn new_tab() -> Tab {\n\n tab! {\n\n \"c430733f-46c3-4db2-9685-a72c05027e62\"\n\n Meta: TabMeta::new(Key::G, Scale::Major, Signature::_4_4, Tempo::Bpm(60))\n\n Tracks: [\n\n {chord Chord [\n\n $duration = _1\n\n \"1\" Chord ( 1: 3 5 )\n\n \"6-\" Chord ( 6: 3- 5 )\n\n ]}\n\n {guitar Guitar [\n\n Fretboard\n\n $duration = _1\n\n \"Em\" Shape ( 0 2 2 0 0 0 )\n\n \"G\" Shape ( 3 2 0 0 0 0 )\n\n $duration = T_1_8\n\n \"picks\" Pick [ 6 3 2 1 2 3 ]\n\n Pick [ 6 3 2 1 2 3 ] |\n\n ]}\n\n ]\n", "file_path": "apps/notation_tool/src/tab/beginner/right_hand_1.rs", "rank": 2, "score": 248140.85386487437 }, { "content": "pub fn new_tab() -> Tab {\n\n tab! 
{\n\n \"ae2d8c46-9a9f-4ae8-932b-31d811797ea5\"\n\n Meta: TabMeta::new(Key::A, Scale::Major, Signature::_4_4, Tempo::Bpm(120))\n\n Tracks: [\n\n {chord Chord [\n\n $duration = _1\n\n \"1\" Chord ( 1: 3 5 )\n\n \"6-\" Chord ( 6: 3- 5 )\n\n \"4\" Chord ( 4: 3 5 )\n\n \"5\" Chord ( 5: 3 5 )\n\n ]}\n\n {guitar Guitar [\n\n Fretboard\n\n $duration = _1\n\n \"A\" Shape ( 0 0 2 2 2 0 )\n\n \"#Fm\" Shape ( 2 4 4 2 2 2 )\n\n \"D\" Shape ( _ _ 0 2 3 2 )\n\n \"E\" Shape ( 0 2 2 1 0 0 )\n\n $duration = _1_8\n", "file_path": "apps/notation_tool/src/tab/songs/misc/stand_by_me.rs", "rank": 3, "score": 248140.85386487437 }, { "content": "pub fn new_tab() -> Tab {\n\n tab! {\n\n \"06dd7278-cdaf-40dd-abc6-6e66ec2d6b8c\"\n\n Meta: TabMeta::new(Key::E, Scale::Dorian, Signature::_3_4, Tempo::Bpm(110))\n\n Tracks: [\n\n {chord Chord [\n\n $duration = D_1_2\n\n \"2sus4_7\" Chord ( 2: 4 5 7- )\n\n \"2sus4_7/4\" Chord ( 2: 4 5 7- /4 )\n\n \"1/4\" Chord ( 1: 3 5 /4 )\n\n \"1\" Chord ( 1: 3 5 )\n\n \"2sus2\" Chord ( 2: 2 5 )\n\n \"2-\" Chord ( 2: 3- 5 )\n\n \"4\" Chord ( 4: 3 5 )\n\n \"v:12\"\n\n $duration = _1_4\n\n Chord ( 4: 3 5 )\n\n $duration = _1_2\n\n Chord ( 5: 3 5 ) |\n\n \"v:19\"\n", "file_path": "apps/notation_tool/src/tab/songs/misc/scarborough_fair.rs", "rank": 4, "score": 246017.29986690305 }, { "content": "//photo book 02 - page 68\n\npub fn new_tab() -> Tab {\n\n tab! 
{\n\n \"ef6bb44b-17cf-47e6-a50e-0ab636868334\"\n\n Meta: TabMeta::new(Key::A, Scale::Major, Signature::_4_4, Tempo::Bpm(72))\n\n Tracks: [\n\n {chord Chord [\n\n $duration = _1\n\n \"1\" Chord ( 1: 3 5 )\n\n \"1/4\" Chord ( 1: 3 5 /4 )\n\n \"1/5\" Chord ( 1: 3 5 /5 )\n\n \"1_9\" Chord ( 1: 3 5 9 )\n\n \"2-\" Chord ( 2: 3- 5 7- )\n\n \"3-\" Chord ( 3: 3- 5 7- )\n\n \"4\" Chord ( 4: 3 5 7 )\n\n \"5\" Chord ( 5: 3 5 )\n\n \"b6\" Chord ( %6: 3- 5% 7% )\n\n \"6-\" Chord ( 6: 3- 5 7- )\n\n $duration = _1_2\n\n \"1,4\"\n\n Chord ( 1: 3 5 /4 )\n", "file_path": "apps/notation_tool/src/tab/songs/jay/long_juan_feng.rs", "rank": 5, "score": 243944.91928514768 }, { "content": "pub fn new_tab() -> Tab {\n\n tab! {\n\n \"15ab6b58-1eaf-4075-95ab-f0b35117eddb\"\n\n Meta: TabMeta::new(Key::E, Scale::Minor, Signature::_3_4, Tempo::Bpm(118))\n\n Tracks: [\n\n {chord Chord [\n\n $duration = D_1_2\n\n \"1\" Chord ( 1: 3 5 )\n\n \"1/3\" Chord ( 1: 3 5 /3 )\n\n \"2-\" Chord ( 2: 3- 5 )\n\n \"2o\" Chord ( 2: 3- 5% )\n\n \"3\" Chord ( 3: 3 5 7- )\n\n \"5\" Chord ( 5: 3 5 )\n\n \"6-\" Chord ( 6: 3- 5 )\n\n \"6\" Chord ( 6: 3 5 )\n\n $duration = _1_2\n\n \"2o, 3\" Chord [ ( 2: 3- 5- ) ( 3: 3 5 7- ) ]\n\n \"3, 6-\" Chord [ ( 3: 3 5 7- ) ( 6: 3- 5 ) ]\n\n ]}\n\n {guitar Guitar [\n", "file_path": "apps/notation_tool/src/tab/songs/pu_shu/bai_hua_lin.rs", "rank": 6, "score": 241921.7946995719 }, { "content": "fn setup_camera(mut commands: Commands) {\n\n commands.spawn_bundle(OrthographicCameraBundle::new_2d());\n\n}\n\n\n", "file_path": "crates/notation_bevy/src/app/app.rs", "rank": 7, "score": 234782.5181860876 }, { "content": "pub fn new_tab(name: &str) -> Option<Tab> {\n\n get_tab_map().get(name).map(|x| (x.new_tab)())\n\n}\n", "file_path": "apps/notation_tool/src/tab/mod.rs", "rank": 8, "score": 233300.03756790105 }, { "content": "pub fn jump_to_bar(\n\n jump_to_bar_evts: &mut EventWriter<JumpToBarEvent>,\n\n bar_props: TabBarProps,\n\n) {\n\n 
jump_to_bar_evts.send(JumpToBarEvent::new(bar_props));\n\n}\n\n\n", "file_path": "crates/notation_bevy/src/tab/tab_plugin.rs", "rank": 9, "score": 230413.86061691673 }, { "content": "pub fn update_ui_scale_factor(mut egui_settings: ResMut<EguiSettings>, windows: Res<Windows>) {\n\n if let Some(_window) = windows.get_primary() {\n\n //setting scale_factor like this will make the app crash on windows for some reason\n\n //egui_settings.scale_factor = window.scale_factor();\n\n egui_settings.scale_factor = 1.0;\n\n }\n\n}\n", "file_path": "crates/notation_bevy/src/ui/mod.rs", "rank": 10, "score": 217372.79017390168 }, { "content": "pub fn get_tab_list<'a>() -> Vec<TabInfo<'a>> {\n\n vec![\n\n TabInfo::new(\"test\", test::new_tab),\n\n TabInfo::new(\"beginner:1_right_hand\", beginner::right_hand_1::new_tab),\n\n TabInfo::new(\n\n \"songs:pu_shu:bai_hua_lin\",\n\n songs::pu_shu::bai_hua_lin::new_tab,\n\n ),\n\n TabInfo::new(\n\n \"songs:jay:long_juan_feng\",\n\n songs::jay::long_juan_feng::new_tab,\n\n ),\n\n TabInfo::new(\n\n \"songs:misc:stand_by_me\",\n\n songs::misc::stand_by_me::new_tab,\n\n ),\n\n TabInfo::new(\n\n \"songs:misc:scarborough_fair\",\n\n songs::misc::scarborough_fair::new_tab,\n\n ),\n\n ]\n\n}\n\n\n", "file_path": "apps/notation_tool/src/tab/mod.rs", "rank": 11, "score": 216264.20770257953 }, { "content": "pub fn get_tab_map<'a>() -> HashMap<&'a str, TabInfo<'a>> {\n\n get_tab_list()\n\n .into_iter()\n\n .map(|tab| (tab.name, tab))\n\n .collect()\n\n}\n\n\n", "file_path": "apps/notation_tool/src/tab/mod.rs", "rank": 12, "score": 208574.5108495752 }, { "content": "fn insert_window_descriptor(app: &mut AppBuilder, title: String) {\n\n app.insert_resource(WindowDescriptor {\n\n title,\n\n //width: 1920.,\n\n //height: 1080.,\n\n ..WindowDescriptor::default()\n\n });\n\n}\n\n\n", "file_path": "crates/notation_bevy/src/app/app.rs", "rank": 13, "score": 192798.414152672 }, { "content": "#[proc_macro]\n\npub fn tab(input: TokenStream) -> TokenStream {\n\n 
parse_macro_input!(input as proto::tab::TabDsl)\n\n .into_token_stream()\n\n .into()\n\n}\n", "file_path": "crates/notation_dsl/src/lib.rs", "rank": 14, "score": 180525.00734044003 }, { "content": "pub fn create_entry(\n\n commands: &mut Commands,\n\n assets: &NotationAssets,\n\n theme: &NotationTheme,\n\n settings: &NotationSettings,\n\n entity: Entity,\n\n entry: &Arc<LaneEntry>,\n\n) {\n\n let entry_bundle = EntryBundle::from(entry.clone());\n\n let entry_entity = BevyUtil::spawn_child_bundle(commands, entity, entry_bundle);\n\n insert_entry_extra(commands, assets, theme, settings, entry_entity, entry);\n\n}\n\n\n", "file_path": "crates/notation_bevy/src/entry/entry_plugin.rs", "rank": 15, "score": 175835.63707756653 }, { "content": "pub fn inspector_ui(v: &mut Syllable, ui: &mut egui::Ui, context: &Context) -> bool {\n\n let mut changed = false;\n\n egui::ComboBox::from_id_source(context.id())\n\n .selected_text(format!(\"{:?}\", v))\n\n .show_ui(ui, |ui| {\n\n if ui.selectable_label(false, \"Do\").clicked() {\n\n *v = Syllable::Do;\n\n changed = true;\n\n }\n\n if ui.selectable_label(false, \"Re\").clicked() {\n\n *v = Syllable::Re;\n\n changed = true;\n\n }\n\n if ui.selectable_label(false, \"So\").clicked() {\n\n *v = Syllable::So;\n\n changed = true;\n\n }\n\n });\n\n changed\n\n}\n", "file_path": "crates/notation_bevy/src/inspector/syllable.rs", "rank": 16, "score": 172088.06468800455 }, { "content": "pub fn hex_linear(hex: &str) -> Color {\n\n let color = Color::hex(hex).unwrap();\n\n color.as_rgba_linear()\n\n}\n\n\n\n#[derive(Copy, Clone, PartialEq, Serialize, Deserialize, Debug, Default)]\n\n#[cfg_attr(feature = \"inspector\", derive(Inspectable))]\n\npub struct ThemeColors {\n\n pub syllables: SyllableColors,\n\n pub bar: BarColors,\n\n pub chord: ChordColors,\n\n pub lyrics: LyricsColors,\n\n pub section: SectionColors,\n\n pub strings: StringsColors,\n\n pub rhythm: RhythmColors,\n\n pub mini_map: MiniMapColors,\n\n pub ui: 
UiColors,\n\n}\n\n\n\nimpl ThemeColors {\n", "file_path": "crates/notation_bevy/src/theme/theme_colors.rs", "rank": 17, "score": 171247.29927089295 }, { "content": "fn write_tab(tab: String, output: Option<PathBuf>) {\n\n if let Some(tab) = tab::new_tab(tab.as_str()) {\n\n let pretty = PrettyConfig::new()\n\n .with_separate_tuple_members(true)\n\n .with_enumerate_arrays(true);\n\n let s = to_string_pretty(&tab, pretty).expect(\"Serialization failed\");\n\n match output {\n\n None => println!(\"{}\", s),\n\n Some(path) => {\n\n let mut file = File::create(&path).unwrap();\n\n file.write_all(s.as_bytes()).unwrap();\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "apps/notation_tool/src/main.rs", "rank": 18, "score": 170083.2647764694 }, { "content": "fn load_tab(\n\n mut commands: Commands,\n\n time: Res<Time>,\n\n mut state: ResMut<NotationAppState>,\n\n mut theme: ResMut<NotationTheme>,\n\n entities: Query<Entity, With<GlobalTransform>>,\n\n assets: ResMut<Assets<TabAsset>>,\n\n mut evts: EventWriter<AddTabEvent>,\n\n viewer_query: Query<(Entity, &Arc<NotationViewer>), With<Arc<NotationViewer>>>,\n\n) {\n\n if state.window_width > 0.0 && state.window_height > 0.0 && state.tab.is_none() && state.parse_error.is_none() {\n\n let mut count = 0;\n\n for _ in entities.iter() {\n\n count += 1;\n\n }\n\n //A bit hacky to make sure despawning finished, otherwise might got panic with \"Entity not exist\"\n\n if count > 1 {\n\n if state._despawn_delay_seconds > 0.0 {\n\n state._despawn_delay_seconds -= time.delta_seconds();\n\n println!(\"load_tab(): Waiting to despawn: {} -> {}\", count, state._despawn_delay_seconds);\n", "file_path": "crates/notation_bevy/src/app/app.rs", "rank": 19, "score": 169233.42721302115 }, { "content": "fn on_mouse_dragged(\n\n mut evts: EventReader<MouseDraggedEvent>,\n\n theme: Res<NotationTheme>,\n\n settings: Res<NotationSettings>,\n\n mut tab_bars_query: Query<(\n\n Entity,\n\n &mut Transform,\n\n &Arc<TabBars>,\n\n &LayoutData,\n\n 
&Arc<GridData>,\n\n )>,\n\n) {\n\n if theme._bypass_systems { return; }\n\n for evt in evts.iter() {\n\n if settings.allow_panning {\n\n settings\n\n .layout\n\n .pan_tab_bars(&theme, &mut tab_bars_query, -evt.delta.x, -evt.delta.y);\n\n }\n\n }\n\n}\n", "file_path": "crates/notation_bevy/src/tab/tab_plugin.rs", "rank": 20, "score": 167645.50964529623 }, { "content": "fn on_mouse_clicked(\n\n mut evts: EventReader<MouseClickedEvent>,\n\n theme: Res<NotationTheme>,\n\n mut app_state: ResMut<NotationAppState>,\n\n mut settings: ResMut<NotationSettings>,\n\n tab_state_query: Query<(Entity, &TabState), With<TabState>>,\n\n mini_bar_query: Query<(&Arc<MiniBar>, &LayoutData, &GlobalTransform)>,\n\n button_query: Query<(&Arc<PlayButton>, &LayoutData, &GlobalTransform)>,\n\n rhythm_query: Query<(&Arc<RhythmView>, &LayoutData, &GlobalTransform)>,\n\n chord_query: Query<(&Arc<ChordView>, &LayoutData, &GlobalTransform)>,\n\n bar_query: Query<(&Arc<BarView>, &LayoutData, &GlobalTransform)>,\n\n tab_control_query: Query<(&Arc<TabControl>, &LayoutData, &GlobalTransform)>,\n\n mut jump_to_bar_evts: EventWriter<JumpToBarEvent>,\n\n midi_settings: Res<MidiSettings>,\n\n mut midi_state: ResMut<MidiState>,\n\n mut play_control_evts: EventWriter<PlayControlEvent>,\n\n) {\n\n if theme._bypass_systems { return; }\n\n let mut pos = None;\n\n for evt in evts.iter() {\n", "file_path": "crates/notation_bevy/src/tab/tab_plugin.rs", "rank": 21, "score": 167645.50964529623 }, { "content": "pub fn color_of_hex(hex: &str) -> Color {\n\n let color = Color::hex(hex).unwrap();\n\n color.as_rgba_linear()\n\n}\n\n\n\n#[derive(Copy, Clone, PartialEq, Serialize, Deserialize, Debug, Default)]\n\n#[cfg_attr(feature = \"inspector\", derive(Inspectable))]\n\npub struct BevyUtilsTheme {\n\n pub layout: LayoutTheme,\n\n}\n\n\n\n#[derive(Copy, Clone, PartialEq, Serialize, Deserialize, Debug)]\n\n#[cfg_attr(feature = \"inspector\", derive(Inspectable))]\n\npub struct LayoutTheme {\n\n pub pivot_color: 
Color,\n\n pub anchor_color: Color,\n\n pub border_color: Color,\n\n pub pivot_radius: f32,\n\n pub anchor_radius: f32,\n\n pub border_line_width: f32,\n", "file_path": "crates/notation_bevy_utils/src/dev/theme.rs", "rank": 22, "score": 160867.39498264933 }, { "content": "fn on_switch_tab(\n\n mut evts: EventReader<SwitchTabEvent>,\n\n settings: Res<MidiSettings>,\n\n mut state: ResMut<MidiState>,\n\n mut hub: NonSendMut<MidiHub>,\n\n mut play_control_evts: EventWriter<PlayControlEvent>,\n\n) {\n\n for evt in evts.iter() {\n\n hub.switch_tab(&settings, &mut state, evt.tab.clone());\n\n _do_tick(\n\n &settings,\n\n &mut state,\n\n &mut hub,\n\n &mut play_control_evts,\n\n true,\n\n 0.0,\n\n );\n\n }\n\n}\n\n\n", "file_path": "crates/notation_midi/src/midi_plugin.rs", "rank": 23, "score": 159965.8381763133 }, { "content": "fn on_tab_bars_resized(\n\n mut evts: EventReader<TabBarsResizedEvent>,\n\n mut commands: Commands,\n\n settings: Res<NotationSettings>,\n\n theme: Res<NotationTheme>,\n\n mut tone_note_query: Query<(Entity, &mut ToneNoteData), With<ToneNoteData>>,\n\n mut pick_note_query: Query<(Entity, &mut PickNoteData), With<PickNoteData>>,\n\n mut single_string_query: Query<(Entity, &mut SingleStringData), With<SingleStringData>>,\n\n mut word_text_query: Query<(Entity, &mut WordTextData), With<WordTextData>>,\n\n mut shape_diagram_6_query: Query<(Entity, &mut ShapeDiagramData6), With<ShapeDiagramData6>>,\n\n mut shape_diagram_4_query: Query<(Entity, &mut ShapeDiagramData4), With<ShapeDiagramData4>>,\n\n) {\n\n if theme._bypass_systems { return; }\n\n for evt in evts.iter() {\n\n let bars = &evt.0;\n\n for (entity, mut data) in tone_note_query.iter_mut() {\n\n for (view, layout) in bars.iter() {\n\n if data.bar_props.bar_ordinal == view.bar_props.bar_ordinal {\n\n data.value.bar_size = layout.size.width;\n\n data.update(&mut commands, &theme, entity);\n", "file_path": "crates/notation_bevy/src/entry/entry_plugin.rs", "rank": 25, "score": 156069.24763438333 }, 
{ "content": "fn list_tabs(_verbose: u8) {\n\n let tabs = tab::get_tab_list();\n\n println!(\"\\nTotal Tabs: {}\\n\", tabs.len());\n\n for tab in tabs {\n\n println!(\" {}\", tab.name);\n\n }\n\n}\n\n\n", "file_path": "apps/notation_tool/src/main.rs", "rank": 27, "score": 153834.9131960185 }, { "content": "fn register_inspectors(mut registry: ResMut<InspectableRegistry>) {\n\n registry.register_raw(syllable::inspector_ui);\n\n}\n", "file_path": "crates/notation_bevy/src/inspector/mod.rs", "rank": 28, "score": 129319.30473166419 }, { "content": "pub fn create_tone_notes(\n\n commands: &mut Commands,\n\n assets: &NotationAssets,\n\n theme: &NotationTheme,\n\n settings: &NotationSettings,\n\n entity: Entity,\n\n entry: &LaneEntry,\n\n tone: &Tone,\n\n) {\n\n /* TODO: check whether is the first on in row\n\n if entry.prev_is_tie() {\n\n continue;\n\n }\n\n */\n\n if let Some(lane) = entry.lane() {\n\n let mode: ToneMode = lane.kind.into();\n\n let bar = lane.bar().unwrap();\n\n for note in tone.get_notes() {\n\n let data = ToneNoteData::new(entry, ToneNoteValue::new(&bar, note, mode));\n\n let note_entity = data.create(commands, theme, entity);\n\n if settings.show_melody_syllable && !entry.prev_is_tie() {\n\n theme.texts.melody.spawn_syllable_text(commands, note_entity, assets, settings, &data.value.syllable())\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "crates/notation_bevy/src/tone/tone_systems.rs", "rank": 29, "score": 129253.42286267888 }, { "content": "pub fn on_entry_playing_changed(\n\n mut commands: Commands,\n\n theme: Res<NotationTheme>,\n\n query: Query<(Entity, &EntryPlaying, &Children), Changed<EntryPlaying>>,\n\n mut note_query: Query<(Entity, &mut ToneNoteData)>,\n\n) {\n\n if theme._bypass_systems { return; }\n\n for (_entity, playing, children) in query.iter() {\n\n for child in children.iter() {\n\n if let Ok((entity, mut data)) = note_query.get_mut(*child) {\n\n //println!(\"{:?} -> {:?} -> {:?}\", name, data, playing)\n\n data.value.playing_state 
= playing.value;\n\n data.update(&mut commands, &theme, entity);\n\n }\n\n }\n\n }\n\n}\n", "file_path": "crates/notation_bevy/src/tone/tone_systems.rs", "rank": 30, "score": 129253.42286267888 }, { "content": "pub fn on_entry_playing_changed(\n\n mut commands: Commands,\n\n settings: Res<NotationSettings>,\n\n theme: Res<NotationTheme>,\n\n query: Query<(Entity, &EntryPlaying, &Children), Changed<EntryPlaying>>,\n\n mut note_query: QuerySet<(\n\n Query<(Entity, &mut PickNoteData, &Children)>,\n\n Query<(Entity, &mut PickNoteData)>,\n\n )>,\n\n mut font_query: Query<&mut Text>,\n\n) {\n\n if theme._bypass_systems { return; }\n\n if settings.hide_strings_lane { return; }\n\n for (_entity, playing, children) in query.iter() {\n\n for child in children.iter() {\n\n if let Ok((entity, mut data, note_children)) = note_query.q0_mut().get_mut(*child) {\n\n data.value.playing_state = playing.value;\n\n data.update(&mut commands, &theme, entity);\n\n for child in note_children.iter() {\n\n if let Ok(mut text) = font_query.get_mut(*child) {\n", "file_path": "crates/notation_bevy/src/strings/pick_systems.rs", "rank": 31, "score": 129253.42286267888 }, { "content": "pub fn create_word_text(\n\n commands: &mut Commands,\n\n assets: &NotationAssets,\n\n theme: &NotationTheme,\n\n _settings: &NotationSettings,\n\n entity: Entity,\n\n entry: &LaneEntry,\n\n text: &WordTextValue,\n\n) {\n\n /* TODO: check whether is the first on in row\n\n if entry.prev_is_tie() {\n\n continue;\n\n }\n\n */\n\n let data = WordTextData::new(entry, text.clone());\n\n let text_entity = data.create(commands, theme, entity);\n\n if text.word.text != \"\" {\n\n theme\n\n .texts.lyrics\n\n .spawn_word_text(commands, text_entity, &assets, text.word.text.as_str())\n\n }\n\n}\n\n\n\n/*\n\n Update font looks a bit weird, so not using it for now, leave the codes here in case want to bring it back.\n", "file_path": "crates/notation_bevy/src/word/word_systems.rs", "rank": 32, "score": 129253.42286267888 }, { 
"content": "pub fn on_entry_playing_changed(\n\n mut commands: Commands,\n\n theme: Res<NotationTheme>,\n\n query: Query<(Entity, &EntryPlaying, &Children), Changed<EntryPlaying>>,\n\n mut text_query: Query<(Entity, &mut WordTextData)>,\n\n) {\n\n if theme._bypass_systems { return; }\n\n for (_entity, playing, children) in query.iter() {\n\n for child in children.iter() {\n\n if let Ok((entity, mut data)) = text_query.get_mut(*child) {\n\n data.value.playing_state = playing.value;\n\n data.update(&mut commands, &theme, entity);\n\n }\n\n }\n\n }\n\n}\n", "file_path": "crates/notation_bevy/src/word/word_systems.rs", "rank": 33, "score": 129253.42286267888 }, { "content": "pub fn on_entry_playing_changed_with_font(\n\n mut commands: Commands,\n\n theme: Res<NotationTheme>,\n\n query: Query<(Entity, &EntryPlaying, &Children), Changed<EntryPlaying>>,\n\n mut text_query: QuerySet<(\n\n Query<(Entity, &mut WordTextData, &Children)>,\n\n Query<(Entity, &mut WordTextData)>,\n\n )>,\n\n mut font_query: Query<&mut Text>,\n\n) {\n\n for (_entity, playing, children) in query.iter() {\n\n for child in children.iter() {\n\n if let Ok((entity, mut data, text_children)) = text_query.q0_mut().get_mut(*child) {\n\n data.value.playing_state = playing.value;\n\n data.update(&mut commands, &theme, entity);\n\n for child in text_children.iter() {\n\n if let Ok(mut text) = font_query.get_mut(*child) {\n\n let font_size = theme.texts.lyrics.word_font_size.of_state(&text.value.playing_state);\n\n BevyUtil::set_text_size_color(&mut text, data.calc_text_font_size(&theme), data.calc_text_color(&theme));\n\n }\n\n }\n\n } else if let Ok((entity, mut data)) = text_query.q1_mut().get_mut(*child) {\n\n data.value.playing_state = playing.value;\n\n data.update(&mut commands, &theme, entity);\n\n }\n\n }\n\n }\n\n}\n\n */\n\n\n", "file_path": "crates/notation_bevy/src/word/word_systems.rs", "rank": 34, "score": 128036.06365663405 }, { "content": "fn check_synth_buffer(mut hub: NonSendMut<MidiHub>) 
{\n\n hub.check_synth_buffer();\n\n}\n\n\n\nimpl MidiHub {\n\n pub fn get_synth_buffer(&mut self, settings: &MidiSettings) -> Option<DoubleAudioBuffer> {\n\n self.check_output(settings);\n\n if let Some(synth) = &self.output_synth {\n\n synth.get_buffer()\n\n } else {\n\n None\n\n }\n\n }\n\n pub fn check_synth_buffer(&mut self) {\n\n if let Some(synth) = self.output_synth.as_mut() {\n\n synth.check_buffer();\n\n }\n\n }\n\n}\n", "file_path": "crates/notation_midi/src/native/mod.rs", "rank": 35, "score": 127338.96695402311 }, { "content": "pub fn get_lane_entry<T, F: Fn(&LaneEntry) -> Option<T>>(\n\n v: &[Arc<LaneEntry>],\n\n predicate: &F,\n\n) -> Option<T> {\n\n for entry in v.iter() {\n\n if let Some(result) = predicate(entry.as_ref()) {\n\n return Some(result);\n\n }\n\n }\n\n None\n\n}\n", "file_path": "crates/notation_model/src/util.rs", "rank": 36, "score": 124788.79835499829 }, { "content": "pub fn get_track_entry<T, F: Fn(&ModelEntry) -> Option<T>>(\n\n v: &[Arc<ModelEntry>],\n\n predicate: &F,\n\n) -> Option<T> {\n\n for entry in v.iter() {\n\n if let Some(result) = predicate(entry.as_ref()) {\n\n return Some(result);\n\n }\n\n }\n\n None\n\n}\n", "file_path": "crates/notation_model/src/util.rs", "rank": 37, "score": 124788.79835499829 }, { "content": "pub trait ShapeOp<Theme, S: Shape> : Clone + Send + Sync + 'static {\n\n fn get_shape(&self, theme: &Theme) -> S;\n\n fn create(&self, commands: &mut Commands, theme: &Theme, parent: Entity) -> Entity {\n\n let shape = self.get_shape(theme);\n\n let shape_entity = shape.create(commands, parent);\n\n commands.entity(shape_entity).insert(self.clone());\n\n shape_entity\n\n }\n\n fn update(&self, commands: &mut Commands, theme: &Theme, entity: Entity) {\n\n let shape = self.get_shape(theme);\n\n shape.update(commands, entity);\n\n }\n\n}\n", "file_path": "crates/notation_bevy_utils/src/shape/shape.rs", "rank": 38, "score": 123372.03241054103 }, { "content": "fn on_window_resized(\n\n mut window: 
ResMut<WindowDescriptor>,\n\n mut evts: EventReader<WindowResized>,\n\n mut app_state: ResMut<NotationAppState>,\n\n mut window_resized_evts: EventWriter<WindowResizedEvent>,\n\n) {\n\n if app_state.tab.is_none() {\n\n return;\n\n }\n\n for evt in evts.iter() {\n\n if evt.width as usize != window.width as usize\n\n || evt.height as usize != window.height as usize\n\n {\n\n println!(\"on_window_resized(): {} {} -> {} {} \", window.width, window.height, evt.width, evt.height);\n\n window.width = evt.width;\n\n window.height = evt.height;\n\n app_state.window_width = evt.width;\n\n app_state.window_height = evt.height;\n\n app_state.scale_factor_override = window.scale_factor_override;\n\n window_resized_evts.send(WindowResizedEvent());\n\n }\n\n }\n\n}\n", "file_path": "crates/notation_bevy/src/app/app.rs", "rank": 39, "score": 123188.10272238014 }, { "content": "pub fn get_track_entry_<T, F: Fn(usize, &ModelEntry) -> Option<T>>(\n\n v: &[Arc<ModelEntry>],\n\n predicate: &F,\n\n) -> Option<T> {\n\n for (index, entry) in v.iter().enumerate() {\n\n if let Some(result) = predicate(index, entry.as_ref()) {\n\n return Some(result);\n\n }\n\n }\n\n None\n\n}\n\nimpl Track {\n\n pub fn get_entry<T, F: Fn(&ModelEntry) -> Option<T>>(&self, predicate: &F) -> Option<T> {\n\n get_track_entry(&self.entries, predicate)\n\n }\n\n}\n\nimpl BarLayer {\n\n pub fn get_track_entry<T, F: Fn(&ModelEntry) -> Option<T>>(&self, predicate: &F) -> Option<T> {\n\n self.track.get_entry(predicate)\n\n }\n", "file_path": "crates/notation_model/src/util.rs", "rank": 40, "score": 122253.74758576835 }, { "content": "pub fn get_lane_entry_<T, F: Fn(usize, &LaneEntry) -> Option<T>>(\n\n v: &[Arc<LaneEntry>],\n\n predicate: &F,\n\n) -> Option<T> {\n\n for (index, entry) in v.iter().enumerate() {\n\n if let Some(result) = predicate(index, entry.as_ref()) {\n\n return Some(result);\n\n }\n\n }\n\n None\n\n}\n\nimpl BarLane {\n\n pub fn get_entry<T, F: Fn(&LaneEntry) -> Option<T>>(&self, predicate: &F) -> 
Option<T> {\n\n get_lane_entry(&self.entries, predicate)\n\n }\n\n}\n\nimpl TabBar {\n\n pub fn get_entry_in_lanes<T, F: Fn(&LaneEntry) -> Option<T>>(\n\n &self,\n\n predicate: &F,\n\n ) -> Option<T> {\n\n for lane in self.lanes.iter() {\n\n if let Some(x) = lane.get_entry(predicate) {\n\n return Some(x);\n\n }\n\n }\n\n None\n\n }\n\n}\n", "file_path": "crates/notation_model/src/util.rs", "rank": 41, "score": 122253.74758576835 }, { "content": "fn handle_mouse_inputs(\n\n windows: Res<Windows>,\n\n mouse_input: Res<Input<MouseButton>>,\n\n app_state: Res<NotationAppState>,\n\n settings: Res<NotationSettings>,\n\n mut mouse_motion_events: EventReader<MouseMotion>,\n\n mut mouse_wheel_input: EventReader<bevy::input::mouse::MouseWheel>,\n\n mut mouse_clicked: EventWriter<MouseClickedEvent>,\n\n mut mouse_dragged: EventWriter<MouseDraggedEvent>,\n\n) {\n\n if app_state.tab.is_none() {\n\n return;\n\n }\n\n if mouse_input.just_released(MouseButton::Left) {\n\n windows\n\n .get_primary()\n\n .and_then(|x| x.cursor_position())\n\n .map(|cursor_position| {\n\n //println!(\"handle_inputs() -> MouseClickedEvent({:?})\", cursor_position);\n\n mouse_clicked.send(MouseClickedEvent { cursor_position });\n", "file_path": "crates/notation_bevy/src/app/app.rs", "rank": 42, "score": 122046.09931705118 }, { "content": "fn setup_window_size(\n\n window: Res<WindowDescriptor>,\n\n mut app_state: ResMut<NotationAppState>,\n\n) {\n\n #[cfg(target_arch = \"wasm32\")]\n\n let (width, height) = crate::wasm::bevy_web_fullscreen::get_viewport_size();\n\n\n\n #[cfg(not(target_arch = \"wasm32\"))]\n\n let (width, height) = (window.width, window.height);\n\n\n\n println!(\"setup_window_size(): {} {} \", width, height);\n\n app_state.window_width = width;\n\n app_state.window_height = height;\n\n}\n\n\n", "file_path": "crates/notation_bevy/src/app/app.rs", "rank": 43, "score": 122046.09931705118 }, { "content": "fn handle_keyboard_inputs(\n\n keyboard_input: Res<Input<KeyCode>>,\n\n mut 
app_state: ResMut<NotationAppState>,\n\n mut settings: ResMut<NotationSettings>,\n\n mut theme: ResMut<NotationTheme>,\n\n midi_settings: Res<MidiSettings>,\n\n mut midi_state: ResMut<MidiState>,\n\n mut play_control_evts: EventWriter<PlayControlEvent>,\n\n mut window_resized_evts: EventWriter<WindowResizedEvent>,\n\n mut jump_to_bar_evts: EventWriter<JumpToBarEvent>,\n\n) {\n\n if app_state.tab.is_none() {\n\n return;\n\n }\n\n if keyboard_input.just_released(KeyCode::LControl) {\n\n app_state.hide_control = !app_state.hide_control;\n\n if !ControlView::HUD_MODE {\n\n window_resized_evts.send(WindowResizedEvent());\n\n }\n\n } else if keyboard_input.just_released(KeyCode::Space) {\n", "file_path": "crates/notation_bevy/src/app/app.rs", "rank": 44, "score": 122046.09931705118 }, { "content": "fn handle_touch_inputs(\n\n windows: Res<Windows>,\n\n touch_input: Res<Touches>,\n\n mut app_state: ResMut<NotationAppState>,\n\n mut mouse_clicked: EventWriter<MouseClickedEvent>,\n\n //mut mouse_dragged: EventWriter<MouseDraggedEvent>,\n\n) {\n\n if app_state.tab.is_none() {\n\n return;\n\n }\n\n for (_index, finger) in touch_input.iter().enumerate() {\n\n if touch_input.just_pressed(finger.id()) {\n\n windows\n\n .get_primary()\n\n .map(|w| (w.physical_width() as f32, w.physical_height() as f32))\n\n .map(| (physical_width, physical_height) | {\n\n /*\n\n Super hacky way to get the touch input in mobile browsers (WASM).\n\n winit not support it yet, using a pull request version, which seems to have some issues\n\n as well, also the touch event triggering is very unreliable during my test, but at least\n", "file_path": "crates/notation_bevy/src/app/app.rs", "rank": 45, "score": 122046.09931705118 }, { "content": "pub fn get_viewport_size() -> (f32, f32) {\n\n let web_window = web_sys::window().expect(\"could not get window\");\n\n let document_element = web_window\n\n .document()\n\n .expect(\"could not get document\")\n\n .document_element()\n\n .expect(\"could not get 
document element\");\n\n\n\n let width = document_element.client_width();\n\n let height = document_element.client_height();\n\n web_log!(\"bevy_web_fullscreen::get_viewport_size() -> {}, {}\", width, height);\n\n\n\n (width as f32, height as f32)\n\n}\n\n\n", "file_path": "crates/notation_bevy/src/wasm/bevy_web_fullscreen.rs", "rank": 46, "score": 118870.56778076543 }, { "content": "#[proc_macro]\n\npub fn slice(input: TokenStream) -> TokenStream {\n\n parse_macro_input!(input as proto::slice::SliceDsl)\n\n .into_token_stream()\n\n .into()\n\n}\n\n\n", "file_path": "crates/notation_dsl/src/lib.rs", "rank": 47, "score": 118603.98167848578 }, { "content": "#[proc_macro]\n\npub fn track(input: TokenStream) -> TokenStream {\n\n parse_macro_input!(input as proto::track::TrackDsl)\n\n .into_token_stream()\n\n .into()\n\n}\n\n\n", "file_path": "crates/notation_dsl/src/lib.rs", "rank": 48, "score": 118603.98167848578 }, { "content": "#[proc_macro]\n\npub fn form(input: TokenStream) -> TokenStream {\n\n parse_macro_input!(input as proto::form::FormDsl)\n\n .into_token_stream()\n\n .into()\n\n}\n\n\n", "file_path": "crates/notation_dsl/src/lib.rs", "rank": 49, "score": 118603.98167848578 }, { "content": "#[proc_macro]\n\npub fn bar(input: TokenStream) -> TokenStream {\n\n parse_macro_input!(input as proto::bar::BarDsl)\n\n .into_token_stream()\n\n .into()\n\n}\n\n\n", "file_path": "crates/notation_dsl/src/lib.rs", "rank": 50, "score": 118603.98167848578 }, { "content": "#[proc_macro]\n\npub fn entry(input: TokenStream) -> TokenStream {\n\n parse_macro_input!(input as proto::entry::EntryDsl)\n\n .into_token_stream()\n\n .into()\n\n}\n\n\n", "file_path": "crates/notation_dsl/src/lib.rs", "rank": 51, "score": 118603.98167848578 }, { "content": "#[proc_macro]\n\npub fn section(input: TokenStream) -> TokenStream {\n\n parse_macro_input!(input as proto::section::SectionDsl)\n\n .into_token_stream()\n\n .into()\n\n}\n\n\n", "file_path": "crates/notation_dsl/src/lib.rs", "rank": 
52, "score": 118603.98167848578 }, { "content": "fn main() {\n\n match Args::from_args() {\n\n Args::WriteTab { tab, output } => write_tab(tab, output),\n\n Args::ListTabs { verbose } => list_tabs(verbose),\n\n };\n\n}\n", "file_path": "apps/notation_tool/src/main.rs", "rank": 53, "score": 116398.40837526773 }, { "content": "#[bevy_main]\n\nfn main() {\n\n #[cfg(target_arch = \"wasm32\")]\n\n let tabs = vec![notation_bevy::prelude::NotationApp::get_tab_from_url()\n\n .unwrap_or(\"beginner/1_right_hand.ron\".to_owned())];\n\n\n\n #[cfg(not(target_arch = \"wasm32\"))]\n\n let tabs = vec![\n\n \"songs/misc/scarborough_fair.ron\".to_owned(),\n\n \"songs/misc/stand_by_me.ron\".to_owned(),\n\n \"songs/jay/long_juan_feng.ron\".to_owned(),\n\n \"songs/pu_shu/bai_hua_lin.ron\".to_owned(),\n\n \"beginner/1_right_hand.ron\".to_owned(),\n\n \"test.ron\".to_owned(),\n\n ];\n\n notation_bevy::prelude::NotationViewer::run(tabs);\n\n}\n", "file_path": "apps/notation_viewer/src/main.rs", "rank": 54, "score": 116398.40837526773 }, { "content": "fn do_tick(\n\n settings: Res<MidiSettings>,\n\n mut state: ResMut<MidiState>,\n\n mut hub: NonSendMut<MidiHub>,\n\n mut clock: ResMut<PlayClock>,\n\n mut play_control_evts: EventWriter<PlayControlEvent>,\n\n) {\n\n clock.tick();\n\n //println!(\"do_tick() -> {}\", clock.delta_seconds());\n\n _do_tick(\n\n &settings,\n\n &mut state,\n\n &mut hub,\n\n &mut play_control_evts,\n\n false,\n\n clock.delta_seconds(),\n\n );\n\n}\n", "file_path": "crates/notation_midi/src/midi_plugin.rs", "rank": 55, "score": 115281.85271747239 }, { "content": "fn _do_tick(\n\n settings: &MidiSettings,\n\n state: &mut MidiState,\n\n hub: &mut MidiHub,\n\n play_control_evts: &mut EventWriter<PlayControlEvent>,\n\n jumped: bool,\n\n delta_seconds: f32,\n\n) {\n\n let tick_result = state.tick(settings, hub, jumped, delta_seconds);\n\n if jumped || tick_result.changed {\n\n play_control_evts.send(PlayControlEvent::on_tick(\n\n state.play_control.position,\n\n 
tick_result,\n\n ));\n\n }\n\n}\n\n\n", "file_path": "crates/notation_midi/src/midi_plugin.rs", "rank": 56, "score": 115281.85271747239 }, { "content": "fn on_jump_to_bar(\n\n mut evts: EventReader<JumpToBarEvent>,\n\n settings: Res<MidiSettings>,\n\n mut state: ResMut<MidiState>,\n\n mut hub: NonSendMut<MidiHub>,\n\n mut play_control_evts: EventWriter<PlayControlEvent>,\n\n) {\n\n let mut bar_props = None;\n\n for evt in evts.iter() {\n\n bar_props = Some(evt.bar_props);\n\n }\n\n if let Some(bar_props) = bar_props {\n\n state.jump_to_bar(&settings, &mut hub, bar_props);\n\n _do_tick(\n\n &settings,\n\n &mut state,\n\n &mut hub,\n\n &mut play_control_evts,\n\n true,\n\n 0.0,\n\n );\n\n }\n\n}\n\n\n", "file_path": "crates/notation_midi/src/midi_plugin.rs", "rank": 57, "score": 113920.51368567231 }, { "content": "fn on_play_control_evt(\n\n settings: Res<MidiSettings>,\n\n mut state: ResMut<MidiState>,\n\n mut hub: NonSendMut<MidiHub>,\n\n mut evts: EventReader<PlayControlEvent>,\n\n) {\n\n for evt in evts.iter() {\n\n match evt {\n\n PlayControlEvent::OnPlayState(play_state) => {\n\n state.seek_position = None;\n\n if !play_state.is_playing() {\n\n state.init_channels(&settings, &mut hub);\n\n }\n\n }\n\n _ => (),\n\n }\n\n }\n\n}\n\n\n", "file_path": "crates/notation_midi/src/midi_plugin.rs", "rank": 60, "score": 112597.79986084261 }, { "content": "fn on_layout_data_changed(\n\n mut commands: Commands,\n\n theme: Res<BevyUtilsTheme>,\n\n layout_query: Query<(Entity, &LayoutData), Changed<LayoutData>>,\n\n) {\n\n for (entity, layout) in layout_query.iter() {\n\n layout.update(&mut commands, &theme, entity);\n\n }\n\n}\n", "file_path": "crates/notation_bevy_utils/src/dev/plugin.rs", "rank": 62, "score": 111312.09037073492 }, { "content": "fn on_bar_playing_changed(\n\n mut commands: Commands,\n\n theme: Res<NotationTheme>,\n\n mut query: Query<(Entity, &BarPlaying, &mut MiniBarData), Changed<BarPlaying>>,\n\n) {\n\n for (entity, playing, mut data) in 
query.iter_mut() {\n\n //println!(\"{:?} -> {:?} -> {:?}\", name, data, playing)\n\n data.value.playing_state = playing.value;\n\n data.update(&mut commands, &theme, entity);\n\n }\n\n}\n", "file_path": "crates/notation_bevy/src/mini/mini_plugin.rs", "rank": 64, "score": 111312.09037073492 }, { "content": "fn insert_entry_extra(\n\n commands: &mut Commands,\n\n assets: &NotationAssets,\n\n theme: &NotationTheme,\n\n settings: &NotationSettings,\n\n entity: Entity,\n\n entry: &LaneEntry,\n\n) {\n\n match entry.model.proto.as_ref() {\n\n ProtoEntry::Core(core_entry) => {\n\n insert_core_entry_extra(commands, assets, theme, settings, entity, entry, core_entry)\n\n }\n\n ProtoEntry::Lyric(lyric_entry) => LyricsPlugin::insert_entry_extra(\n\n commands,\n\n assets,\n\n theme,\n\n settings,\n\n entity,\n\n entry,\n\n lyric_entry,\n", "file_path": "crates/notation_bevy/src/entry/entry_plugin.rs", "rank": 65, "score": 111312.09037073492 }, { "content": "fn on_add_layout_data(\n\n mut commands: Commands,\n\n theme: Res<BevyUtilsTheme>,\n\n layout_query: Query<(Entity, &LayoutData), Added<LayoutData>>,\n\n) {\n\n for (entity, layout) in layout_query.iter() {\n\n layout.create(&mut commands, &theme, entity);\n\n }\n\n}\n\n\n", "file_path": "crates/notation_bevy_utils/src/dev/plugin.rs", "rank": 66, "score": 111312.09037073492 }, { "content": "fn insert_core_entry_extra(\n\n commands: &mut Commands,\n\n assets: &NotationAssets,\n\n theme: &NotationTheme,\n\n settings: &NotationSettings,\n\n entity: Entity,\n\n entry: &LaneEntry,\n\n core_entry: &CoreEntry,\n\n) {\n\n match core_entry {\n\n CoreEntry::Tie => (),\n\n CoreEntry::Rest(_) => (),\n\n CoreEntry::Tone(tone, _) => {\n\n commands\n\n .entity(entity)\n\n .insert_bundle(ToneBundle::from(*tone));\n\n crate::tone::tone_systems::create_tone_notes(\n\n commands, assets, theme, settings, entity, entry, tone,\n\n );\n\n }\n\n CoreEntry::Chord(chord, _) => {\n\n commands\n\n .entity(entity)\n\n 
.insert_bundle(ChordBundle::from(*chord));\n\n }\n\n };\n\n}\n\n\n", "file_path": "crates/notation_bevy/src/entry/entry_plugin.rs", "rank": 67, "score": 110061.85378327992 }, { "content": "\n\nuse super::tab_asset::TabAssetLoader;\n\n\n\nuse super::tab_chords::TabChords;\n\nuse super::tab_content::TabContent;\n\nuse super::tab_control::TabControl;\n\nuse super::tab_events::{RhythmViewDoLayoutEvent, TabBarsDoLayoutEvent, TabBarsResizedEvent, TabBarsResizedPreEvent, TabChordsDoLayoutEvent, TabContentDoLayoutEvent, TabControlDoLayoutEvent, TabHeaderDoLayoutEvent, TabViewDoLayoutEvent};\n\nuse super::tab_header::TabHeader;\n\nuse super::tab_view::TabView;\n\n\n\npub struct TabPlugin;\n\n\n\nimpl Plugin for TabPlugin {\n\n fn build(&self, app: &mut AppBuilder) {\n\n TabViewDoLayoutEvent::setup(app);\n\n TabContentDoLayoutEvent::setup(app);\n\n TabHeaderDoLayoutEvent::setup(app);\n\n TabControlDoLayoutEvent::setup(app);\n\n TabChordsDoLayoutEvent::setup(app);\n\n TabBarsDoLayoutEvent::setup(app);\n", "file_path": "crates/notation_bevy/src/tab/tab_plugin.rs", "rank": 68, "score": 103355.24522205559 }, { "content": " // Not using GuitarView here, since it 's y position been changed to adjust with capo position\n\n for (_tab_control, layout, global_transform) in tab_control_query.iter() {\n\n if layout.is_pos_inside(pos, global_transform) {\n\n Control::seek_forward(&midi_settings, &mut midi_state, &mut play_control_evts);\n\n return;\n\n }\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "crates/notation_bevy/src/tab/tab_plugin.rs", "rank": 69, "score": 103349.80250967623 }, { "content": "use std::sync::Arc;\n\n\n\nuse bevy::prelude::*;\n\nuse notation_bevy_utils::prelude::{LayoutData, GridData};\n\nuse notation_midi::prelude::{JumpToBarEvent, MidiState, PlayControlEvent, MidiSettings};\n\nuse notation_model::prelude::TabBarProps;\n\n\n\nuse crate::bar::bar_view::BarView;\n\nuse crate::chord::chord_view::ChordView;\n\nuse crate::mini::mini_bar::MiniBar;\n\n\n\nuse 
crate::play::play_button::PlayButton;\n\nuse crate::prelude::{\n\n AddTabEvent, MouseClickedEvent, MouseDraggedEvent, NotationAppState, NotationAssetsStates,\n\n NotationSettings, NotationTheme, TabAsset, TabBars, TabState,\n\n};\n\nuse crate::rhythm::rhythm_bar::{RhythmBarData};\n\nuse crate::rhythm::rhythm_view::RhythmView;\n\nuse crate::viewer::control::Control;\n\nuse crate::viewer::control_view::ControlView;\n", "file_path": "crates/notation_bevy/src/tab/tab_plugin.rs", "rank": 70, "score": 103346.6088423767 }, { "content": " pos = Some(app_state.convert_pos(evt.cursor_position));\n\n }\n\n if let Some(pos) = pos {\n\n if !app_state.hide_control {\n\n if app_state.window_width / 2.0 - pos.x > ControlView::calc_width(app_state.window_width) {\n\n app_state.hide_control = true;\n\n }\n\n } else {\n\n println!(\"tab_plugin::on_mouse_clicked() -> {:?}\", pos);\n\n for (mini_bar, layout, global_transform) in mini_bar_query.iter() {\n\n if layout.is_pos_inside(pos, global_transform) {\n\n jump_to_bar(&mut jump_to_bar_evts, mini_bar.bar_props);\n\n return;\n\n }\n\n }\n\n for (button, layout, global_transform) in button_query.iter() {\n\n if layout.is_pos_inside(pos, global_transform) {\n\n match button.action {\n\n crate::play::play_button::PlayButtonAction::PlayPause =>\n\n Control::play_or_pause(&mut midi_state, &mut play_control_evts),\n", "file_path": "crates/notation_bevy/src/tab/tab_plugin.rs", "rank": 71, "score": 103344.95782854306 }, { "content": " RhythmViewDoLayoutEvent::setup(app);\n\n app.add_event::<AddTabEvent>();\n\n app.add_event::<TabBarsResizedEvent>();\n\n app.add_event::<TabBarsResizedPreEvent>();\n\n app.add_asset::<TabAsset>();\n\n app.init_asset_loader::<TabAssetLoader>();\n\n app.add_system_set(\n\n SystemSet::on_update(NotationAssetsStates::Loaded)\n\n .with_system(on_mouse_clicked.system())\n\n .with_system(on_mouse_dragged.system())\n\n .with_system(TabView::do_layout.system())\n\n .with_system(TabContent::do_layout.system())\n\n 
.with_system(TabHeader::do_layout.system())\n\n .with_system(TabControl::do_layout.system())\n\n .with_system(RhythmView::do_layout.system())\n\n .with_system(RhythmBarData::update_rhythm.system())\n\n .with_system(TabChords::do_layout.system())\n\n .with_system(TabBars::on_resized_pre.system())\n\n .with_system(TabBars::do_layout.system()),\n\n );\n\n }\n\n}\n\n\n", "file_path": "crates/notation_bevy/src/tab/tab_plugin.rs", "rank": 72, "score": 103342.80587421292 }, { "content": " crate::play::play_button::PlayButtonAction::Stop =>\n\n Control::stop(&mut midi_state, &mut play_control_evts),\n\n crate::play::play_button::PlayButtonAction::LoopMode => {\n\n settings.should_loop = !settings.should_loop;\n\n Control::sync_should_loop(&settings, &mut midi_state, &mut play_control_evts)\n\n }\n\n crate::play::play_button::PlayButtonAction::SetBegin =>\n\n Control::set_begin_bar_ordinal(&mut midi_state, &mut play_control_evts),\n\n crate::play::play_button::PlayButtonAction::SetEnd =>\n\n Control::set_end_bar_ordinal(&mut midi_state, &mut play_control_evts),\n\n crate::play::play_button::PlayButtonAction::Clear =>\n\n Control::clear_begin_end(&mut midi_state, &mut play_control_evts),\n\n }\n\n return;\n\n }\n\n }\n\n for (_rhythm_view, layout, global_transform) in rhythm_query.iter() {\n\n if layout.is_pos_inside(pos, global_transform) {\n\n if app_state.hide_control {\n\n app_state.hide_control = false;\n", "file_path": "crates/notation_bevy/src/tab/tab_plugin.rs", "rank": 73, "score": 103341.92707730949 }, { "content": " }\n\n return;\n\n }\n\n }\n\n for (chord, layout, global_transform) in chord_query.iter() {\n\n if layout.is_pos_inside(pos, global_transform) {\n\n let position =\n\n TabState::get_position(&tab_state_query, chord.chord.tab().map(|x| x.uuid));\n\n if let Some(next_bar) = chord.chord.search_next(true, position) {\n\n jump_to_bar(&mut jump_to_bar_evts, next_bar.props);\n\n }\n\n return;\n\n }\n\n }\n\n for (bar, layout, global_transform) in 
bar_query.iter() {\n\n if layout.is_pos_inside(pos, global_transform) {\n\n jump_to_bar(&mut jump_to_bar_evts, bar.bar_props);\n\n return;\n\n }\n\n }\n", "file_path": "crates/notation_bevy/src/tab/tab_plugin.rs", "rank": 74, "score": 103341.62228107858 }, { "content": "use std::collections::HashMap;\n\n\n\nuse notation_proto::prelude::*;\n\n\n\npub mod beginner;\n\npub mod songs;\n\npub mod test;\n\n\n\npub struct TabInfo<'a> {\n\n pub name: &'a str,\n\n pub new_tab: fn() -> Tab,\n\n}\n\n\n\nimpl<'a> TabInfo<'a> {\n\n pub fn new(name: &'a str, new_tab: fn() -> Tab) -> Self {\n\n Self { name, new_tab }\n\n }\n\n}\n\n\n", "file_path": "apps/notation_tool/src/tab/mod.rs", "rank": 75, "score": 95161.62194853775 }, { "content": "use notation_dsl::tab;\n\nuse notation_proto::prelude::*;\n\n\n", "file_path": "apps/notation_tool/src/tab/test.rs", "rank": 76, "score": 95152.64341178373 }, { "content": " ]\n\n Sections: [\n\n {\"A\" Verse [\n\n {\n\n chord [ \"6-\" 1 ]\n\n guitar [ \"Em\" 1 ; \"picks\" | ]\n\n } {\n\n chord [ \"6-\" 1 ]\n\n guitar [ \"Em\" 1 ; \"picks\" | ]\n\n } {\n\n chord [ \"1\" 1 ]\n\n guitar [ \"G\" 1 ; \"picks\" | ]\n\n } {\n\n chord [ \"1:246\" 1 ]\n\n guitar [ \"G\" 1 ; \"picks\" | ]\n\n }\n\n ]}\n\n ]\n\n Form: \"A\" \"A\"\n\n }\n\n}\n", "file_path": "apps/notation_tool/src/tab/test.rs", "rank": 77, "score": 95142.19250883962 }, { "content": "pub mod misc;\n\npub mod jay;\n\npub mod pu_shu;\n", "file_path": "apps/notation_tool/src/tab/songs/mod.rs", "rank": 78, "score": 93758.29655374806 }, { "content": "pub mod right_hand_1;\n", "file_path": "apps/notation_tool/src/tab/beginner/mod.rs", "rank": 79, "score": 93757.48597630902 }, { "content": "use notation_dsl::tab;\n\nuse notation_proto::prelude::*;\n\n\n", "file_path": "apps/notation_tool/src/tab/songs/misc/stand_by_me.rs", "rank": 80, "score": 92414.33155761135 }, { "content": "use notation_dsl::tab;\n\nuse notation_proto::prelude::*;\n\n\n", "file_path": 
"apps/notation_tool/src/tab/beginner/right_hand_1.rs", "rank": 81, "score": 92414.33155761135 }, { "content": "pub mod stand_by_me;\n\npub mod scarborough_fair;", "file_path": "apps/notation_tool/src/tab/songs/misc/mod.rs", "rank": 82, "score": 92408.90214009173 }, { "content": "pub mod long_juan_feng;\n", "file_path": "apps/notation_tool/src/tab/songs/jay/mod.rs", "rank": 83, "score": 92408.27532253666 }, { "content": " \"v3:1\" Tone [ @ 6 _** 3, 5, 6 6 @ ] |\n\n \"v3:2\" Tone [ @ 5* _**+ ] |\n\n \"v3:3\" Tone [ _*+ 2 3 3 2* ] |\n\n \"v3:4\" Tone [ 1* _*+ 1 2 3 ] |\n\n \"v3:5\" Tone [ 1*+ _** 3 ] |\n\n \"v3:6\" Tone [ 3 2 1*+ 1 2* ] |\n\n \"v3:7\" Tone [ 1* _**+ ] |\n\n \"v3:8\" Tone [ _** _ 3 5 6 @ ] |\n\n \"v4:1\" Tone [ @ 6 _*+ 3 6+ _, ] |\n\n \"v4:2\" Tone [ @ 6* _ 5 6 5* 1, 2, ] |\n\n \"v4:3\" Tone [ 3 _*+ 1 2* ] |\n\n \"v4:4\" Tone [ 1* _*+ 1 2 3 ] |\n\n \"v4:5\" Tone [ 1*+ _* 1 3 2 ] |\n\n \"v4:6\" Tone [ 1*+ _* 3 2* ] |\n\n \"v4:7\" Tone [ 1* _** _ 5 ] |\n\n \"v4:8\" Tone [ 6* 5* ^1* 7* ] |\n\n ]}\n\n ]\n\n Sections: [\n\n {intro Intro [\n", "file_path": "apps/notation_tool/src/tab/songs/misc/stand_by_me.rs", "rank": 84, "score": 92403.88065466724 }, { "content": " vocal [ \"v1:1\" | @ 1 ; \"v2:1\" | @ 2 ; \"v3:1\" | @ 3 ; \"v4:1\" | @ 4 ]\n\n } {\n\n chord [ \"1\" 1 ]\n\n guitar [ \"A\" 1 ; \"i:2\" | ]\n\n lyrics [ \"v1:2\" | @ 1 ; \"v2:2\" | @ 2 ; \"v3:2\" | @ 3 ; \"v4:2\" | @ 4 ]\n\n vocal [ \"v1:2\" | @ 1 ; \"v2:2\" | @ 2 ; \"v3:2\" | @ 3 ; \"v4:2\" | @ 4 ]\n\n } {\n\n chord [ \"6-\" 1 ]\n\n guitar [ \"#Fm\" 1 ; \"i:3\" | ]\n\n lyrics [ \"v1:3\" | @ 1 ; \"v2:3\" | @ 2 ; \"v3:3\" | @ 3 ; \"v4:3\" | @ 4 ]\n\n vocal [ \"v1:3\" | @ 1 ; \"v2:3\" | @ 2 ; \"v3:3\" | @ 3 ; \"v4:3\" | @ 4 ]\n\n } {\n\n chord [ \"6-\" 1 ]\n\n guitar [ \"#Fm\" 1 ; \"i:4\" | ]\n\n lyrics [ \"v1:4\" | @ 1 ; \"v2:4\" | @ 2 ; \"v3:4\" | @ 3 ; \"v4:4\" | @ 4 ]\n\n vocal [ \"v1:4\" | @ 1 ; \"v2:4\" | @ 2 ; \"v3:4\" | @ 3 ; \"v4:4\" | @ 4 ]\n\n }\n\n {\n\n chord [ \"4\" 1 ]\n\n guitar [ 
\"D\" 1 ; \"i:5\" | ]\n", "file_path": "apps/notation_tool/src/tab/songs/misc/stand_by_me.rs", "rank": 85, "score": 92403.88065466724 }, { "content": " \"v1:5\" Tone [ 2+ _* _+ 1 3 2 @ ] |\n\n \"v1:6\" Tone [ @ 2, _, 2* _* 2 2* ] |\n\n \"v1:7\" Tone [ 1* ] |\n\n \"v1:8\" Tone [ _** _ 3 5 6 @ ] |\n\n \"v2:1\" Tone [ @ 6 _** 3 5 6 @ ] |\n\n \"v2:2\" Tone [ @ 6 _* 5 4 3 2 1, 2, ] |\n\n \"v2:3\" Tone [ 3* _*+ 2 1+ _, ] |\n\n \"v2:4\" Tone [ 1* _*+ 1 2 3 ] |\n\n \"v2:5\" Tone [ 2* _*+ 1 3 2 @ ] |\n\n \"v2:6\" Tone [ @ 2* _*+ 3 2* ] |\n\n \"v2:7\" Tone [ 1* _** _ 5 ] |\n\n \"v2:8\" Tone [ 6* 5* ^1* 7* ] |\n\n \"c:1\" Tone [ 6* _*+ 6* 6 @ ] |\n\n \"c:2\" Tone [ @ 6 _* 5 6*+ 3, 2, ] |\n\n \"c:3\" Tone [ 1, 2, 3* _* 3*+ ] |\n\n \"c:4\" Tone [ 1 _** 3* 2 ] |\n\n \"c:5\" Tone [ 1*+ _** _ ] |\n\n \"c:6\" Tone [ _* 3 2* 1* _ ] |\n\n \"c:7\" Tone [ _* 3 2*+ 1 _ ] |\n\n \"c:8\" Tone [ _** _ 3 5 6 @ ] |\n", "file_path": "apps/notation_tool/src/tab/songs/misc/stand_by_me.rs", "rank": 86, "score": 92403.88065466724 }, { "content": " Sections: [\n\n {\"A\" Verse [\n\n {\n\n chord [ \"6-\" 1 ]\n\n guitar [ \"Em\" 1 ; \"picks\" | ]\n\n } {\n\n chord [ \"6-\" 1 ]\n\n guitar [ \"Em\" 1 ; \"picks\" | ]\n\n } {\n\n chord [ \"1\" 1 ]\n\n guitar [ \"G\" 1 ; \"picks\" | ]\n\n } {\n\n chord [ \"1\" 1 ]\n\n guitar [ \"G\" 1 ; \"picks\" | ]\n\n }\n\n ]}\n\n ]\n\n Form: \"A\" \"A\"\n\n }\n\n}\n", "file_path": "apps/notation_tool/src/tab/beginner/right_hand_1.rs", "rank": 87, "score": 92403.88065466724 }, { "content": " \"i:1\" Pick [ 3* _ 3* _ 4 3@1 ] |\n\n \"i:2\" Pick [ 3* _ 3* _ 3 3@1 ] |\n\n \"i:3\" Pick [ 4* _ 4* _ 4@2* ] |\n\n \"i:4\" Pick [ 4* _ 4* _ 4 4@2 ] |\n\n \"i:5\" Pick [ 4* _ 4* _ 4 4@4 ] |\n\n \"i:6\" Pick [ 4* _ 4* _ 4 3 ] |\n\n ]}\n\n {lyrics Lyrics [\n\n $duration = _1_8\n\n \"i:8\" Word [ _** _ \"when\" \"the\" \"night\" @ ] |\n\n \"v1:1\" Word [ @ \"\" _** _ \"has\" \"come\" @ ] |\n\n \"v1:2\" Word [ @ \"\" ] |\n\n \"v1:3\" Word [ _*+ \"and\", \"the\", \"land\"+ \"is\"+ 
\"dark\" @ ] |\n\n \"v1:4\" Word [ @ \"\"* _*+ \"and\" \"the\" \"moon\" ] |\n\n \"v1:5\" Word [ \"\"+ _* _+ \"is\" \"the\" \"on-\" @ ] |\n\n \"v1:6\" Word [ @ \"\", _, \"ly\"* _* \"light\" \"we'll\"* ] |\n\n \"v1:7\" Word [ \"see\"* ] |\n\n \"v1:8\" Word [ _** _ \"no\" \"I\" \"won't\" @ ] |\n\n \"v2:1\" Word [ @ \"\" _** \"be\" \"a-\" \"fraid\" @ ] |\n\n \"v2:2\" Word [ @ \"\" _* \"oh\" \"I\"** ] |\n", "file_path": "apps/notation_tool/src/tab/songs/misc/stand_by_me.rs", "rank": 88, "score": 92403.88065466724 }, { "content": " \"v2:3\" Word [ \"won't\"* _*+ \"be\" \"a-\"+ _, ] |\n\n \"v2:4\" Word [ \"fraid\"* _*+ \"just\" \"as\" \"long\" ] |\n\n \"v2:5\" Word [ \"\"* _*+ \"as\" \"you\" \"stand\" @ ] |\n\n \"v2:6\" Word [ @ \"\"* _*+ \"stand\" \"by\"* ] |\n\n \"v2:7\" Word [ \"me\"* _** _ \"so\" ] |\n\n \"v2:8\" Word [ \"dar-\"* \"ling\"* \"dar-\"* \"ling\"* ] |\n\n \"c:1\" Word [ \"stand\"* _*+ \"by\"* \"me\" @ ] |\n\n \"c:2\" Word [ @ \"oh\" _* \"oh\" \"stand\"** ] |\n\n \"c:3\" Word [ \"\", \"\", \"by\"* _* \"me\"*+ ] |\n\n \"c:4\" Word [ \"\" _** \"oh\"* \"oh\" ] |\n\n \"c:5\" Word [ \"stand\"*+ _** _ ] |\n\n \"c:6\" Word [ _* \"stand\" \"by\"* \"me\"* _ ] |\n\n \"c:7\" Word [ _* \"stand\" \"by\"*+ \"me\" _ ] |\n\n \"c:8\" Word [ _** _ \"if\" \"the\" \"sky\" @ ] |\n\n \"v3:1\" Word [ @ \"\" _** \"that\", \"we\", \"look\" \"u-\" @ ] |\n\n \"v3:2\" Word [ @ \"pon\"* _**+ ] |\n\n \"v3:3\" Word [ _*+ \"should\" \"tum-\" \"ble\" \"and\"* ] |\n\n \"v3:4\" Word [ \"fall\"* _*+ \"or\" \"the\" \"moun-\" ] |\n\n \"v3:5\" Word [ \"tain\"*+ _** \"should\" ] |\n\n \"v3:6\" Word [ \"crum-\" \"ble\" @ \"\"*+ \"to\" \"the\"* ] |\n", "file_path": "apps/notation_tool/src/tab/songs/misc/stand_by_me.rs", "rank": 89, "score": 92403.88065466724 }, { "content": " {\n\n chord [ \"5\" 1 ]\n\n guitar [ \"E\" 1 ; \"i:6\" | ]\n\n }\n\n {\n\n chord [ \"1\" 1 ]\n\n guitar [ \"A\" 1 ; \"i:1\" | ]\n\n }\n\n {\n\n chord [ \"1\" 1 ]\n\n guitar [ \"A\" 1 ; \"i:1\" | ]\n\n lyrics [ \"i:8\" | ]\n\n 
vocal [ \"i:8\" | ]\n\n }\n\n ]}\n\n {verse Verse [\n\n {\n\n chord [ \"1\" 1 ]\n\n guitar [ \"A\" 1 ; \"i:1\" | ]\n\n lyrics [ \"v1:1\" | @ 1 ; \"v2:1\" | @ 2 ; \"v3:1\" | @ 3 ; \"v4:1\" | @ 4 ]\n", "file_path": "apps/notation_tool/src/tab/songs/misc/stand_by_me.rs", "rank": 90, "score": 92403.88065466724 }, { "content": " {\n\n chord [ \"1\" 1 ]\n\n guitar [ \"A\" 1 ; \"i:1\" | ]\n\n }\n\n {\n\n chord [ \"1\" 1 ]\n\n guitar [ \"A\" 1 ; \"i:2\" | ]\n\n }\n\n {\n\n chord [ \"6-\" 1 ]\n\n guitar [ \"#Fm\" 1 ; \"i:3\" | ]\n\n }\n\n {\n\n chord [ \"6-\" 1 ]\n\n guitar [ \"#Fm\" 1 ; \"i:4\" | ]\n\n }\n\n {\n\n chord [ \"4\" 1 ]\n\n guitar [ \"D\" 1 ; \"i:5\" | ]\n\n }\n", "file_path": "apps/notation_tool/src/tab/songs/misc/stand_by_me.rs", "rank": 91, "score": 92403.88065466724 }, { "content": " \"v3:7\" Word [ \"sea\"* _**+ ] |\n\n \"v3:8\" Word [ _** _ \"I\" \"won't\" \"cry\" @ ] |\n\n \"v4:1\" Word [ @ \"\" _*+ \"I\" \"won't\"+ _, ] |\n\n \"v4:2\" Word [ @ \"cry\"* _ \"no\" \"I\"** ] |\n\n \"v4:3\" Word [ \"wont\" _*+ \"shed\" \"a\"* ] |\n\n \"v4:4\" Word [ \"tear\"* _*+ \"just\" \"as\" \"long\" ] |\n\n \"v4:5\" Word [ \"\"*+ _* \"as\" \"you\" \"stand\" ] |\n\n \"v4:6\" Word [ \"\"*+ _* \"stand\" \"by\"* ] |\n\n \"v4:7\" Word [ \"me\"* _** _ \"oh\" ] |\n\n \"v4:8\" Word [ \"dar-\"* \"ling\"* \"dar-\"* \"ling\"* ] |\n\n ]}\n\n {vocal Vocal [\n\n $key = A\n\n $scale = Major\n\n $duration = _1_8\n\n \"i:8\" Tone [ _** _ 3 5 6 @ ] |\n\n \"v1:1\" Tone [ @ 6 _** _ 3 5 @ ] |\n\n \"v1:2\" Tone [ @ 5 ] |\n\n \"v1:3\" Tone [ _*+ 1, 2, 3+ 2+ 1 @ ] |\n\n \"v1:4\" Tone [ @ 1* _*+ 1 2 3 ] |\n", "file_path": "apps/notation_tool/src/tab/songs/misc/stand_by_me.rs", "rank": 92, "score": 92403.88065466724 }, { "content": " {\n\n chord [ \"1\" 1 ]\n\n guitar [ \"A\" 1 ; \"i:1\" | ]\n\n lyrics [ \"c:1\" | ]\n\n vocal [ \"c:1\" | ]\n\n } {\n\n chord [ \"1\" 1 ]\n\n guitar [ \"A\" 1 ; \"i:2\" | ]\n\n lyrics [ \"c:2\" | ]\n\n vocal [ \"c:2\" | ]\n\n } {\n\n chord [ \"6-\" 1 ]\n\n guitar [ 
\"#Fm\" 1 ; \"i:3\" | ]\n\n lyrics [ \"c:3\" | ]\n\n vocal [ \"c:3\" | ]\n\n } {\n\n chord [ \"6-\" 1 ]\n\n guitar [ \"#Fm\" 1 ; \"i:4\" | ]\n\n lyrics [ \"c:4\" | ]\n\n vocal [ \"c:4\" | ]\n", "file_path": "apps/notation_tool/src/tab/songs/misc/stand_by_me.rs", "rank": 93, "score": 92403.88065466724 }, { "content": " }\n\n {\n\n chord [ \"4\" 1 ]\n\n guitar [ \"D\" 1 ; \"i:5\" | ]\n\n lyrics [ \"c:5\" | ]\n\n vocal [ \"c:5\" | ]\n\n } {\n\n chord [ \"5\" 1 ]\n\n guitar [ \"E\" 1 ; \"i:6\" | ]\n\n lyrics [ \"c:6\" | ]\n\n vocal [ \"c:6\" | ]\n\n } {\n\n chord [ \"1\" 1 ]\n\n guitar [ \"A\" 1 ; \"i:1\" | ]\n\n lyrics [ \"c:7\" | ]\n\n vocal [ \"c:7\" | ]\n\n } {\n\n chord [ \"1\" 1 ]\n\n guitar [ \"A\" 1 ; \"i:1\" | ]\n\n lyrics [ \"c:8\" | @ 1 ]\n\n vocal [ \"c:8\" | @ 1 ]\n\n }\n\n ]}\n\n ]\n\n Form: intro verse verse chorus verse verse chorus\n\n }\n\n}\n", "file_path": "apps/notation_tool/src/tab/songs/misc/stand_by_me.rs", "rank": 94, "score": 92403.88065466724 }, { "content": " lyrics [ \"v1:5\" | @ 1 ; \"v2:5\" | @ 2 ; \"v3:5\" | @ 3 ; \"v4:5\" | @ 4 ]\n\n vocal [ \"v1:5\" | @ 1 ; \"v2:5\" | @ 2 ; \"v3:5\" | @ 3 ; \"v4:5\" | @ 4 ]\n\n } {\n\n chord [ \"5\" 1 ]\n\n guitar [ \"E\" 1 ; \"i:6\" | ]\n\n lyrics [ \"v1:6\" | @ 1 ; \"v2:6\" | @ 2 ; \"v3:6\" | @ 3 ; \"v4:6\" | @ 4 ]\n\n vocal [ \"v1:6\" | @ 1 ; \"v2:6\" | @ 2 ; \"v3:6\" | @ 3 ; \"v4:6\" | @ 4 ]\n\n } {\n\n chord [ \"1\" 1 ]\n\n guitar [ \"A\" 1 ; \"i:1\" | ]\n\n lyrics [ \"v1:7\" | @ 1 ; \"v2:7\" | @ 2 ; \"v3:7\" | @ 3 ; \"v4:7\" | @ 4 ]\n\n vocal [ \"v1:7\" | @ 1 ; \"v2:7\" | @ 2 ; \"v3:7\" | @ 3 ; \"v4:7\" | @ 4 ]\n\n } {\n\n chord [ \"1\" 1 ]\n\n guitar [ \"A\" 1 ; \"i:1\" | ]\n\n lyrics [ \"v1:8\" | @ 1 ; \"v2:8\" | @ 2 ; \"v3:8\" | @ 3 ; \"v4:8\" | @ 4 ]\n\n vocal [ \"v1:8\" | @ 1 ; \"v2:8\" | @ 2 ; \"v3:8\" | @ 3 ; \"v4:8\" | @ 4 ]\n\n }\n\n ]}\n\n {chorus Chorus [\n", "file_path": "apps/notation_tool/src/tab/songs/misc/stand_by_me.rs", "rank": 95, "score": 92403.88065466724 }, { "content": "use 
notation_dsl::tab;\n\nuse notation_proto::prelude::*;\n\n\n", "file_path": "apps/notation_tool/src/tab/songs/misc/scarborough_fair.rs", "rank": 96, "score": 91103.44612357253 }, { "content": "pub mod bai_hua_lin;\n", "file_path": "apps/notation_tool/src/tab/songs/pu_shu/mod.rs", "rank": 97, "score": 91097.38988849784 }, { "content": " $duration = _1_4\n\n Chord ( 4: 3 5 )\n\n Chord ( 1: 3 5 /3)\n\n Chord ( 2: 3- 5 ) |\n\n ]}\n\n {guitar Guitar [\n\n Fretboard capo: 7\n\n $duration = D_1_2\n\n \"Asus4\" Shape ( 0 0 4 0 3 0 )\n\n \"Asus4/D\" Shape ( 0 5 4 0 3 0 )\n\n \"G/C\" Shape ( _ 3 0 0 0 _ )\n\n \"Asus2\" Shape ( _ 0 2 2 0 0 )\n\n \"G'\" Shape ( 3 _ 0 0 0 _ )\n\n \"G\" Shape ( 3 2 0 0 0 3 )\n\n \"C\" Shape ( 0 3 2 0 1 0 )\n\n \"Am\" Shape ( 0 0 2 2 1 0 )\n\n \"C_D\"\n\n $duration = _1_4\n\n \"C\" Shape ( 0 3 2 0 1 0 )\n\n $duration = _1_2\n", "file_path": "apps/notation_tool/src/tab/songs/misc/scarborough_fair.rs", "rank": 98, "score": 91092.99522062842 }, { "content": " \"D\" Shape ( 0 0 0 2 3 2 ) |\n\n \"C_G/B_Am\"\n\n $duration = _1_4\n\n \"C\" Shape ( 0 3 2 0 1 0 )\n\n \"G/B\" Shape ( _ 2 0 0 3 0 )\n\n \"Am\" Shape ( 0 0 2 2 1 0 ) |\n\n $duration = _1_8\n\n \"i\" Pick [ 5 1 3 4 2 3 ] |\n\n \"i'\" Pick [ 5 1 3 4 1 3 ] |\n\n \"i:3\" Pick [ 5 2 3* 5@2* ] |\n\n \"v:7\" Pick [ (6 2) 3 4 (4@2 2@1) (4 2) 3 ] |\n\n \"v:12\" Pick [ (5 2 1) 3 (4 2 1) 3 (4 2) 3 ] |\n\n \"v:17\" Pick [ 5 1 3 4 5 5@2 ] |\n\n \"v:19\" Pick [ (5 1) 3 (5 2) 3 (5 2) 3 ] |\n\n \"v:20\" Pick [ (6 2) 3 4 3 (4@2 2@1) (4 2) ] |\n\n \"v:21\" Pick [ (4@2 2@1 *) (4 2 *) 6* ] |\n\n \"v:23\" Pick [ (6 2) 3 4 3 (4@2 2@1 *) ] |\n\n \"v:24\" Pick [ (4 2 *) (4 2 -) (4@2 2@1 -) (4 2 -) (6 3) 4 ] |\n\n $duration = D_1_2\n\n \"o:1\" Pick [ 5 ] |\n", "file_path": "apps/notation_tool/src/tab/songs/misc/scarborough_fair.rs", "rank": 99, "score": 91092.99522062842 } ]
Rust
src/delivery/http/change.rs
chef-boneyard/delivery-cli
ab957f957017798cc11a370a0af57c3334d58bc9
use errors::{DeliveryError, Kind}; use http::*; use hyper::status::StatusCode; use serde_json; use config::Config; #[derive(Serialize, Deserialize, Debug, Clone, PartialEq, PartialOrd)] pub struct Description { pub title: String, pub description: String, } impl Description { pub fn payload(title: &str, desc: &str) -> Result<String, DeliveryError> { let desc = Description { title: String::from(title), description: String::from(desc), }; desc.to_json() } pub fn to_json(&self) -> Result<String, DeliveryError> { let payload = serde_json::to_string(&self)?; Ok(payload) } pub fn parse_json(response: &str) -> Result<Description, DeliveryError> { Ok(serde_json::from_str::<Description>(response)?) } pub fn parse_text(text: &str) -> Result<Description, DeliveryError> { let mut iter = text.lines(); let title = iter.find(|&l| !l.trim().is_empty()) .unwrap_or("") .trim() .to_string(); let desc = iter.collect::<Vec<&str>>().join("\n").trim().to_string(); Ok(Description { title: title, description: desc, }) } } pub fn get(config: &Config, change: &str) -> Result<Description, DeliveryError> { let org = try!(config.organization()); let proj = try!(config.project()); let client = try!(APIClient::from_config(&config)); let path = format!( "orgs/{}/projects/{}/changes/{}/description", org, proj, change ); debug!("description path: {}", path); let mut result = try!(client.get(&path)); match result.status { StatusCode::Ok => { let mut body_string = String::new(); let _x = try!(result.read_to_string(&mut body_string)); let description = try!(Description::parse_json(&body_string)); Ok(description) } StatusCode::NotFound => { let msg1 = "API request returned 404 (not found) while trying to fetch this change's description.\n".to_string(); let msg2 = "This is usually because the Delivery organization in your config does not match the organization for this project.\n"; let msg3 = "Your organization is current set to:\n\n"; let msg4 = &org; let msg5 = "\n\nTo fix this, try editing your 
cli.toml file's organization setting to match the organization this project resides in."; let err_msg = msg1 + msg2 + msg3 + msg4 + msg5; Err(DeliveryError { kind: Kind::ChangeNotFound, detail: Some(err_msg), }) } StatusCode::Unauthorized => { let msg = "API request returned 401 (unauthorized)".to_string(); Err(DeliveryError { kind: Kind::AuthenticationFailed, detail: Some(msg), }) } error_code @ _ => { let msg = format!("API request returned {}", error_code); let mut detail = String::new(); let e = match result.read_to_string(&mut detail) { Ok(_) => Ok(detail), Err(e) => Err(e), }; Err(DeliveryError { kind: Kind::ApiError(error_code, e), detail: Some(msg), }) } } } pub fn set(config: &Config, change: &str, description: &Description) -> Result<(), DeliveryError> { let org = try!(config.organization()); let proj = try!(config.project()); let client = try!(APIClient::from_config(&config)); let path = format!( "orgs/{}/projects/{}/changes/{}/description", org, proj, change ); let payload = try!(description.to_json()); let mut result = try!(client.put(&path, &payload)); match result.status { StatusCode::NoContent => Ok(()), StatusCode::Unauthorized => { let msg = "API request returned 401".to_string(); Err(DeliveryError { kind: Kind::AuthenticationFailed, detail: Some(msg), }) } error_code @ _ => { let msg = format!("API request returned {}", error_code); let mut detail = String::new(); let e = result.read_to_string(&mut detail).and(Ok(detail)); Err(DeliveryError::throw( Kind::ApiError(error_code, e), Some(msg), )) } } } #[cfg(test)] mod tests { use super::*; #[test] fn description_payload_test() { let payload = Description::payload("a title", "so descriptive!"); let expect = "{\"title\":\"a title\",\"description\":\"so descriptive!\"}"; assert_eq!(expect, payload.unwrap()); } #[test] fn description_to_json_test() { let desc = Description { title: "a title".to_string(), description: "so descriptive!".to_string(), }; let payload = desc.to_json().unwrap(); let expect = 
"{\"title\":\"a title\",\"description\":\"so descriptive!\"}"; assert_eq!(expect, payload); } #[test] fn description_parse_json_test() { let response = "{\"title\":\"a title\",\"description\":\"so descriptive!\"}"; let expect = Description { title: "a title".to_string(), description: "so descriptive!".to_string(), }; let description = Description::parse_json(response).unwrap(); assert_eq!(expect, description); } #[test] fn description_parse_text_1_test() { let text = "Just a title"; let expect = Description { title: text.to_string(), description: "".to_string(), }; let desc = Description::parse_text(text).unwrap(); assert_eq!(expect, desc); } #[test] fn description_parse_text_2_test() { let text = "Just a title\n\nWith some description"; let expect = Description { title: "Just a title".to_string(), description: "With some description".to_string(), }; let desc = Description::parse_text(text).unwrap(); assert_eq!(expect, desc); } #[test] fn description_parse_text_3_test() { let text = "Just a title\n\nL1\nL2\nL3\n"; let expect = Description { title: "Just a title".to_string(), description: "L1\nL2\nL3".to_string(), }; let desc = Description::parse_text(text).unwrap(); assert_eq!(expect, desc); } #[test] fn description_parse_text_4_test() { let text = "\n \nA title after some blank lines\n \nL1\nL2\nL3\n"; let expect = Description { title: "A title after some blank lines".to_string(), description: "L1\nL2\nL3".to_string(), }; let desc = Description::parse_text(text).unwrap(); assert_eq!(expect, desc); } }
use errors::{DeliveryError, Kind}; use http::*; use hyper::status::StatusCode; use serde_json; use config::Config; #[derive(Serialize, Deserialize, Debug, Clone, PartialEq, PartialOrd)] pub struct Description { pub title: String, pub description: String, } impl Description { pub fn payload(title: &str, desc: &str) -> Result<String, DeliveryError> { let desc = Description { title: String::from(title), description: String::from(desc), }; desc.to_json() } pub fn to_json(&self) -> Result<String, DeliveryError> { let payload = serde_json::to_string(&self)?; Ok(payload) } pub fn parse_json(response: &str)
t.read_to_string(&mut detail).and(Ok(detail)); Err(DeliveryError::throw( Kind::ApiError(error_code, e), Some(msg), )) } } } #[cfg(test)] mod tests { use super::*; #[test] fn description_payload_test() { let payload = Description::payload("a title", "so descriptive!"); let expect = "{\"title\":\"a title\",\"description\":\"so descriptive!\"}"; assert_eq!(expect, payload.unwrap()); } #[test] fn description_to_json_test() { let desc = Description { title: "a title".to_string(), description: "so descriptive!".to_string(), }; let payload = desc.to_json().unwrap(); let expect = "{\"title\":\"a title\",\"description\":\"so descriptive!\"}"; assert_eq!(expect, payload); } #[test] fn description_parse_json_test() { let response = "{\"title\":\"a title\",\"description\":\"so descriptive!\"}"; let expect = Description { title: "a title".to_string(), description: "so descriptive!".to_string(), }; let description = Description::parse_json(response).unwrap(); assert_eq!(expect, description); } #[test] fn description_parse_text_1_test() { let text = "Just a title"; let expect = Description { title: text.to_string(), description: "".to_string(), }; let desc = Description::parse_text(text).unwrap(); assert_eq!(expect, desc); } #[test] fn description_parse_text_2_test() { let text = "Just a title\n\nWith some description"; let expect = Description { title: "Just a title".to_string(), description: "With some description".to_string(), }; let desc = Description::parse_text(text).unwrap(); assert_eq!(expect, desc); } #[test] fn description_parse_text_3_test() { let text = "Just a title\n\nL1\nL2\nL3\n"; let expect = Description { title: "Just a title".to_string(), description: "L1\nL2\nL3".to_string(), }; let desc = Description::parse_text(text).unwrap(); assert_eq!(expect, desc); } #[test] fn description_parse_text_4_test() { let text = "\n \nA title after some blank lines\n \nL1\nL2\nL3\n"; let expect = Description { title: "A title after some blank lines".to_string(), description: 
"L1\nL2\nL3".to_string(), }; let desc = Description::parse_text(text).unwrap(); assert_eq!(expect, desc); } }
-> Result<Description, DeliveryError> { Ok(serde_json::from_str::<Description>(response)?) } pub fn parse_text(text: &str) -> Result<Description, DeliveryError> { let mut iter = text.lines(); let title = iter.find(|&l| !l.trim().is_empty()) .unwrap_or("") .trim() .to_string(); let desc = iter.collect::<Vec<&str>>().join("\n").trim().to_string(); Ok(Description { title: title, description: desc, }) } } pub fn get(config: &Config, change: &str) -> Result<Description, DeliveryError> { let org = try!(config.organization()); let proj = try!(config.project()); let client = try!(APIClient::from_config(&config)); let path = format!( "orgs/{}/projects/{}/changes/{}/description", org, proj, change ); debug!("description path: {}", path); let mut result = try!(client.get(&path)); match result.status { StatusCode::Ok => { let mut body_string = String::new(); let _x = try!(result.read_to_string(&mut body_string)); let description = try!(Description::parse_json(&body_string)); Ok(description) } StatusCode::NotFound => { let msg1 = "API request returned 404 (not found) while trying to fetch this change's description.\n".to_string(); let msg2 = "This is usually because the Delivery organization in your config does not match the organization for this project.\n"; let msg3 = "Your organization is current set to:\n\n"; let msg4 = &org; let msg5 = "\n\nTo fix this, try editing your cli.toml file's organization setting to match the organization this project resides in."; let err_msg = msg1 + msg2 + msg3 + msg4 + msg5; Err(DeliveryError { kind: Kind::ChangeNotFound, detail: Some(err_msg), }) } StatusCode::Unauthorized => { let msg = "API request returned 401 (unauthorized)".to_string(); Err(DeliveryError { kind: Kind::AuthenticationFailed, detail: Some(msg), }) } error_code @ _ => { let msg = format!("API request returned {}", error_code); let mut detail = String::new(); let e = match result.read_to_string(&mut detail) { Ok(_) => Ok(detail), Err(e) => Err(e), }; Err(DeliveryError { 
kind: Kind::ApiError(error_code, e), detail: Some(msg), }) } } } pub fn set(config: &Config, change: &str, description: &Description) -> Result<(), DeliveryError> { let org = try!(config.organization()); let proj = try!(config.project()); let client = try!(APIClient::from_config(&config)); let path = format!( "orgs/{}/projects/{}/changes/{}/description", org, proj, change ); let payload = try!(description.to_json()); let mut result = try!(client.put(&path, &payload)); match result.status { StatusCode::NoContent => Ok(()), StatusCode::Unauthorized => { let msg = "API request returned 401".to_string(); Err(DeliveryError { kind: Kind::AuthenticationFailed, detail: Some(msg), }) } error_code @ _ => { let msg = format!("API request returned {}", error_code); let mut detail = String::new(); let e = resul
random
[ { "content": "/// Request an API token for a user from a Delivery server.\n\npub fn request(config: &Config, pass: &str) -> Result<String, DeliveryError> {\n\n let client = try!(APIClient::from_config_no_auth(config));\n\n let user = try!(config.user());\n\n let payload = try!(TokenRequest::payload(&user, pass));\n\n let path = format!(\"users/{}/get-token\", &user);\n\n let mut result = try!(client.post(&path, &payload));\n\n match result.status {\n\n StatusCode::Ok => {\n\n let mut body_string = String::new();\n\n try!(result.read_to_string(&mut body_string));\n\n let token = try!(TokenResponse::parse_token(&body_string));\n\n Ok(token)\n\n }\n\n StatusCode::Unauthorized => {\n\n let ent = try!(config.enterprise());\n\n let server = try!(config.server());\n\n let msg = format!(\n\n \"Details: server={}, enterprise={}, user={}\",\n\n &server, &ent, &user\n\n );\n", "file_path": "src/delivery/http/token.rs", "rank": 0, "score": 298658.9509267114 }, { "content": "pub fn clone(project: &str, git_url: &str) -> Result<(), DeliveryError> {\n\n try!(git_command(&[\"clone\", git_url, project], &cwd()));\n\n Ok(())\n\n}\n\n\n", "file_path": "src/delivery/git/mod.rs", "rank": 1, "score": 286873.0900191928 }, { "content": "/// Edit a provided string in an external editor and returned the\n\n/// edited content.\n\n///\n\n/// The provided `content` is written to a tempdir using a file name\n\n/// of `name` and the `edit_path` function is used to open this file\n\n/// in the editor defined in the `EDITOR` environment variable. 
The\n\n/// contents of the file are returned as a string after the external\n\n/// editor completes.\n\npub fn edit_str(name: &str, content: &str) -> Result<String, DeliveryError> {\n\n let tempdir = try!(TempDir::new(\"delivery-edit\"));\n\n let tfile_path = tempdir.path().join_many(&[name]);\n\n let tfile_str = tfile_path.to_str().unwrap();\n\n let mut in_file = try!(File::create(tfile_path.clone()));\n\n try!(in_file.write_all(content.as_bytes()));\n\n try!(edit_path(tfile_str));\n\n let mut f = try!(File::open(&tfile_str));\n\n let mut content = String::new();\n\n try!(f.read_to_string(&mut content));\n\n Ok(content)\n\n}\n", "file_path": "src/delivery/utils/open.rs", "rank": 2, "score": 284764.1541895571 }, { "content": "#[cfg(target_os = \"windows\")]\n\npub fn read(prompt: &str) -> String {\n\n println!(\"{0}\", prompt);\n\n let mut pass = String::new();\n\n let mut ch = read_char();\n\n while ch != '\\r' {\n\n pass.push(ch);\n\n ch = read_char();\n\n }\n\n pass\n\n}\n\n\n", "file_path": "src/delivery/getpass/mod.rs", "rank": 5, "score": 266683.0258881122 }, { "content": "pub fn checkout_branch_name(change: &str, patchset: &str) -> String {\n\n if patchset == \"latest\" {\n\n return String::from(change);\n\n } else {\n\n return format!(\"{}/{}\", change, patchset);\n\n }\n\n}\n\n\n", "file_path": "src/delivery/git/mod.rs", "rank": 6, "score": 265409.05382671295 }, { "content": "// Extract the Environment Variable of the provided `key`\n\npub fn env_variable(key: &str) -> Option<String> {\n\n env::var(key).ok()\n\n}\n\n\n", "file_path": "src/delivery/utils/mod.rs", "rank": 7, "score": 256247.14767656155 }, { "content": "// Return the project name or try to extract it from the current path\n\npub fn project_or_from_cwd(proj: &str) -> DeliveryResult<String> {\n\n if proj.is_empty() {\n\n project_from_cwd()\n\n } else {\n\n Ok(proj.to_string())\n\n }\n\n}\n\n\n", "file_path": "src/delivery/project/mod.rs", "rank": 8, "score": 253350.9352546208 }, { "content": 
"pub fn generate_command_from_string(cmd_str: &str) -> process::Command {\n\n let mut cmd_vec = cmd_str.split(\" \").collect::<Vec<_>>();\n\n let mut cmd = make_command(&cmd_vec.remove(0));\n\n if cmd_vec.len() > 0 {\n\n cmd.args(&cmd_vec);\n\n }\n\n cmd\n\n}\n\n\n", "file_path": "src/delivery/utils/mod.rs", "rank": 9, "score": 231433.44151048127 }, { "content": "pub fn get_head() -> Result<String, DeliveryError> {\n\n let gitr = try!(git_command(&[\"branch\"], &cwd()));\n\n let result = try!(parse_get_head(&gitr.stdout));\n\n Ok(result)\n\n}\n\n\n", "file_path": "src/delivery/git/mod.rs", "rank": 10, "score": 223135.44954529463 }, { "content": "pub fn checkout_review(change: &str, patchset: &str, pipeline: &str) -> Result<(), DeliveryError> {\n\n try!(git_command(&[\"fetch\", \"delivery\"], &cwd()));\n\n let branchname = checkout_branch_name(change, patchset);\n\n let result = git_command(\n\n &[\n\n \"branch\",\n\n \"--track\",\n\n &branchname,\n\n &format!(\"delivery/_reviews/{}/{}/{}\", pipeline, change, patchset),\n\n ],\n\n &cwd(),\n\n );\n\n match result {\n\n Ok(_) => {\n\n try!(git_command(&[\"checkout\", &branchname], &cwd()));\n\n return Ok(());\n\n }\n\n Err(e) => match e.detail {\n\n Some(msg) => {\n\n if msg.contains(\"already exists.\") {\n", "file_path": "src/delivery/git/mod.rs", "rank": 11, "score": 222235.63647424028 }, { "content": "pub fn git_current_sha() -> Result<String, DeliveryError> {\n\n git_command(&[\"rev-parse\", \"HEAD\"], &cwd()).and_then(|msg| Ok(msg.stdout))\n\n}\n\n\n", "file_path": "src/delivery/git/mod.rs", "rank": 12, "score": 220968.83822302593 }, { "content": "fn parse_get_head(stdout: &str) -> Result<String, DeliveryError> {\n\n for line in stdout.lines() {\n\n let r = Regex::new(r\"(.) 
(.+)\").unwrap();\n\n let caps_result = r.captures(line);\n\n let caps = match caps_result {\n\n Some(caps) => caps,\n\n None => {\n\n return Err(DeliveryError {\n\n kind: Kind::BadGitOutputMatch,\n\n detail: Some(format!(\"Failed to match: {}\", line)),\n\n })\n\n }\n\n };\n\n let token = caps.get(1).unwrap().as_str();\n\n if token == \"*\" {\n\n let branch = caps.get(2).unwrap().as_str();\n\n return Ok(String::from(branch));\n\n }\n\n }\n\n return Err(DeliveryError {\n\n kind: Kind::NotOnABranch,\n\n detail: None,\n\n });\n\n}\n\n\n", "file_path": "src/delivery/git/mod.rs", "rank": 13, "score": 217683.89560187707 }, { "content": "#[cfg(target_os = \"windows\")]\n\npub fn item(path: &str) -> Result<(), DeliveryError> {\n\n process_response(\n\n \"start\",\n\n try!(\n\n Command::new(\"cmd.exe\")\n\n .arg(\"/c\")\n\n .arg(\"start\")\n\n .arg(path)\n\n .output()\n\n ),\n\n )\n\n}\n\n\n", "file_path": "src/delivery/utils/open.rs", "rank": 14, "score": 217306.64991648804 }, { "content": "// Push pipeline content to the Server\n\npub fn git_push(pipeline: &str) -> Result<(), DeliveryError> {\n\n // Check if the pipeline branch exists and has commits.\n\n // If the pipeline branch exists and does not have commits,\n\n // then `git branch` will not return it, so just checking\n\n // `git branch` output will handle both cases (pipeline does\n\n // not exist and pipeline exists but without commits).\n\n match git_command(&[\"branch\"], &cwd()) {\n\n Ok(msg) => {\n\n if !msg.stdout.contains(pipeline) {\n\n sayln(\n\n \"red\",\n\n &format!(\"A {} branch does not exist locally.\", pipeline),\n\n );\n\n sayln(\n\n \"red\",\n\n &format!(\n\n \"A {} branch with commits is needed to create the {} \\\n\n pipeline.\\n\",\n\n pipeline, pipeline\n\n ),\n", "file_path": "src/delivery/git/mod.rs", "rank": 15, "score": 215145.69879969588 }, { "content": "/// Open `path` in an external editor as found in the environment\n\n/// variable `EDITOR` and wait for the editor process to 
finish.\n\n///\n\n/// An `Err` is returned if no `EDITOR` variable is set or we are\n\n/// unable to read it for some reason.\n\n///\n\n/// The configured editor is spawned with `path` as the last command\n\n/// line argument. The value of the `EDITOR` environment variable is\n\n/// split on spaces. The first item is taken as the editor command and\n\n/// all subsequent items are passed to the editor command.\n\n///\n\npub fn edit_path(path: &str) -> Result<(), DeliveryError> {\n\n debug!(\"{}\", \"in edit!\");\n\n let editor_env = match env::var(\"EDITOR\") {\n\n Ok(e) => e,\n\n Err(_) => {\n\n return Err(DeliveryError {\n\n kind: Kind::NoEditor,\n\n detail: None,\n\n })\n\n }\n\n };\n\n // often, EDITOR has args provided\n\n let split = editor_env.trim().split(\" \");\n\n let mut items = split.collect::<Vec<&str>>();\n\n let editor = items.remove(0);\n\n let mut cmd = Command::new(editor);\n\n for arg in items.iter() {\n\n cmd.arg(arg);\n\n }\n\n let mut child = try!(cmd.arg(path).spawn());\n\n try!(child.wait());\n\n Ok(())\n\n}\n\n\n", "file_path": "src/delivery/utils/open.rs", "rank": 16, "score": 215145.69879969588 }, { "content": "// Commit content to local repo\n\n//\n\n// This fun will commit the changes you have loaded in the current repo,\n\n// it will also detect if the commit failed and transform the error to a\n\n// more specific one. (Ex. 
If we try to commit when nothing has changed)\n\npub fn git_commit(message: &str) -> Result<(), DeliveryError> {\n\n match git_command(&[\"commit\", \"-m\", message], &try!(project_path())) {\n\n Err(DeliveryError {\n\n kind,\n\n detail: Some(output),\n\n }) => {\n\n if output.contains(\"nothing to commit\") {\n\n return Err(DeliveryError {\n\n kind: Kind::EmptyGitCommit,\n\n detail: None,\n\n });\n\n }\n\n\n\n Err(DeliveryError {\n\n kind: kind,\n\n detail: Some(output),\n\n })\n\n }\n\n Err(e) => Err(e),\n\n Ok(_) => Ok(()),\n", "file_path": "src/delivery/git/mod.rs", "rank": 17, "score": 215145.69879969588 }, { "content": "pub fn git_push_review(branch: &str, target: &str) -> Result<ReviewResult, DeliveryError> {\n\n let gitr = try!(git_command(\n\n &[\n\n \"push\",\n\n \"--porcelain\",\n\n \"--progress\",\n\n \"--verbose\",\n\n \"delivery\",\n\n &format!(\"{}:_for/{}/{}\", branch, target, branch),\n\n ],\n\n &cwd()\n\n ));\n\n parse_git_push_output(&gitr.stdout, &gitr.stderr)\n\n}\n\n\n\n#[derive(Debug, Clone, PartialEq)]\n\npub enum PushResultFlag {\n\n SuccessfulFastForward,\n\n SuccessfulForcedUpdate,\n\n SuccessfulDeletedRef,\n", "file_path": "src/delivery/git/mod.rs", "rank": 18, "score": 213920.98061181174 }, { "content": "// Bump the metadata version, only if:\n\n// * The project is a cookbook\n\n// * The version hasn't been updated\n\n//\n\n// @param p_root [&PathBuf] The project root path\n\n// @param pipeline [&str] Pipeline the change is targeting to\n\n// @return () if success\n\npub fn bump_version(p_root: &PathBuf, pipeline: &str, project: &str) -> Result<(), DeliveryError> {\n\n if is_cookbook(&p_root) {\n\n say(\"white\", \"Project \");\n\n say(\"yellow\", &project);\n\n sayln(\"white\", \" is a cookbook\");\n\n sayln(\"white\", \"Validating version in metadata\");\n\n\n\n let meta_f_p = PathBuf::from(metadata_file(&p_root));\n\n let meta_f_c = try!(read_file(&meta_f_p));\n\n let current_meta_v = try!(metadata_version_from(&meta_f_c));\n\n let 
current_v = current_meta_v.to_string();\n\n let mut t_file = pipeline.to_string();\n\n t_file.push_str(\":metadata.rb\");\n\n let pipeline_meta = try!(git::git_command(&[\"show\", &t_file], &p_root));\n\n let pipeline_meta_v = try!(metadata_version_from(&pipeline_meta.stdout));\n\n let pipeline_v = pipeline_meta_v.to_string();\n\n\n\n if current_v == pipeline_v {\n\n say(\"yellow\", \"The version hasn't been updated (\");\n\n say(\"red\", &pipeline_v);\n", "file_path": "src/delivery/cookbook/mod.rs", "rank": 19, "score": 210841.38359540247 }, { "content": "// Verify the content of the repo:pipeline on the server\n\npub fn server_content(pipeline: &str) -> Result<bool, DeliveryError> {\n\n let p_ref = &format!(\"refs/heads/{}\", pipeline);\n\n match git_command(&[\"ls-remote\", \"delivery\", p_ref], &cwd()) {\n\n Ok(msg) => {\n\n if msg.stdout.contains(p_ref) {\n\n return Ok(true);\n\n } else {\n\n return Ok(false);\n\n }\n\n }\n\n Err(e) => return Err(e),\n\n }\n\n}\n\n\n", "file_path": "src/delivery/git/mod.rs", "rank": 20, "score": 209122.48691953364 }, { "content": "pub fn say(color: &str, to_say: &str) {\n\n match term::stdout() {\n\n Some(t) => unsafe {\n\n if SHOW_OUTPUT {\n\n if COLORIZE {\n\n say_term(t, color, to_say)\n\n } else {\n\n print!(\"{}\", to_say)\n\n }\n\n } else {\n\n debug!(\"{}\", to_say)\n\n }\n\n },\n\n None => {\n\n print!(\"{}\", to_say);\n\n io::stdout().flush().ok().expect(\"Could not flush stdout\");\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/delivery/utils/say.rs", "rank": 21, "score": 208690.65141768224 }, { "content": "pub fn sayln(color: &str, to_say: &str) {\n\n say(color, to_say);\n\n say(color, \"\\n\");\n\n}\n\n\n", "file_path": "src/delivery/utils/say.rs", "rank": 22, "score": 208690.65141768224 }, { "content": "pub fn print_error(primary_error_str: &str, secondary_error_str: &str) -> () {\n\n let final_error_str_primary = \"ERROR: \".to_string() + primary_error_str;\n\n sayln(\"error\", &final_error_str_primary);\n\n 
sayln(\"white\", &secondary_error_str);\n\n}\n", "file_path": "src/delivery/utils/say.rs", "rank": 23, "score": 207191.8382592601 }, { "content": "pub fn home_dir(to_append: &[&str]) -> Result<PathBuf, DeliveryError> {\n\n match dirs::home_dir() {\n\n Some(home) => Ok(home.join_many(to_append)),\n\n None => {\n\n let msg = \"unable to find home dir\".to_string();\n\n Err(DeliveryError {\n\n kind: Kind::NoHomedir,\n\n detail: Some(msg),\n\n })\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/delivery/utils/mod.rs", "rank": 24, "score": 207119.7012494204 }, { "content": "pub fn ca_path() -> String {\n\n String::from(\"C:\\\\opscode\\\\chefdk\\\\embedded\\\\ssl\\\\certs\\\\cacert.pem\")\n\n}\n\n\n\n// ---------------\n\n// dummy functions\n\n// ---------------\n\n//\n\n// These functions are no-ops to allow for compatibility with unix\n\n// system. For now, we aren't attempting to handle user/privilege\n\n// dropping on Windows.\n\n//\n", "file_path": "src/delivery/utils/windows.rs", "rank": 25, "score": 206993.40646885207 }, { "content": "pub fn ca_path() -> String {\n\n String::from(\"/opt/chefdk/embedded/ssl/certs/cacert.pem\")\n\n}\n", "file_path": "src/delivery/utils/unix.rs", "rank": 26, "score": 206993.40646885207 }, { "content": "pub fn server_arg_str<'a>() -> &'a str {\n\n \"-s --server=[server] 'The Automate server address'\"\n\n}\n\n\n", "file_path": "src/delivery/cli/arguments.rs", "rank": 27, "score": 205872.89737723832 }, { "content": "pub fn api_port_arg_str<'a>() -> &'a str {\n\n \"--api-port=[api-port] 'Port for Automate server'\"\n\n}\n\n\n\nfn_arg!(server_arg, server_arg_str());\n\n\n\nfn_arg!(api_port_arg, api_port_arg_str());\n\n\n\nfn_arg!(\n\n config_project_arg,\n\n \"-c --config-json=[config-json] 'Path of a custom config.json file'\"\n\n);\n\n\n\nfn_arg!(\n\n patchset_arg,\n\n \"-P --patchset=[patchset] 'A patchset number (default: latest)'\"\n\n);\n\n\n\nfn_arg!(project_arg, \"-p --project=[project] 'The project name'\");\n\n\n", "file_path": 
"src/delivery/cli/arguments.rs", "rank": 28, "score": 203700.0468208046 }, { "content": "// Read from STDIN\n\n//\n\n// Useful helper method to ask questions to the end-user\n\n//\n\n// Example:\n\n// ```\n\n// say(\"yellow\", \"How cool is the delivery-cli? [1-10] \");\n\n// let coolness = utils::read_from_terminal()?;\n\n// assert_eq!(coolness, 10);\n\n// ```\n\npub fn read_from_terminal() -> DeliveryResult<String> {\n\n let mut buff = String::new();\n\n try!(io::stdin().read_line(&mut buff));\n\n Ok(buff.trim().to_string())\n\n}\n\n\n", "file_path": "src/delivery/utils/mod.rs", "rank": 29, "score": 198518.43015460682 }, { "content": "// Return the project name from the current path\n\npub fn project_from_cwd() -> DeliveryResult<String> {\n\n let cwd = try!(self::root_dir(&utils::cwd()));\n\n Ok(cwd.file_name().unwrap().to_str().unwrap().to_string())\n\n}\n\n\n", "file_path": "src/delivery/project/mod.rs", "rank": 30, "score": 198510.08133460418 }, { "content": "pub fn git_pull(branch: &str, rebase: bool) -> Result<GitResult, DeliveryError> {\n\n // First, check if branch exists because for some reason rust\n\n // will hang forever when trying to git pull a branch that doesn't exist.\n\n match git_command(&[\"ls-remote\", \"--heads\", \"delivery\"], &cwd()) {\n\n Ok(result) => {\n\n if !result.stdout.contains(&format!(\"refs/heads/{}\", branch)) {\n\n return Err(DeliveryError {\n\n kind: Kind::BranchNotFoundOnDeliveryRemote,\n\n detail: None,\n\n });\n\n }\n\n }\n\n Err(err) => return Err(err),\n\n }\n\n\n\n if rebase {\n\n git_command(&[\"pull\", \"delivery\", branch, \"--rebase\"], &cwd())\n\n } else {\n\n git_command(&[\"pull\", \"delivery\", branch], &cwd())\n\n }\n\n}\n\n\n", "file_path": "src/delivery/git/mod.rs", "rank": 31, "score": 196270.21471657685 }, { "content": "pub fn review(target: &str, head: &str) -> DeliveryResult<ReviewResult> {\n\n if target == head {\n\n Err(DeliveryError {\n\n kind: Kind::CannotReviewSameBranch,\n\n detail: None,\n\n })\n\n 
} else {\n\n Ok(try!(git::git_push_review(head, target)))\n\n }\n\n}\n\n\n", "file_path": "src/delivery/project/mod.rs", "rank": 32, "score": 195552.46465973108 }, { "content": "pub fn chmod<P: ?Sized>(path: &P, setting: &str) -> Result<(), DeliveryError>\n\nwhere\n\n P: AsRef<Path>,\n\n{\n\n let result = try!(\n\n Command::new(\"chmod\")\n\n .arg(setting)\n\n .arg(path.as_ref().to_str().unwrap())\n\n .output()\n\n );\n\n super::cmd_success_or_err(&result, Kind::ChmodFailed)\n\n}\n\n\n", "file_path": "src/delivery/utils/unix.rs", "rank": 33, "score": 194961.0520424631 }, { "content": "#[allow(unused_variables)]\n\npub fn chmod<P: ?Sized>(path: &P, setting: &str) -> Result<(), DeliveryError>\n\nwhere\n\n P: AsRef<Path>,\n\n{\n\n Ok(())\n\n}\n\n\n", "file_path": "src/delivery/utils/windows.rs", "rank": 34, "score": 194961.0520424631 }, { "content": "pub fn value_of<'a>(matches: &'a ArgMatches, key: &str) -> &'a str {\n\n matches.value_of(key).unwrap_or(\"\")\n\n}\n\n\n\nmacro_rules! make_arg_vec {\n\n ( $( $x:expr ),* ) => {\n\n {\n\n let mut temp_vec = Vec::new();\n\n $(\n\n temp_vec.push(Arg::from_usage($x));\n\n )*\n\n temp_vec\n\n }\n\n };\n\n}\n\n\n\nmacro_rules! fn_arg {\n\n ($fn_name:ident, $usage:expr) => {\n\n pub fn $fn_name<'a>() -> Arg<'a, 'a> {\n\n Arg::from_usage($usage)\n\n }\n\n };\n\n}\n\n\n", "file_path": "src/delivery/cli/arguments.rs", "rank": 35, "score": 194854.98117302568 }, { "content": "pub fn make_command(cmd: &str) -> Command {\n\n // could do \"cmd.exe /c cmd\" instead and less overhead.\n\n let mut c = Command::new(\"powershell.exe\");\n\n c.arg(\"-noprofile\").arg(\"-nologo\").arg(\"-command\").arg(cmd);\n\n c\n\n}\n\n\n", "file_path": "src/delivery/utils/windows.rs", "rank": 36, "score": 193545.36887145773 }, { "content": "// Abstraction for command creation. Needed because of how we're\n\n// wrapping commands in Windows. 
See this function in the\n\n// corresponding windows module.\n\npub fn make_command(cmd: &str) -> Command {\n\n Command::new(cmd)\n\n}\n\n\n", "file_path": "src/delivery/utils/unix.rs", "rank": 37, "score": 193545.36887145773 }, { "content": "pub fn fixture_file(names: &str) -> PathBuf {\n\n fixtures().join_many(&[names])\n\n}\n", "file_path": "tests/support/paths.rs", "rank": 38, "score": 193545.36887145773 }, { "content": "// What is this crazy type signature, you ask? Let me explain!\n\n//\n\n// Where <P: ?Sized> == Any Type (Sized or Unsized)\n\n// Where P: AsRef<Path> == Any type that implements the AsRef<Path> trait\n\npub fn git_command<P>(args: &[&str], c: &P) -> Result<GitResult, DeliveryError>\n\nwhere\n\n P: AsRef<Path> + ?Sized,\n\n{\n\n let cwd = c.as_ref();\n\n let spinner = Spinner::start();\n\n let command_path = match find_command(\"git\") {\n\n Some(path) => path,\n\n None => {\n\n return Err(DeliveryError {\n\n kind: Kind::FailedToExecute,\n\n detail: Some(\"git executable not found\".to_owned()),\n\n })\n\n }\n\n };\n\n let mut command = Command::new(command_path);\n\n\n\n // SSH Agent is required on windows Hence,\n\n // Rebuilding the command in a way; that\n\n // Invoke an agent; Run the SSH Command and kill the agent then and there\n", "file_path": "src/delivery/git/mod.rs", "rank": 39, "score": 193104.08012898412 }, { "content": "// Convert a path into a String. 
Panic if the path contains\n\n// non-unicode sequences.\n\npub fn path_to_string<P: AsRef<Path>>(p: P) -> String {\n\n let path = p.as_ref();\n\n match path.to_str() {\n\n Some(s) => s.to_string(),\n\n None => {\n\n let s = format!(\"invalid path (non-unicode): {}\", path.to_string_lossy());\n\n panic!(s)\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/delivery/utils/mod.rs", "rank": 40, "score": 192516.4746988006 }, { "content": "// Verify if the `process::Output` of a `process::Command`\n\n// was executed successfully, otherwise return the provided\n\n// error and printing the STDOUT & STDERR\n\npub fn cmd_success_or_err(out: &CmdOutput, e_kind: Kind) -> DeliveryResult<()> {\n\n if !out.status.success() {\n\n return Err(DeliveryError {\n\n kind: e_kind,\n\n detail: Some(format!(\n\n \"STDOUT: {}\\nSTDERR: {}\\n\",\n\n String::from_utf8_lossy(&out.stdout),\n\n String::from_utf8_lossy(&out.stderr)\n\n )),\n\n });\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "src/delivery/utils/mod.rs", "rank": 41, "score": 192098.1367672035 }, { "content": "pub fn chown_all<P: AsRef<Path>>(who: &str, paths: &[P]) -> Result<(), DeliveryError> {\n\n let mut command = Command::new(\"chown\");\n\n command.arg(\"-R\").arg(who);\n\n for p in paths {\n\n command.arg(&path_to_string(p));\n\n }\n\n let output = match command.output() {\n\n Ok(o) => o,\n\n Err(e) => {\n\n return Err(DeliveryError {\n\n kind: Kind::FailedToExecute,\n\n detail: Some(format!(\n\n \"failed to execute chown: {}\",\n\n error::Error::description(&e)\n\n )),\n\n })\n\n }\n\n };\n\n super::cmd_success_or_err(&output, Kind::ChmodFailed)\n\n}\n\n\n", "file_path": "src/delivery/utils/unix.rs", "rank": 42, "score": 191998.1848635209 }, { "content": "#[allow(unused_variables)]\n\npub fn chown_all<P: AsRef<Path>>(who: &str, paths: &[P]) -> Result<(), DeliveryError> {\n\n Ok(())\n\n}\n\n\n", "file_path": "src/delivery/utils/windows.rs", "rank": 43, "score": 191998.1848635209 }, { "content": "pub fn fixture_file(names: &str) -> 
PathBuf {\n\n fixtures().join_many(&[names])\n\n}\n\n\n", "file_path": "src/delivery/utils/test_paths.rs", "rank": 44, "score": 189137.26332194937 }, { "content": "/// Returns the absolute path for a given command, if it exists, by searching the `PATH`\n\n/// environment variable.\n\n///\n\n/// If the command represents an absolute path, then the `PATH` searching will not be performed.\n\n/// If no absolute path can be found for the command, then `None` is returned.\n\npub fn find_command(command: &str) -> Option<PathBuf> {\n\n // If the command path is absolute and a file exists, then use that.\n\n let candidate = PathBuf::from(command);\n\n if candidate.is_absolute() && candidate.is_file() {\n\n return Some(candidate);\n\n }\n\n // Find the command by checking each entry in `PATH`. If we still can't find it,\n\n // give up and return `None`.\n\n if let Some(paths) = env::var_os(\"PATH\") {\n\n for path in env::split_paths(&paths) {\n\n let candidate = PathBuf::from(&path).join(command);\n\n if candidate.is_file() {\n\n return Some(candidate);\n\n }\n\n }\n\n }\n\n None\n\n}\n\n\n", "file_path": "src/delivery/utils/unix.rs", "rank": 45, "score": 186057.7710899409 }, { "content": "pub fn join_many(v: &Vec<&str>) -> PathBuf {\n\n root().join_many(v)\n\n}\n", "file_path": "src/delivery/utils/test_paths.rs", "rank": 46, "score": 186057.7710899409 }, { "content": "/// Returns the absolute path for a given command, if it exists, by searching the `PATH`\n\n/// environment variable.\n\n///\n\n/// If the command represents an absolute path, then the `PATH` seaching will not be performed.\n\n/// If no absolute path can be found for the command, then `None` is returned.\n\n///\n\n/// On Windows, the PATHEXT environment variable contains common extensions for commands,\n\n/// for example allowing \"docker.exe\" to be found when searching for \"docker\".\n\npub fn find_command(command: &str) -> Option<PathBuf> {\n\n // If the command path is absolute and a file exists, then 
use that.\n\n let candidate = PathBuf::from(command);\n\n if candidate.is_absolute() && candidate.is_file() {\n\n return Some(candidate);\n\n }\n\n\n\n // For SSH based authentication, If git execution is required, \n\n // use the bash embedded in git\\bin\n\n // TODO: Check for SSH type authencation required\n\n if command == \"git\" {\n\n let exe_path = env::current_exe().unwrap();\n\n let parent_path = exe_path.parent().unwrap();\n\n let command_path = parent_path.to_path_buf().join(\"..\\\\embedded\\\\git\\\\bin\\\\bash\");\n\n return Some(command_path);\n\n }\n\n\n\n // Find the command by checking each entry in `PATH`. If we still can't find it,\n\n // give up and return `None`.\n\n if let Some(paths) = env::var_os(\"PATH\") {\n", "file_path": "src/delivery/utils/windows.rs", "rank": 47, "score": 186057.7710899409 }, { "content": "// Create a Delivery Project with Delivery as SCP (default).\n\n// If the project is created, return true.\n\n// If the project already exists, return false\n\npub fn create_delivery_project(client: &APIClient, org: &str, proj: &str) -> DeliveryResult<bool> {\n\n if client.project_exists(org, proj) {\n\n return Ok(false);\n\n } else {\n\n try!(client.create_delivery_project(org, proj));\n\n return Ok(true);\n\n }\n\n}\n\n\n", "file_path": "src/delivery/project/mod.rs", "rank": 48, "score": 184025.58221072957 }, { "content": "/// Lookup if Delivery server is SAML-enabled.\n\npub fn is_enabled(config: &Config) -> Result<bool, DeliveryError> {\n\n let client = try!(APIClient::from_config_no_auth(config));\n\n let path = \"saml/enabled\";\n\n let mut result = try!(client.get(&path));\n\n match result.status {\n\n StatusCode::Ok => {\n\n let mut body_string = String::new();\n\n try!(result.read_to_string(&mut body_string));\n\n let resp = try!(LookupResponse::parse_saml_enabled(&body_string));\n\n Ok(resp)\n\n }\n\n StatusCode::NotFound => {\n\n // 404 received if API does not exist\n\n debug!(\"endpoint 'saml/enabled' not found\");\n\n 
Ok(false)\n\n }\n\n error_code @ _ => {\n\n let msg = format!(\"lookup of SAML authentication returned {}\", error_code);\n\n let mut detail = String::new();\n\n let e = match result.read_to_string(&mut detail) {\n", "file_path": "src/delivery/http/saml.rs", "rank": 49, "score": 183679.41200555634 }, { "content": "// Verify an API token for a user against a Delivery Server\n\n//\n\n// This method verifies that a user has an existing Token on disk,\n\n// that it is valid and has not yet expired. Otherwise it will return\n\n// false saying that a token needs to be regenerated\n\npub fn verify(config: &Config) -> Result<bool, DeliveryError> {\n\n let api_server = try!(config.api_base_resource());\n\n let ent = try!(config.enterprise());\n\n let user = try!(config.user());\n\n let tstore = try!(TokenStore::from_home());\n\n let auth = try!(\n\n APIAuth::from_token_store(tstore, &api_server, &ent, &user).or_else(|e| {\n\n debug!(\"Ignoring {:?}\\nRequesting token from config\", e);\n\n APIAuth::from_token_request(&config)\n\n })\n\n );\n\n let client = try!(APIClient::from_config_no_auth(config).and_then(|mut c| {\n\n c.set_auth(auth);\n\n Ok(c)\n\n }));\n\n let mut response = try!(client.get(\"orgs\"));\n\n match response.status {\n\n StatusCode::Ok => Ok(true),\n\n StatusCode::Unauthorized => {\n\n let content = try!(APIClient::extract_pretty_json(&mut response));\n", "file_path": "src/delivery/http/token.rs", "rank": 50, "score": 183679.41200555634 }, { "content": "/// Return the content of the provided file\n\n///\n\n/// An easy way to read a file\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// use std::fs::{File, remove_file};\n\n/// use std::io::prelude::*;\n\n/// use std::path::PathBuf;\n\n/// use delivery::utils::read_file;\n\n///\n\n/// let mut f = File::create(\"foo.txt\").unwrap();\n\n/// f.write_all(b\"Cool beans!\");\n\n///\n\n/// let f = PathBuf::from(\"foo.txt\");\n\n/// assert_eq!(\"Cool beans!\", read_file(&f).unwrap());\n\n///\n\n/// 
remove_file(\"foo.txt\");\n\n/// ```\n\npub fn read_file<P>(path: P) -> DeliveryResult<String>\n\nwhere\n\n P: AsRef<Path>,\n\n{\n\n let mut buffer = String::new();\n\n let mut f = try!(File::open(path));\n\n try!(f.read_to_string(&mut buffer));\n\n Ok(buffer)\n\n}\n\n\n", "file_path": "src/delivery/utils/mod.rs", "rank": 51, "score": 182597.50884677639 }, { "content": "// Push local content to the Delivery Server if no upstream commits.\n\n// Returns true if commits pushed, returns false if upstream commits found.\n\npub fn push_project_content_to_delivery(pipeline: &str) -> DeliveryResult<bool> {\n\n if try!(git::server_content(pipeline)) {\n\n Ok(false)\n\n } else {\n\n try!(git::git_push(pipeline));\n\n Ok(true)\n\n }\n\n}\n\n\n", "file_path": "src/delivery/project/mod.rs", "rank": 52, "score": 181975.3706883313 }, { "content": "// Returns the (Git) delivery remote URL form the specified repository path\n\n//\n\n// ex.=> ssh://user@[email protected]:8989/ent/organization/foo\n\npub fn delivery_remote_from_repo<P>(path: P) -> DeliveryResult<String>\n\nwhere\n\n P: AsRef<Path>,\n\n{\n\n git_command(&[\"config\", \"--get\", \"remote.delivery.url\"], path.as_ref())\n\n .map(|g| g.stdout.trim().to_string())\n\n // If there is no 'delivery' remote, return an empty String\n\n .or(Ok(String::from(\"\")))\n\n}\n\n\n", "file_path": "src/delivery/git/mod.rs", "rank": 53, "score": 180579.74820979024 }, { "content": "/// Walk up a file hierarchy searching for `dir/target`.\n\npub fn walk_tree_for_path<P>(dir: P, target: &str) -> Option<PathBuf>\n\nwhere\n\n P: AsRef<Path>,\n\n{\n\n let mut current = dir.as_ref();\n\n loop {\n\n let candidate = current.join(target);\n\n if fs::metadata(&candidate).is_ok() {\n\n let ans = PathBuf::from(candidate);\n\n return Some(ans);\n\n }\n\n match current.parent() {\n\n Some(p) => current = p,\n\n None => return None,\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/delivery/utils/mod.rs", "rank": 54, "score": 170240.42854515626 }, { "content": 
"pub fn create_build_cookbook<P>(pipeline: &str, path: P) -> DeliveryResult<Command>\n\nwhere\n\n P: AsRef<Path>,\n\n{\n\n let mut command = utils::make_command(\"chef\");\n\n command\n\n .arg(\"generate\")\n\n .arg(\"build-cookbook\")\n\n .arg(path.as_ref())\n\n .arg(\"--pipeline\")\n\n .arg(pipeline)\n\n .current_dir(&try!(project_path()));\n\n let output = command.output()?;\n\n cmd_success_or_err(&output, Kind::ChefdkGenerateFailed)?;\n\n Ok(command)\n\n}\n\n\n\n#[derive(Debug)]\n\npub enum CustomCookbookSource {\n\n Cached,\n\n Disk,\n\n Git,\n\n}\n\n\n", "file_path": "src/delivery/project/mod.rs", "rank": 55, "score": 170240.42854515626 }, { "content": "pub fn clap_subcommand<'c>() -> App<'c, 'c> {\n\n SubCommand::with_name(SUBCOMMAND_NAME)\n\n .about(\"Clone a project repository\")\n\n .args_from_usage(\n\n \"<project> 'Name of project to clone'\n\n -g --git-url=[url] \\\n\n 'Git URL (-u -s -e -o ignored if used)'\",\n\n )\n\n .args(&u_e_s_o_args())\n\n .args(&project_specific_args())\n\n .args(&vec![a2_mode_arg()])\n\n}\n", "file_path": "src/delivery/cli/clone.rs", "rank": 56, "score": 167146.4313723722 }, { "content": "#[cfg(not(target_os = \"windows\"))]\n\nfn item_for_cmds(path: &str, cmds: &[&str]) -> Result<(), DeliveryError> {\n\n let mut res = Err(DeliveryError {\n\n kind: Kind::OpenFailed,\n\n detail: None,\n\n });\n\n for cmd in cmds {\n\n res = item_for_cmd(path, cmd);\n\n match res {\n\n Ok(_) => break,\n\n Err(_) => continue,\n\n }\n\n }\n\n res\n\n}\n\n\n", "file_path": "src/delivery/utils/open.rs", "rank": 57, "score": 166939.71967079552 }, { "content": "#[cfg(not(target_os = \"windows\"))]\n\nfn item_for_cmd(path: &str, cmd: &str) -> Result<(), DeliveryError> {\n\n process_response(cmd, try!(Command::new(cmd).arg(path).output()))\n\n}\n\n\n", "file_path": "src/delivery/utils/open.rs", "rank": 58, "score": 166939.71967079552 }, { "content": "// Extract the cookbook version from the provided metadata content\n\n//\n\n// This function expects 
you to read the metadata in advance and\n\n// pass just the content of the file. From there it will read every\n\n// line and search for the version to return it.\n\n//\n\n// There are two valid version formats:\n\n// a) 'x.y.z' - The normal semantic version. (major.minor.patch)\n\n// b) 'x.y' - Where the patchset will be 0 by default. (major.minor.0)\n\nfn metadata_version_from(content: &str) -> Result<MetadataVersion, DeliveryError> {\n\n for l in content.lines() {\n\n let r_m_m_p =\n\n Regex::new(r\"version\\s+'(?P<major>\\d+)\\.(?P<minor>\\d+)\\.(?P<patch>\\d+)'\").unwrap();\n\n if let Some(version) = r_m_m_p.captures(l) {\n\n return generate_metadata_version(version);\n\n }\n\n let r_m_m = Regex::new(r\"version\\s+'(?P<major>\\d+)\\.(?P<minor>\\d+)'\").unwrap();\n\n if let Some(version) = r_m_m.captures(l) {\n\n return generate_metadata_version(version);\n\n }\n\n }\n\n return Err(DeliveryError {\n\n kind: Kind::MissingMetadataVersion,\n\n detail: None,\n\n });\n\n}\n\n\n", "file_path": "src/delivery/cookbook/mod.rs", "rank": 59, "score": 151098.31812428345 }, { "content": "// Saves the new version to the metadata and commit the changes\n\nfn save_version(metadata: &PathBuf, version: String) -> Result<(), DeliveryError> {\n\n let current_meta = try!(read_file(metadata));\n\n let current_meta_version = try!(metadata_version_from(&current_meta));\n\n let current_version = current_meta_version.to_string();\n\n let new_metadata = current_meta.replace(&*current_version, &*version);\n\n\n\n // Recreate the file and dump the processed contents to it\n\n let mut recreate_meta = try!(File::create(metadata));\n\n try!(recreate_meta.write(new_metadata.as_bytes()));\n\n\n\n // Commit the changes made to the metadata\n\n let mut commit_msg = String::from(\"Bump version to \");\n\n commit_msg.push_str(&version);\n\n try!(git::git_command(\n\n &[\"add\", metadata.to_str().unwrap()],\n\n &utils::cwd()\n\n ));\n\n try!(git::git_command(\n\n &[\"commit\", \"-m\", 
&commit_msg],\n\n &utils::cwd()\n", "file_path": "src/delivery/cookbook/mod.rs", "rank": 60, "score": 148244.55302126048 }, { "content": "fn process_response(cmd: &str, res: Output) -> Result<(), DeliveryError> {\n\n if res.status.success() {\n\n Ok(())\n\n } else {\n\n let code = match res.status.code() {\n\n Some(c) => format!(\"{}\", c),\n\n None => format!(\"{}\", \"terminated by signal\"),\n\n };\n\n let msg = format!(\"Command '{}' failed with code {}\", cmd, code);\n\n Err(DeliveryError {\n\n kind: Kind::OpenFailed,\n\n detail: Some(msg),\n\n })\n\n }\n\n}\n\n\n", "file_path": "src/delivery/utils/open.rs", "rank": 61, "score": 148127.93434111253 }, { "content": "// This function is not currently used, but will be when we\n\n// add a --force option to the init command.\n\npub fn create_repo(path: &PathBuf) -> Result<(), DeliveryError> {\n\n say(\"white\", \"Creating repo in: \");\n\n say(\"magenta\", &format!(\"{} \", path.display()));\n\n let result = git_command(&[\"init\"], path);\n\n match result {\n\n Ok(_) => {\n\n sayln(\"white\", \"'git init' done.\");\n\n return Ok(());\n\n }\n\n Err(e) => return Err(e),\n\n }\n\n}\n\n\n", "file_path": "src/delivery/git/mod.rs", "rank": 62, "score": 147987.93056461244 }, { "content": "pub fn check_repo_init(path: &PathBuf) -> Result<(), DeliveryError> {\n\n say(\"white\", \"Is \");\n\n say(\"magenta\", &format!(\"{} \", path.display()));\n\n say(\"white\", \"a git repo? \");\n\n\n\n let git_dir = path.join(\".git\");\n\n\n\n if is_dir(git_dir.as_path()) {\n\n sayln(\"white\", \"yes\");\n\n return Ok(());\n\n } else {\n\n sayln(\n\n \"red\",\n\n \"no. 
Run 'git init' here and then 'delivery init' again.\",\n\n );\n\n return Err(DeliveryError {\n\n kind: Kind::GitSetupFailed,\n\n detail: None,\n\n });\n\n }\n\n}\n\n\n", "file_path": "src/delivery/git/mod.rs", "rank": 63, "score": 146656.86521249087 }, { "content": "fn version() -> String {\n\n let build_version = option_env!(\"DELIV_CLI_VERSION\").unwrap_or(\"0.0.0\");\n\n format!(\"{}\", build_version)\n\n}\n\n\n", "file_path": "src/delivery/cli/mod.rs", "rank": 64, "score": 143986.69650618563 }, { "content": "/// Makes a change to a project on the named branch. Creates a\n\n/// file named `filename` and writes some stuff to it.\n\n///\n\n/// When it returns, the project you pass in will be left on your\n\n/// new branch.\n\nfn setup_change(tmpdir: &Path, branch: &str, filename: &str) {\n\n panic_on_error!(git_command(&[\"checkout\", \"master\"], tmpdir));\n\n panic_on_error!(git_command(&[\"branch\", branch], tmpdir));\n\n {\n\n let mut f = panic_on_error!(File::create(&tmpdir.join(filename)));\n\n panic_on_error!(f.write_all(b\"I like cookies\"));\n\n }\n\n panic_on_error!(git_command(&[\"add\", filename], tmpdir));\n\n panic_on_error!(git_command(&[\"commit\", \"-a\", \"-m\", filename], tmpdir));\n\n}\n\n\n", "file_path": "tests/cli.rs", "rank": 65, "score": 142863.23382455265 }, { "content": "fn with_default<'a>(val: &'a str, default: &'a str, local: &bool) -> &'a str {\n\n if !local || !val.is_empty() {\n\n val\n\n } else {\n\n default\n\n }\n\n}\n\n\n", "file_path": "src/delivery/cli/job.rs", "rank": 66, "score": 142556.86098530373 }, { "content": "fn stunnel_path() -> String {\n\n if cfg!(target_os = \"windows\") {\n\n String::from(\"C:\\\\opscode\\\\chefdk\\\\embedded\\\\bin\\\\stunnel.exe\")\n\n } else {\n\n String::from(\"/opt/chefdk/embedded/bin/stunnel\")\n\n }\n\n}\n\n\n", "file_path": "src/delivery/fips/mod.rs", "rank": 67, "score": 142213.68072144606 }, { "content": "pub fn remove_recursive<P: ?Sized>(path: &P) -> Result<(), 
DeliveryError>\n\nwhere\n\n P: AsRef<Path>,\n\n{\n\n match fs::metadata(path) {\n\n Ok(_) => {\n\n // only remove if there is something there\n\n let result = try!(\n\n make_command(\"Remove-Item\")\n\n .arg(\"-recurse\")\n\n .arg(\"-force\")\n\n .arg(path.as_ref().to_str().unwrap())\n\n .output()\n\n );\n\n super::cmd_success_or_err(&result, Kind::RemoveFailed)\n\n }\n\n Err(ref e) if e.kind() == io::ErrorKind::NotFound => {\n\n // probably should get specific. Re-raise unless this is\n\n // not found\n\n Ok(())\n", "file_path": "src/delivery/utils/windows.rs", "rank": 68, "score": 141124.45497549072 }, { "content": "pub fn remove_recursive<P: ?Sized>(path: &P) -> Result<(), DeliveryError>\n\nwhere\n\n P: AsRef<Path>,\n\n{\n\n try!(\n\n Command::new(\"rm\")\n\n .arg(\"-rf\")\n\n .arg(path.as_ref().to_str().unwrap())\n\n .output()\n\n );\n\n Ok(())\n\n}\n\n\n", "file_path": "src/delivery/utils/unix.rs", "rank": 69, "score": 141124.45497549072 }, { "content": "pub fn mkdir_recursive<P: ?Sized>(path: &P) -> Result<(), DeliveryError>\n\nwhere\n\n P: AsRef<Path>,\n\n{\n\n try!(fs::create_dir_all(path.as_ref()));\n\n Ok(())\n\n}\n\n\n", "file_path": "src/delivery/utils/mod.rs", "rank": 70, "score": 141124.45497549072 }, { "content": "fn build_git_sha() -> String {\n\n let sha = option_env!(\"DELIV_CLI_GIT_SHA\").unwrap_or(\"0000\");\n\n format!(\"({})\", sha)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use cli;\n\n use cli::{api, checkout, clone, diff, init, job, review, setup, spin, token};\n\n\n\n #[test]\n\n fn test_clap_api_options() {\n\n let build_version = format!(\"{} {}\", cli::version(), cli::build_git_sha());\n\n let app = cli::make_app(&build_version);\n\n let matches = app.get_matches_from(vec![\n\n \"delivery\",\n\n \"api\",\n\n \"get\",\n\n \"endpoint\",\n\n \"--data\",\n", "file_path": "src/delivery/cli/mod.rs", "rank": 71, "score": 140514.3316444221 }, { "content": "pub fn copy_recursive<A, B>(f: &A, t: &B) -> Result<(), DeliveryError>\n\nwhere\n\n A: 
AsRef<Path> + ?Sized,\n\n B: AsRef<Path> + ?Sized,\n\n{\n\n let from = f.as_ref();\n\n let to = t.as_ref();\n\n let result = try!(\n\n Command::new(\"cp\")\n\n .arg(\"-R\")\n\n .arg(\"-a\")\n\n .arg(from.to_str().unwrap())\n\n .arg(to.to_str().unwrap())\n\n .output()\n\n );\n\n super::cmd_success_or_err(&result, Kind::CopyFailed)\n\n}\n\n\n", "file_path": "src/delivery/utils/unix.rs", "rank": 72, "score": 139045.3765803241 }, { "content": "pub fn copy_recursive<A, B>(f: &A, t: &B) -> Result<(), DeliveryError>\n\nwhere\n\n A: AsRef<Path> + ?Sized,\n\n B: AsRef<Path> + ?Sized,\n\n{\n\n let from = f.as_ref();\n\n let to = t.as_ref();\n\n let result = try!(\n\n make_command(\"Copy-Item\")\n\n .arg(\"-recurse\")\n\n .arg(\"-Force\")\n\n .arg(from.to_str().unwrap())\n\n .arg(to.to_str().unwrap())\n\n .output()\n\n );\n\n super::cmd_success_or_err(&result, Kind::CopyFailed)\n\n}\n\n\n", "file_path": "src/delivery/utils/windows.rs", "rank": 73, "score": 139045.37658032414 }, { "content": "pub fn diff(\n\n change: &str,\n\n patchset: &str,\n\n pipeline: &str,\n\n local: &bool,\n\n) -> Result<(), DeliveryError> {\n\n try!(git_command(&[\"fetch\", \"delivery\"], &cwd()));\n\n let mut first_branch = format!(\"delivery/{}\", pipeline);\n\n if *local {\n\n first_branch = String::from(\"HEAD\");\n\n }\n\n let diff = try!(git_command(\n\n &[\n\n \"diff\",\n\n \"--color=always\",\n\n &first_branch,\n\n &format!(\"delivery/_reviews/{}/{}/{}\", pipeline, change, patchset),\n\n ],\n\n &cwd()\n\n ));\n\n say(\"white\", \"\\n\");\n\n sayln(\"white\", &diff.stdout);\n\n Ok(())\n\n}\n\n\n", "file_path": "src/delivery/git/mod.rs", "rank": 74, "score": 138536.56979365487 }, { "content": "pub fn run() {\n\n let build_version = format!(\"{} {}\", version(), build_git_sha());\n\n\n\n let app = make_app(&build_version);\n\n let app_matches = app.get_matches();\n\n\n\n match match_command_and_start(&app_matches, &build_version) {\n\n // You can exit with any integer, can also be used to bypass 
default\n\n // error handling if you handled an error and returned non-zero.\n\n Ok(exit_status) => process::exit(exit_status),\n\n // Handles DeliveryError and exits 1.\n\n Err(e) => exit_with(e, 1),\n\n }\n\n}\n\n\n", "file_path": "src/delivery/cli/mod.rs", "rank": 75, "score": 138536.56979365487 }, { "content": "pub fn turn_off_output() {\n\n unsafe {\n\n SHOW_OUTPUT = false;\n\n }\n\n}\n\n\n", "file_path": "src/delivery/utils/say.rs", "rank": 76, "score": 136781.8112012552 }, { "content": "pub fn turn_off_spinner() {\n\n unsafe {\n\n SHOW_SPINNER = false;\n\n }\n\n}\n\n\n", "file_path": "src/delivery/utils/say.rs", "rank": 77, "score": 136781.8112012552 }, { "content": "pub fn turn_on_output() {\n\n unsafe {\n\n SHOW_OUTPUT = true;\n\n }\n\n}\n\n\n", "file_path": "src/delivery/utils/say.rs", "rank": 78, "score": 136781.8112012552 }, { "content": "pub fn turn_off_color() {\n\n unsafe {\n\n COLORIZE = false;\n\n }\n\n}\n\n\n", "file_path": "src/delivery/utils/say.rs", "rank": 79, "score": 136781.8112012552 }, { "content": "pub fn handle_review_result(\n\n review: &ReviewResult,\n\n no_open: &bool,\n\n) -> DeliveryResult<Option<String>> {\n\n match review.url {\n\n Some(ref url) => {\n\n if !no_open {\n\n try!(utils::open::item(&url));\n\n }\n\n Ok(Some(url.clone()))\n\n }\n\n None => Ok(None),\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::root_dir;\n\n use std::path::Path;\n\n\n", "file_path": "src/delivery/project/mod.rs", "rank": 80, "score": 135099.9607518345 }, { "content": "// Create a Delivery Pipeline.\n\n// Returns true if created, returns false if already exists.\n\npub fn create_delivery_pipeline(\n\n client: &APIClient,\n\n org: &str,\n\n proj: &str,\n\n pipe: &str,\n\n) -> DeliveryResult<bool> {\n\n if client.pipeline_exists(org, proj, pipe) {\n\n return Ok(false);\n\n } else {\n\n try!(client.create_pipeline(org, proj, pipe, Some(pipe)));\n\n return Ok(true);\n\n }\n\n}\n\n\n", "file_path": "src/delivery/project/mod.rs", "rank": 81, 
"score": 135099.9607518345 }, { "content": "pub fn setup_and_start_stunnel(\n\n config: &Config,\n\n child_processes: &mut Vec<std::process::Child>,\n\n) -> DeliveryResult<()> {\n\n if !Path::new(&stunnel_path()).exists() {\n\n return Err(DeliveryError {\n\n kind: Kind::FipsNotSupportedForChefDKPlatform,\n\n detail: None,\n\n });\n\n }\n\n\n\n let server = validate!(config, server);\n\n let fips_git_port = validate!(config, fips_git_port);\n\n try!(generate_stunnel_config(\n\n &try!(stunnel_config_path()),\n\n &server,\n\n &fips_git_port,\n\n config.fips_custom_cert_filename.as_ref()\n\n ));\n\n try!(start_stunnel(child_processes));\n\n Ok(())\n\n}\n\n\n", "file_path": "src/delivery/fips/mod.rs", "rank": 82, "score": 135099.9607518345 }, { "content": "pub fn root() -> PathBuf {\n\n let mut exe = exe_path(); // support\n\n exe.pop(); // tests/\n\n exe.pop(); // debug/\n\n exe.pop(); // target/\n\n exe.pop(); // delivery-cli/\n\n exe.join(\"tests\")\n\n}\n\n\n", "file_path": "tests/support/paths.rs", "rank": 83, "score": 134304.587331548 }, { "content": "pub fn fixtures() -> PathBuf {\n\n root().join_many(&[\"fixtures\"])\n\n}\n\n\n", "file_path": "tests/support/paths.rs", "rank": 84, "score": 134304.587331548 }, { "content": "/// Builds the command to run delivery review\n\nfn delivery_review_command(pipeline: &str) -> Command {\n\n let mut command = delivery_cmd();\n\n command\n\n .arg(\"review\")\n\n .arg(\"--no-open\")\n\n .arg(\"--for\")\n\n .arg(pipeline);\n\n command\n\n}\n\n\n", "file_path": "tests/cli.rs", "rank": 85, "score": 133744.34767832592 }, { "content": "pub fn merge_fips_options_and_config(\n\n fips: bool,\n\n fips_git_port: &str,\n\n fips_custom_cert_filename: &str,\n\n mut config: Config,\n\n) -> DeliveryResult<Config> {\n\n if config.fips.is_none() {\n\n config.fips = Some(fips);\n\n }\n\n\n\n // Only override config default with option if user specified something\n\n // other than the default of empty string.\n\n if 
fips_custom_cert_filename.len() > 0 {\n\n config.fips_custom_cert_filename = Some(String::from(fips_custom_cert_filename));\n\n }\n\n\n\n let new_config = config.set_fips_git_port(fips_git_port);\n\n Ok(new_config)\n\n}\n\n\n", "file_path": "src/delivery/fips/mod.rs", "rank": 86, "score": 133486.56704990542 }, { "content": "pub fn parse_git_push_output(\n\n push_output: &str,\n\n push_error: &str,\n\n) -> Result<ReviewResult, DeliveryError> {\n\n let mut review_result = ReviewResult::default();\n\n for line in push_error.lines() {\n\n debug!(\"error: {}\", line);\n\n if line.starts_with(\"remote\") {\n\n parse_line_from_remote(&line, &mut review_result);\n\n }\n\n }\n\n for line in push_output.lines() {\n\n debug!(\"output: {}\", line);\n\n if line.starts_with(\"To\") || line.starts_with(\"Done\") {\n\n continue;\n\n }\n\n let r = Regex::new(r\"^(.)\\t(.*):(.+)\\t(?:\\[(.+)\\]|([^ ]+))(?: \\((.+)\\))?$\").unwrap();\n\n let caps_result = r.captures(line);\n\n let caps = match caps_result {\n\n Some(caps) => caps,\n", "file_path": "src/delivery/git/mod.rs", "rank": 87, "score": 133486.56704990542 }, { "content": "// Create the feature branch `add-delivery-config`\n\n//\n\n// This branch is created to start modifying the project repository\n\n// In the case of a failure, we could roll back fairly easy by checking\n\n// out master and deleting this feature branch.\n\n//\n\n// If feature branch created, return true, else return false.\n\npub fn create_feature_branch_if_missing(\n\n project_path: &PathBuf,\n\n branch_name: &str,\n\n) -> DeliveryResult<bool> {\n\n match git::git_command(&[\"checkout\", \"-b\", branch_name], project_path) {\n\n Ok(_) => {\n\n return Ok(true);\n\n }\n\n Err(e) => {\n\n match e.detail.clone() {\n\n Some(msg) => {\n\n if msg.contains(&format!(\"A branch named '{}' already exists\", branch_name)) {\n\n try!(git::git_command(&[\"checkout\", branch_name], project_path));\n\n return Ok(false);\n\n } else {\n\n return Err(e);\n\n }\n\n }\n\n // 
Unexpected error, raise.\n\n None => Err(e),\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/delivery/project/mod.rs", "rank": 88, "score": 133486.56704990542 }, { "content": "pub fn privileged_process() -> bool {\n\n true\n\n}\n\n// -------------------\n\n// end dummy functions\n\n// -------------------\n", "file_path": "src/delivery/utils/windows.rs", "rank": 89, "score": 132622.73688212733 }, { "content": "// Return the current directory path\n\npub fn cwd() -> PathBuf {\n\n env::current_dir().unwrap()\n\n}\n\n\n", "file_path": "src/delivery/utils/mod.rs", "rank": 90, "score": 132622.73688212733 }, { "content": "pub fn privileged_process() -> bool {\n\n match unsafe { libc::getuid() } {\n\n 0 => true,\n\n _ => false,\n\n }\n\n}\n\n\n", "file_path": "src/delivery/utils/unix.rs", "rank": 91, "score": 132622.73688212733 }, { "content": "pub fn exe_path() -> PathBuf {\n\n env::current_exe().unwrap()\n\n}\n\n\n", "file_path": "tests/support/paths.rs", "rank": 92, "score": 132622.73688212733 }, { "content": "fn maybe_add_flag_value(cmd: &mut process::Command, flag: &str, value: &str) {\n\n if !value.is_empty() {\n\n cmd.arg(flag).arg(value);\n\n }\n\n}\n\n\n", "file_path": "src/delivery/command/job.rs", "rank": 93, "score": 132286.12675827125 }, { "content": "// Return the path to the metadata.rb file\n\nfn metadata_file(path: &PathBuf) -> String {\n\n let mut metadata = path.to_str().unwrap().to_string();\n\n metadata.push_str(\"/metadata.rb\");\n\n return metadata;\n\n}\n\n\n", "file_path": "src/delivery/cookbook/mod.rs", "rank": 94, "score": 132185.04782073162 }, { "content": "fn say_term(mut t: Box<term::StdoutTerminal>, color: &str, to_say: &str) {\n\n let color_const = match color {\n\n \"success\" => term::color::BRIGHT_GREEN,\n\n \"error\" => term::color::BRIGHT_RED,\n\n \"green\" => term::color::BRIGHT_GREEN,\n\n \"yellow\" => term::color::BRIGHT_YELLOW,\n\n \"red\" => term::color::BRIGHT_RED,\n\n \"magenta\" => term::color::BRIGHT_MAGENTA,\n\n \"white\" => 
term::color::WHITE,\n\n \"cyan\" => term::color::BRIGHT_CYAN,\n\n _ => term::color::WHITE,\n\n };\n\n t.fg(color_const).unwrap();\n\n t.write_all(to_say.as_bytes()).unwrap();\n\n t.reset().unwrap();\n\n io::stdout().flush().ok().expect(\"Could not flush stdout\");\n\n}\n\n\n", "file_path": "src/delivery/utils/say.rs", "rank": 95, "score": 132017.34997033293 }, { "content": "// Generate the build_cookbook using ChefDK generate\n\npub fn chef_generate_build_cookbook_from_generator(\n\n generator: &Path,\n\n project_path: &Path,\n\n) -> DeliveryResult<Command> {\n\n let mut command = utils::make_command(\"chef\");\n\n command\n\n .arg(\"generate\")\n\n .arg(\"build-cookbook\")\n\n .arg(\".\")\n\n .arg(\"-g\")\n\n .arg(generator)\n\n .current_dir(&project_path);\n\n\n\n let output = command.output()?;\n\n cmd_success_or_err(&output, Kind::ChefdkGenerateFailed)?;\n\n Ok(command)\n\n}\n\n\n", "file_path": "src/delivery/project/mod.rs", "rank": 96, "score": 131942.07930504886 }, { "content": "use hyper::status::StatusCode;\n\nuse serde_json;\n\nuse token::TokenStore;\n\n\n\n#[derive(Serialize, Deserialize, Debug, Clone)]\n\npub struct TokenRequest {\n\n username: String,\n\n password: String,\n\n}\n\n\n\nimpl TokenRequest {\n\n pub fn payload(user: &str, pass: &str) -> Result<String, DeliveryError> {\n\n let treq = TokenRequest {\n\n username: String::from(user),\n\n password: String::from(pass),\n\n };\n\n let payload = serde_json::to_string(&treq)?;\n\n Ok(payload)\n\n }\n\n}\n", "file_path": "src/delivery/http/token.rs", "rank": 98, "score": 38.59078400545128 }, { "content": "use errors::DeliveryError;\n\nuse errors::Kind::UserNotFound;\n\nuse http::APIClient;\n\nuse serde_json;\n\nuse types::DeliveryResult;\n\n\n\n#[derive(Deserialize, Debug)]\n\npub struct User {\n\n pub first: String,\n\n pub last: String,\n\n pub name: String,\n\n pub email: String,\n\n pub user_type: String,\n\n ssh_pub_key: String,\n\n}\n\n\n\nimpl Default for User {\n\n fn default() -> Self 
{\n\n User {\n\n first: String::from(\"\"),\n", "file_path": "src/delivery/user/mod.rs", "rank": 99, "score": 34.247950233971224 } ]
Rust
mutagen-core/src/transformer/arg_ast.rs
alarsyo/mutagen
f8249256c40769c916b5b00bd284f204d5540588
use proc_macro2::{Delimiter, TokenStream, TokenTree};

/// A parsed list of attribute arguments, e.g. the `a, b(c), x = y` inside
/// an attribute like `#[attr(a, b(c), x = y)]`.
#[derive(Debug, Eq, PartialEq, Clone)]
pub struct ArgAstList(pub Vec<ArgAst>);

/// A single attribute argument: either function-like (`name` / `name(...)`)
/// or a key-value pair (`name = val`).
#[derive(Debug, Eq, PartialEq, Clone)]
pub enum ArgAst {
    ArgFn(ArgFn),
    ArgEq(ArgEq),
}

/// A function-like argument: a name plus a (possibly empty) nested argument
/// list, e.g. `foo` or `foo(bar, baz(1))`.
#[derive(Debug, Eq, PartialEq, Clone)]
pub struct ArgFn {
    pub name: String,
    pub args: ArgAstList,
}

/// A key-value argument, e.g. `key = val` or `key = val(args)`.
///
/// The value side is itself a function-like argument, so chained forms like
/// `a = b = c` are rejected during parsing.
#[derive(Debug, Eq, PartialEq, Clone)]
pub struct ArgEq {
    pub name: String,
    pub val: ArgFn,
}

impl ArgAstList {
    /// Parses a comma-separated list of arguments from a raw token stream.
    ///
    /// Each argument must start with an identifier or a literal (its name);
    /// any other leading token yields `Err(())`.
    pub fn parse_list(input: TokenStream) -> Result<Self, ()> {
        let mut args = Vec::new();
        let mut tt_iter = input.into_iter();
        while let Some(next) = tt_iter.next() {
            // The leading token is the argument's name: an ident or a literal.
            let name = if let TokenTree::Ident(next) = next {
                next.to_string()
            } else if let TokenTree::Literal(next) = next {
                next.to_string()
            } else {
                return Err(());
            };
            // `parse_single` consumes the remainder of this argument,
            // including any trailing comma.
            args.push(ArgAst::parse_single(name, &mut tt_iter)?);
        }
        Ok(Self(args))
    }

    /// Looks up the value of the key-value argument with the given name.
    ///
    /// Returns `Ok(None)` if no argument with that name exists, and `Err(())`
    /// if a matching argument is not a key-value pair or if the name occurs
    /// more than once.
    pub fn find_named_arg(&self, name: &str) -> Result<Option<&ArgFn>, ()> {
        let named_args = self
            .0
            .iter()
            .filter(|ast| ast.name() == name)
            // Every match must be a `name = val` argument.
            .map(|ast| Ok(&ast.expect_eq_ref()?.val))
            .collect::<Result<Vec<&ArgFn>, ()>>()?;
        if named_args.len() > 1 {
            // Duplicate named arguments are ambiguous.
            return Err(());
        }
        Ok(named_args.get(0).copied())
    }
}

impl ArgAst {
    /// Convenience constructor for a function-like argument variant.
    fn new_fn(name: String, args: ArgAstList) -> Self {
        Self::ArgFn(ArgFn::new(name, args))
    }

    /// Convenience constructor for a key-value argument variant.
    fn new_eq(name: String, val: ArgFn) -> Self {
        Self::ArgEq(ArgEq::new(name, val))
    }

    /// Returns the argument's name, regardless of variant.
    pub fn name(&self) -> &str {
        match self {
            ArgAst::ArgFn(ArgFn { name, .. }) => name,
            ArgAst::ArgEq(ArgEq { name, .. }) => name,
        }
    }

    /// Parses the remainder of a single argument whose leading `name` token
    /// has already been consumed by the caller.
    ///
    /// Consumes up to and including the argument's trailing comma (if any)
    /// and handles three shapes:
    /// * a bare name (end of stream or `,` comes next),
    /// * `name(args...)` (a parenthesized group comes next),
    /// * `name = val` (an `=` punct comes next; the value is parsed
    ///   recursively and must itself be function-like).
    fn parse_single(
        name: String,
        tt_iter: &mut impl Iterator<Item = TokenTree>,
    ) -> Result<Self, ()> {
        match tt_iter.next() {
            // End of stream: a bare argument with no nested args.
            None => return Ok(Self::new_fn(name, ArgAstList(vec![]))),
            Some(TokenTree::Group(g)) => {
                // Only `(...)` groups are valid argument lists; `[...]` and
                // `{...}` are parse errors here.
                if g.delimiter() != Delimiter::Parenthesis {
                    return Err(());
                }
                let args = ArgAstList::parse_list(g.stream())?;
                // After the group, only a comma or end of stream may follow.
                tt_expect_comma_or_end(tt_iter)?;
                return Ok(Self::new_fn(name, args));
            }
            Some(TokenTree::Punct(p)) => {
                if p.as_char() == ',' {
                    // Bare argument followed by further arguments.
                    return Ok(Self::new_fn(name, ArgAstList(vec![])));
                }
                if p.as_char() != '=' {
                    return Err(());
                }
                // `name = ...`: the value must start with an ident or literal.
                let next = tt_iter.next();
                let next = if let Some(TokenTree::Ident(next)) = next {
                    next.to_string()
                } else if let Some(TokenTree::Literal(next)) = next {
                    next.to_string()
                } else {
                    return Err(());
                };
                // The value is parsed like any argument but must be
                // function-like, which rejects chains such as `a = b = c`.
                let val = Self::parse_single(next, tt_iter)?.expect_fn()?;
                return Ok(Self::new_eq(name, val));
            }
            _ => return Err(()),
        }
    }

    /// Consumes `self` and returns the inner `ArgFn`, or `Err(())` if this
    /// is a key-value argument.
    pub fn expect_fn(self) -> Result<ArgFn, ()> {
        match self {
            ArgAst::ArgFn(f) => Ok(f),
            ArgAst::ArgEq(_) => Err(()),
        }
    }

    /// Borrows the inner `ArgFn`, or `Err(())` if this is a key-value
    /// argument.
    pub fn expect_fn_ref(&self) -> Result<&ArgFn, ()> {
        match self {
            ArgAst::ArgFn(f) => Ok(f),
            ArgAst::ArgEq(_) => Err(()),
        }
    }

    /// Borrows the inner `ArgEq`, or `Err(())` if this is a function-like
    /// argument.
    pub fn expect_eq_ref(&self) -> Result<&ArgEq, ()> {
        match self {
            ArgAst::ArgFn(_) => Err(()),
            ArgAst::ArgEq(e) => Ok(e),
        }
    }
}

/// Consumes the next token and checks that it is either a comma or the end
/// of the stream; any other token is a parse error.
fn tt_expect_comma_or_end(tt_iter: &mut impl Iterator<Item = TokenTree>) -> Result<(), ()> {
    match tt_iter.next() {
        None => {}
        Some(TokenTree::Punct(p)) => {
            if p.as_char() != ',' {
                return Err(());
            }
        }
        _ => return Err(()),
    }
    Ok(())
}

impl ArgFn {
    /// Creates a function-like argument from its name and nested arguments.
    pub fn new(name: String, args: ArgAstList) -> Self {
        Self { name, args }
    }
}

impl ArgEq {
    /// Creates a key-value argument from its name and value.
    pub fn new(name: String, val: ArgFn) -> Self {
        Self { name, val }
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    use proc_macro2::TokenStream;
    use std::str::FromStr;

    // An empty stream parses to an empty argument list.
    #[test]
    fn no_args() {
        let input = TokenStream::new();
        let parsed = ArgAstList::parse_list(input);
        assert_eq!(parsed, Ok(ArgAstList(vec![])));
    }

    #[test]
    fn single_arg() {
        let input = TokenStream::from_str("a1").unwrap();
        let parsed = ArgAstList::parse_list(input);
        let expected = ArgAst::new_fn("a1".to_string(), ArgAstList(vec![]));
        assert_eq!(parsed, Ok(ArgAstList(vec![expected])));
    }

    // Literal tokens (e.g. integers) are accepted as argument names.
    #[test]
    fn single_arg_int() {
        let input = TokenStream::from_str("1").unwrap();
        let parsed = ArgAstList::parse_list(input);
        let expected = ArgAst::new_fn("1".to_string(), ArgAstList(vec![]));
        assert_eq!(parsed, Ok(ArgAstList(vec![expected])));
    }

    #[test]
    fn single_arg_with_args() {
        let input = TokenStream::from_str("a2(x, y, z)").unwrap();
        let parsed = ArgAstList::parse_list(input);
        let mut expected = ArgFn::new("a2".to_string(), ArgAstList(vec![]));
        expected
            .args
            .0
            .push(ArgAst::new_fn("x".to_string(), ArgAstList(vec![])));
        expected
            .args
            .0
            .push(ArgAst::new_fn("y".to_string(), ArgAstList(vec![])));
        expected
            .args
            .0
            .push(ArgAst::new_fn("z".to_string(), ArgAstList(vec![])));
        let expected = ArgAst::ArgFn(expected);
        assert_eq!(parsed, Ok(ArgAstList(vec![expected])));
    }

    #[test]
    fn single_arg_with_trailing_comma() {
        let input = TokenStream::from_str("a2(x,)").unwrap();
        let parsed = ArgAstList::parse_list(input);
        let mut expected = ArgFn::new("a2".to_string(), ArgAstList(vec![]));
        expected
            .args
            .0
            .push(ArgAst::new_fn("x".to_string(), ArgAstList(vec![])));
        let expected = ArgAst::ArgFn(expected);
        assert_eq!(parsed, Ok(ArgAstList(vec![expected])));
    }

    #[test]
    fn single_arg_with_eq_args() {
        let input = TokenStream::from_str("a2(x=a)").unwrap();
        let parsed = ArgAstList::parse_list(input);
        let mut expected = ArgFn::new("a2".to_string(), ArgAstList(vec![]));
        expected.args.0.push(ArgAst::new_eq(
            "x".to_string(),
            ArgFn::new("a".to_owned(), ArgAstList(vec![])),
        ));
        let expected = ArgAst::ArgFn(expected);
        assert_eq!(parsed, Ok(ArgAstList(vec![expected])));
    }

    // The value of `=` must be function-like, so `a = b = c` is an error.
    #[test]
    fn chained_eq_gives_error() {
        let input = TokenStream::from_str("a = b = c").unwrap();
        let parsed = ArgAstList::parse_list(input);
        assert_eq!(parsed, Err(()));
    }

    #[test]
    fn multiple_args() {
        let input = TokenStream::from_str("a2, b5").unwrap();
        let parsed = ArgAstList::parse_list(input);
        let expected1 = ArgAst::new_fn("a2".to_string(), ArgAstList(vec![]));
        let expected2 = ArgAst::new_fn("b5".to_string(), ArgAstList(vec![]));
        assert_eq!(parsed, Ok(ArgAstList(vec![expected1, expected2])));
    }

    #[test]
    fn nested_args() {
        let input = TokenStream::from_str("g55(h3(X))").unwrap();
        let parsed = ArgAstList::parse_list(input);
        let mut expected = ArgFn::new("g55".to_string(), ArgAstList(vec![]));
        let mut expected1 = ArgFn::new("h3".to_string(), ArgAstList(vec![]));
        expected1
            .args
            .0
            .push(ArgAst::new_fn("X".to_string(), ArgAstList(vec![])));
        expected.args.0.push(ArgAst::ArgFn(expected1));
        let expected = ArgAst::ArgFn(expected);
        assert_eq!(parsed, Ok(ArgAstList(vec![expected])));
    }

    #[test]
    fn nested_args_with_trailing_arg() {
        let input = TokenStream::from_str("g55(h3(X), z)").unwrap();
        let parsed = ArgAstList::parse_list(input);
        let mut expected = ArgFn::new("g55".to_string(), ArgAstList(vec![]));
        let mut expected1 = ArgFn::new("h3".to_string(), ArgAstList(vec![]));
        expected1
            .args
            .0
            .push(ArgAst::new_fn("X".to_string(), ArgAstList(vec![])));
        expected.args.0.push(ArgAst::ArgFn(expected1));
        expected
            .args
            .0
            .push(ArgAst::new_fn("z".to_string(), ArgAstList(vec![])));
        let expected = ArgAst::ArgFn(expected);
        assert_eq!(parsed, Ok(ArgAstList(vec![expected])));
    }

    #[test]
    fn single_arg_with_eq_args_nested() {
        let input = TokenStream::from_str("a2(x=a(b))").unwrap();
        let parsed = ArgAstList::parse_list(input);
        let mut expected = ArgFn::new("a2".to_string(), ArgAstList(vec![]));
        let mut expected1 = ArgFn::new("a".to_owned(), ArgAstList(vec![]));
        expected1
            .args
            .0
            .push(ArgAst::new_fn("b".to_owned(), ArgAstList(vec![])));
        expected
            .args
            .0
            .push(ArgAst::new_eq("x".to_string(), expected1));
        let expected = ArgAst::ArgFn(expected);
        assert_eq!(parsed, Ok(ArgAstList(vec![expected])));
    }

    #[test]
    fn list_of_eq_args() {
        let input = TokenStream::from_str("x = a, y = b").unwrap();
        let parsed = ArgAstList::parse_list(input);
        let expected1 = ArgEq::new(
            "x".to_string(),
            ArgFn::new("a".to_owned(), ArgAstList(vec![])),
        );
        let expected2 = ArgEq::new(
            "y".to_string(),
            ArgFn::new("b".to_owned(), ArgAstList(vec![])),
        );
        let expected = ArgAstList(vec![ArgAst::ArgEq(expected1), ArgAst::ArgEq(expected2)]);
        assert_eq!(parsed, Ok(expected));
    }
}
use proc_macro2::{Delimiter, TokenStream, TokenTree}; #[derive(Debug, Eq, PartialEq, Clone)] pub struct ArgAstList(pub Vec<ArgAst>); #[derive(Debug, Eq, PartialEq, Clone)] pub enum ArgAst { ArgFn(ArgFn), ArgEq(ArgEq), } #[derive(Debug, Eq, PartialEq, Clone)] pub struct ArgFn { pub name: String, pub args: ArgAstList, } #[derive(Debug, Eq, PartialEq, Clone)] pub struct ArgEq { pub name: String, pub val: ArgFn, } impl ArgAstList { pub fn parse_list(input: TokenStream) -> Result<Self, ()> { let mut args = Vec::new(); let mut tt_iter = input.into_iter(); while let Some(next) = tt_iter.next() { let name = if let TokenTree::Ident(next) = next { next.to_string() } else if let TokenTree::Literal(next) = next { next.to_string() } else { return Err(()); }; args.push(ArgAst::parse_single(name, &mut tt_iter)?); } Ok(Self(args)) } pub fn find_named_arg(&self, name: &str) -> Result<Option<&ArgFn>, ()> { let named_args = self .0 .iter() .filter(|ast| ast.name() == name) .map(|ast| Ok(&ast.expect_eq_ref()?.val)) .collect::<Result<Vec<&ArgFn>, ()>>()?; if named_args.len() > 1 { return Err(()); } Ok(named_args.get(0).copied()) } } impl ArgAst { fn new_fn(name: String, args: ArgAstList) -> Self { Self::ArgFn(ArgFn::new(name, args)) } fn new_eq(name: String, val: ArgFn) -> Self { Self::ArgEq(ArgEq::new(name, val)) } pub fn name(&self) -> &str { match self { ArgAst::ArgFn(ArgFn { name, .. }) => name, ArgAst::ArgEq(ArgEq { name, .. 
}) => name, } } fn parse_single( name: String, tt_iter: &mut impl Iterator<Item = TokenTree>, ) -> Result<Self, ()> { match tt_iter.next() { None => return Ok(Self::new_fn(name, ArgAstList(vec![]))), Some(TokenTree::Group(g)) => { if g.delimiter() != Delimiter::Parenthesis { return Err(()); } let args = ArgAstList::parse_list(g.stream())?; tt_expect_comma_or_end(tt_iter)?; return Ok(Self::new_fn(name, args)); } Some(TokenTree::Punct(p)) => { if p.as_char() == ',' { return Ok(Self::new_fn(name, ArgAstList(vec![]))); } if p.as_char() != '=' { return Err(()); } let next = tt_iter.next(); let next = if let Some(TokenTree::Ident(next)) = next { next.to_string() } else if let Some(TokenTree::Literal(next)) = next { next.to_string() } else { return Err(()); }; let val = Self::parse_single(next, tt_iter)?.expect_fn()?; return Ok(Self::new_eq(name, val)); } _ => return Err(()), } } pub fn expect_fn(self) -> Result<ArgFn, ()> { match self { ArgAst::ArgFn(f) => Ok(f), ArgAst::ArgEq(_) => Err(()), } } pub fn expect_fn_ref(&self) -> Result<&ArgFn, ()> { match self { ArgAst::ArgFn(f) => Ok(f), ArgAst::ArgEq(_) => Err(()), } } pub fn expect_eq_ref(&self) -> Result<&ArgEq, ()> { match self { ArgAst::ArgFn(_) => Err(()), ArgAst::ArgEq(e) => Ok(e), } } } fn tt_expect_comma_or_end(tt_iter: &mut impl Iterator<Item = TokenTree>) -> Result<(), ()> { match tt_iter.next() { None => {} Some(TokenTree::Punct(p)) => { if p.as_char() != ',' { return Err(()); } } _ => return Err(()), } Ok(()) } impl ArgFn { pub fn new(name: String, args: ArgAstList) -> Self { Self { name, args } } } impl ArgEq { pub fn new(name: String, val: ArgFn) -> Self { Self { name, val } } } #[cfg(test)] mod tests { use super::*; use proc_macro2::TokenStream; use std::str::FromStr; #[test] fn no_args() { let input = TokenStream::new(); let parsed = ArgAstList::parse_list(input); assert_eq!(parsed, Ok(ArgAstList(vec![]))); } #[test] fn single_arg() { let input = TokenStream::from_str("a1").unwrap(); let parsed = 
ArgAstList::parse_list(input); let expected = ArgAst::new_fn("a1".to_string(), ArgAstList(vec![])); assert_eq!(parsed, Ok(ArgAstList(vec![expected]))); } #[test] fn single_arg_int() { let input = TokenStream::from_str("1").unwrap(); let parsed = ArgAstList::parse_list(input); let expected = ArgAst::new_fn("1".to_string(), ArgAstList(vec![])); assert_eq!(parsed, Ok(ArgAstList(vec![expected]))); } #[test] fn single_arg_with_args() { let input = TokenStream::from_str("a2(x, y, z)").unwrap(); let parsed = ArgAstList::parse_list(input); let mut expected = ArgFn::new("a2".to_string(), ArgAstList(vec![])); expected .args .0 .push(ArgAst::new_fn("x".to_string(), ArgAstList(vec![]))); expected .args .0 .push(ArgAst::new_fn("y".to_string(), ArgAstList(vec![]))); expected .args .0 .push(ArgAst::new_fn("z".to_string(), ArgAstList(vec![]))); let expected = ArgAst::ArgFn(expected); assert_eq!(parsed, Ok(ArgAstList(vec![expected]))); } #[test] fn single_arg_with_trailing_comma() { let input = TokenStream::from_str("a2(x,)").unwrap(); let parsed = ArgAstList::parse_list(input); let mut expected = ArgFn::new("a2".to_string(), ArgAstList(vec![])); expected .args .0 .push(ArgAst::new_fn("x".to_string(), ArgAstList(vec![]))); let expected = ArgAst::ArgFn(expected); assert_eq!(parsed, Ok(ArgAstList(vec![expected]))); } #[test] fn single_arg_with_eq_args() { let input = TokenStream::from_str("a2(x=a)").unwrap(); let parsed = ArgAstList::parse_list(input); let mut expected = ArgFn::new("a2".to_string(), ArgAstList(vec![])); expected.args.0.push(ArgAst::new_eq( "x".to_string(), ArgFn::new("a".to_owned(), ArgAstList(vec![])), )); let expected = ArgAst::ArgFn(expected); assert_eq!(parsed, Ok(ArgAstList(vec![expected]))); } #[test] fn chained_eq_gives_error() { let input = TokenStream::from_str("a = b = c").unwrap(); let parsed = ArgAstList::parse_list(input); assert_eq!(parsed, Err(())); } #[test] fn multiple_args() { let input = TokenStream::from_str("a2, b5").unwrap(); let parsed = 
ArgAstList::parse_list(input); let expected1 = ArgAst::new_fn("a2".to_string(), ArgAstList(vec![])); let expected2 = ArgAst::new_fn("b5".to_string(), ArgAstList(vec![])); assert_eq!(parsed, Ok(ArgAstList(vec![expected1, expected2]))); } #[test] fn nested_args() { let input = TokenStream::from_str("g55(h3(X))").unwrap(); let parsed = ArgAstList::parse_list(input); let mut expected = ArgFn::new("g55".to_string(), ArgAstList(vec![])); let mut expected1 = ArgFn::new("h3".to_string(), ArgAstList(vec![])); expected1 .args .0 .push(ArgAst::new_fn("X".to_string(), ArgAstList(vec![])));
#[test] fn nested_args_with_trailing_arg() { let input = TokenStream::from_str("g55(h3(X), z)").unwrap(); let parsed = ArgAstList::parse_list(input); let mut expected = ArgFn::new("g55".to_string(), ArgAstList(vec![])); let mut expected1 = ArgFn::new("h3".to_string(), ArgAstList(vec![])); expected1 .args .0 .push(ArgAst::new_fn("X".to_string(), ArgAstList(vec![]))); expected.args.0.push(ArgAst::ArgFn(expected1)); expected .args .0 .push(ArgAst::new_fn("z".to_string(), ArgAstList(vec![]))); let expected = ArgAst::ArgFn(expected); assert_eq!(parsed, Ok(ArgAstList(vec![expected]))); } #[test] fn single_arg_with_eq_args_nested() { let input = TokenStream::from_str("a2(x=a(b))").unwrap(); let parsed = ArgAstList::parse_list(input); let mut expected = ArgFn::new("a2".to_string(), ArgAstList(vec![])); let mut expected1 = ArgFn::new("a".to_owned(), ArgAstList(vec![])); expected1 .args .0 .push(ArgAst::new_fn("b".to_owned(), ArgAstList(vec![]))); expected .args .0 .push(ArgAst::new_eq("x".to_string(), expected1)); let expected = ArgAst::ArgFn(expected); assert_eq!(parsed, Ok(ArgAstList(vec![expected]))); } #[test] fn list_of_eq_args() { let input = TokenStream::from_str("x = a, y = b").unwrap(); let parsed = ArgAstList::parse_list(input); let expected1 = ArgEq::new( "x".to_string(), ArgFn::new("a".to_owned(), ArgAstList(vec![])), ); let expected2 = ArgEq::new( "y".to_string(), ArgFn::new("b".to_owned(), ArgAstList(vec![])), ); let expected = ArgAstList(vec![ArgAst::ArgEq(expected1), ArgAst::ArgEq(expected2)]); assert_eq!(parsed, Ok(expected)); } }
expected.args.0.push(ArgAst::ArgFn(expected1)); let expected = ArgAst::ArgFn(expected); assert_eq!(parsed, Ok(ArgAstList(vec![expected]))); }
function_block-function_prefix_line
[ { "content": "pub fn do_transform_item(args: TokenStream, input: TokenStream) -> TokenStream {\n\n let input = match syn::parse2::<syn::Item>(input) {\n\n Ok(ast) => ast,\n\n Err(e) => return TokenStream::from(e.to_compile_error()),\n\n };\n\n MutagenTransformerBundle::setup_from_attr(args.into()).mutagen_process_item(input)\n\n}\n\n\n\npub enum MutagenTransformer {\n\n Expr(Box<MutagenExprTransformer>),\n\n Stmt(Box<MutagenStmtTransformer>),\n\n}\n\n\n\npub struct MutagenTransformerBundle {\n\n transform_info: SharedTransformInfo,\n\n transform_context: TransformContext,\n\n expr_transformers: Vec<Box<MutagenExprTransformer>>,\n\n stmt_transformers: Vec<Box<MutagenStmtTransformer>>,\n\n}\n\n\n", "file_path": "mutagen-core/src/transformer.rs", "rank": 1, "score": 175928.81117135024 }, { "content": "#[cfg_attr(test, mutate)]\n\npub fn ggt_loop(mut a: u32, mut b: u32) -> u32 {\n\n loop {\n\n if a == 0 {\n\n return b;\n\n }\n\n if b == 0 {\n\n return a;\n\n }\n\n if a > b {\n\n a -= b;\n\n } else {\n\n b -= a;\n\n }\n\n }\n\n}\n\n\n", "file_path": "examples/simple/src/ggt.rs", "rank": 2, "score": 154618.1016985147 }, { "content": "#[cfg_attr(test, mutate)]\n\npub fn ggt_rec(mut a: u32, mut b: u32) -> u32 {\n\n if a == b || a == 0 || b == 0 {\n\n return a | b;\n\n }\n\n if a == 1 || b == 1 {\n\n return 1;\n\n }\n\n if a > b {\n\n ggt_rec(a - b, b)\n\n } else {\n\n ggt_rec(a, b - a)\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n\n\n use super::*;\n\n\n\n #[test]\n", "file_path": "examples/simple/src/ggt.rs", "rank": 3, "score": 154618.1016985147 }, { "content": "#[cfg_attr(test, mutate)]\n\npub fn bubblesort_while(arr: &mut [u8]) {\n\n let n = arr.len();\n\n let mut change = true;\n\n while change {\n\n change = false;\n\n for i in 1..n {\n\n if arr[i - 1] > arr[i] {\n\n arr.swap(i - 1, i);\n\n change = true;\n\n }\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n\n\n use super::*;\n\n\n\n #[test]\n", "file_path": "examples/simple/src/bubblesort.rs", "rank": 
4, "score": 140672.87453689525 }, { "content": "#[cfg_attr(test, mutate)]\n\npub fn bubblesort_for(arr: &mut [u8]) {\n\n let n = arr.len();\n\n for _ in 1..n {\n\n for i in 1..n {\n\n if arr[i - 1] > arr[i] {\n\n arr.swap(i - 1, i);\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "examples/simple/src/bubblesort.rs", "rank": 5, "score": 140672.87453689525 }, { "content": "pub fn run<L: PartialEq<R>, R>(\n\n mutator_id: usize,\n\n left: L,\n\n right: R,\n\n original_op: BinopEq,\n\n runtime: impl Deref<Target = MutagenRuntimeConfig>,\n\n) -> bool {\n\n runtime.covered(mutator_id);\n\n let mutations = MutationBinopEq::possible_mutations(original_op);\n\n if let Some(m) = runtime.get_mutation_for_mutator(mutator_id, &mutations) {\n\n m.mutate(left, right)\n\n } else {\n\n original_op.eq(left, right)\n\n }\n\n}\n\n\n", "file_path": "mutagen-core/src/mutator/mutator_binop_eq.rs", "rank": 6, "score": 135661.40162274323 }, { "content": "pub fn transform(\n\n e: Expr,\n\n transform_info: &SharedTransformInfo,\n\n context: &TransformContext,\n\n) -> Expr {\n\n let e = match ExprBinopEq::try_from(e) {\n\n Ok(e) => e,\n\n Err(e) => return e,\n\n };\n\n\n\n let mutator_id = transform_info.add_mutations(\n\n MutationBinopEq::possible_mutations(e.op)\n\n .iter()\n\n .map(|m| m.to_mutation(&e, context)),\n\n );\n\n\n\n let left = &e.left;\n\n let right = &e.right;\n\n let op = e.op_tokens();\n\n\n", "file_path": "mutagen-core/src/mutator/mutator_binop_eq.rs", "rank": 7, "score": 129735.50144712345 }, { "content": "#[cfg_attr(test, ::mutagen::mutate)]\n\npub fn fib(n: u32) -> u32 {\n\n if n == 0 || n == 1 {\n\n return n;\n\n }\n\n fib(n - 1) + fib(n - 2)\n\n}\n", "file_path": "examples/with-integration-tests-v1/src/lib.rs", "rank": 8, "score": 117306.05292791466 }, { "content": "#[cfg_attr(feature = \"mutationtest\", ::mutagen::mutate)]\n\npub fn fib(n: u32) -> u32 {\n\n if n == 0 || n == 1 {\n\n return n;\n\n }\n\n fib(n - 1) + fib(n - 2)\n\n}\n", "file_path": 
"examples/with-integration-tests-v2/src/lib.rs", "rank": 9, "score": 117302.57333358479 }, { "content": "pub fn stmt_call_to_none<T: StmtCallToNone>() -> T {\n\n <T as StmtCallToNone>::stmt_call_to_none()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n\n\n use super::*;\n\n\n\n #[test]\n\n fn stmt_inactive() {\n\n let result = should_run(1, &MutagenRuntimeConfig::without_mutation());\n\n assert_eq!(result, true);\n\n }\n\n #[test]\n\n fn stmt_active() {\n\n let result = should_run(1, &MutagenRuntimeConfig::with_mutation_id(1));\n\n assert_eq!(result, false);\n\n }\n\n}\n", "file_path": "mutagen-core/src/mutator/mutator_stmt_call.rs", "rank": 10, "score": 115744.29848177399 }, { "content": "pub fn append_item<T: Serialize + ?Sized>(file: &mut File, item: &T) -> Fallible<()> {\n\n let mut w = BufWriter::new(file);\n\n serde_json::to_writer(&mut w, item)?;\n\n writeln!(&mut w)?; // write newline\n\n Ok(())\n\n}\n", "file_path": "mutagen-core/src/comm/mutagen_files.rs", "rank": 11, "score": 106101.56764755856 }, { "content": "#[proc_macro_attribute]\n\npub fn mutate(\n\n attr: proc_macro::TokenStream,\n\n input: proc_macro::TokenStream,\n\n) -> proc_macro::TokenStream {\n\n do_transform_item(attr.into(), input.into()).into()\n\n}\n", "file_path": "mutagen-transform/src/lib.rs", "rank": 12, "score": 105153.1647135971 }, { "content": "pub fn should_run(mutator_id: usize, runtime: impl Deref<Target = MutagenRuntimeConfig>) -> bool {\n\n runtime.covered(mutator_id);\n\n // should run if mutation is inactive\n\n !runtime.is_mutation_active(mutator_id)\n\n}\n\n\n", "file_path": "mutagen-core/src/mutator/mutator_stmt_call.rs", "rank": 13, "score": 104955.17385232836 }, { "content": "/// Generate a string used for logging\n\nfn mutation_log_string(m: &BakedMutation) -> String {\n\n format!(\n\n \"{}: {}, {}, at {}@{}{}\",\n\n m.id(),\n\n m.mutator_name(),\n\n m.mutation_description(),\n\n m.source_file().display(),\n\n m.location_in_file(),\n\n 
m.context_description_in_brackets(),\n\n )\n\n}\n", "file_path": "mutagen-runner/src/progress.rs", "rank": 14, "score": 104940.98558775609 }, { "content": "#[cfg_attr(test, mutate)]\n\npub fn simple_assert_not_covered() {\n\n 1 < 3;\n\n}\n\n\n\n// There are no tests since the function above is supposed to be no covered by tests\n", "file_path": "examples/simple/src/not_covered.rs", "rank": 15, "score": 101106.72138653665 }, { "content": "pub fn transform(\n\n e: Expr,\n\n transform_info: &SharedTransformInfo,\n\n context: &TransformContext,\n\n) -> Expr {\n\n let e = match ExprUnopNot::try_from(e) {\n\n Ok(e) => e,\n\n Err(e) => return e,\n\n };\n\n\n\n let mutator_id = transform_info.add_mutation(Mutation::new_spanned(\n\n &context,\n\n \"unop_not\".to_owned(),\n\n \"!\".to_owned(),\n\n \"\".to_owned(),\n\n e.span(),\n\n ));\n\n\n\n let expr = &e.expr;\n\n let op_token = e.op_token;\n", "file_path": "mutagen-core/src/mutator/mutator_unop_not.rs", "rank": 16, "score": 101103.17366025932 }, { "content": "pub fn transform(\n\n e: Expr,\n\n transform_info: &SharedTransformInfo,\n\n context: &TransformContext,\n\n) -> Expr {\n\n let e = match ExprBinopBit::try_from(e) {\n\n Ok(e) => e,\n\n Err(e) => return e,\n\n };\n\n\n\n let mutator_id = transform_info.add_mutations(\n\n MutationBinopBit::possible_mutations(e.op)\n\n .iter()\n\n .map(|m| m.to_mutation(&e, context)),\n\n );\n\n\n\n let left = &e.left;\n\n let right = &e.right;\n\n\n\n let run_fn = match e.op {\n", "file_path": "mutagen-core/src/mutator/mutator_binop_bit.rs", "rank": 17, "score": 99260.2886188144 }, { "content": "pub fn transform(\n\n e: Expr,\n\n transform_info: &SharedTransformInfo,\n\n context: &TransformContext,\n\n) -> Expr {\n\n let e = match ExprLitBool::try_from(e) {\n\n Ok(e) => e,\n\n Err(e) => return e,\n\n };\n\n\n\n let mutator_id = transform_info.add_mutation(Mutation::new_spanned(\n\n &context,\n\n \"lit_bool\".to_owned(),\n\n format!(\"{:?}\", e.value),\n\n format!(\"{:?}\", 
!e.value),\n\n e.span,\n\n ));\n\n\n\n let value = e.value;\n\n syn::parse2(quote_spanned! {e.span=>\n\n ::mutagen::mutator::mutator_lit_bool::run(\n\n #mutator_id,\n\n #value,\n\n ::mutagen::MutagenRuntimeConfig::get_default()\n\n )\n\n })\n\n .expect(\"transformed code invalid\")\n\n}\n\n\n", "file_path": "mutagen-core/src/mutator/mutator_lit_bool.rs", "rank": 18, "score": 99260.2886188144 }, { "content": "pub fn run(\n\n mutator_id: usize,\n\n original_lit: bool,\n\n runtime: impl Deref<Target = MutagenRuntimeConfig>,\n\n) -> bool {\n\n runtime.covered(mutator_id);\n\n if runtime.is_mutation_active(mutator_id) {\n\n !original_lit\n\n } else {\n\n original_lit\n\n }\n\n}\n\n\n", "file_path": "mutagen-core/src/mutator/mutator_lit_bool.rs", "rank": 19, "score": 99260.2886188144 }, { "content": "pub fn transform(\n\n e: Expr,\n\n transform_info: &SharedTransformInfo,\n\n context: &TransformContext,\n\n) -> Expr {\n\n let e = match ExprBinopBool::try_from(e) {\n\n Ok(e) => e,\n\n Err(e) => return e,\n\n };\n\n\n\n let mutator_id = transform_info.add_mutations(\n\n MutationBinopBool::possible_mutations(e.op)\n\n .iter()\n\n .map(|m| m.to_mutation(&e, context)),\n\n );\n\n\n\n let left = &e.left;\n\n let right = &e.right;\n\n let op = e.op_tokens();\n\n\n", "file_path": "mutagen-core/src/mutator/mutator_binop_bool.rs", "rank": 20, "score": 99260.2886188144 }, { "content": "pub fn transform(\n\n e: Expr,\n\n transform_info: &SharedTransformInfo,\n\n context: &TransformContext,\n\n) -> Expr {\n\n let e = match ExprLitInt::try_from(e) {\n\n Ok(e) => e,\n\n Err(e) => return e,\n\n };\n\n\n\n let mutator_id = transform_info.add_mutations(\n\n MutationLitInt::possible_mutations(e.value)\n\n .into_iter()\n\n .map(|m| m.to_mutation(&e, context)),\n\n );\n\n\n\n let original_lit = e.lit;\n\n syn::parse2(quote_spanned! 
{e.span=>\n\n ::mutagen::mutator::mutator_lit_int::run(\n\n #mutator_id,\n\n #original_lit,\n\n ::mutagen::MutagenRuntimeConfig::get_default()\n\n )\n\n })\n\n .expect(\"transformed code invalid\")\n\n}\n\n\n", "file_path": "mutagen-core/src/mutator/mutator_lit_int.rs", "rank": 21, "score": 99260.2886188144 }, { "content": "pub fn transform(\n\n e: Expr,\n\n transform_info: &SharedTransformInfo,\n\n context: &TransformContext,\n\n) -> Expr {\n\n let e = match ExprBinopCmp::try_from(e) {\n\n Ok(e) => e,\n\n Err(e) => return e,\n\n };\n\n\n\n let mutator_id = transform_info.add_mutations(\n\n MutationBinopCmp::possible_mutations(e.op)\n\n .iter()\n\n .map(|m| m.to_mutation(&e, context)),\n\n );\n\n\n\n let left = &e.left;\n\n let right = &e.right;\n\n let op = e.op_tokens();\n\n\n", "file_path": "mutagen-core/src/mutator/mutator_binop_cmp.rs", "rank": 22, "score": 99260.2886188144 }, { "content": "pub fn transform(\n\n e: Expr,\n\n transform_info: &SharedTransformInfo,\n\n context: &TransformContext,\n\n) -> Expr {\n\n let e = match ExprBinopNum::try_from(e) {\n\n Ok(e) => e,\n\n Err(e) => return e,\n\n };\n\n\n\n let mutator_id = transform_info.add_mutations(\n\n MutationBinopNum::possible_mutations(e.op)\n\n .iter()\n\n .map(|m| m.to_mutation(&e, context)),\n\n );\n\n\n\n let left = &e.left;\n\n let right = &e.right;\n\n let run_fn = match e.op {\n\n BinopNum::Add => quote_spanned! 
{e.span()=> run_add},\n", "file_path": "mutagen-core/src/mutator/mutator_binop_num.rs", "rank": 23, "score": 99260.2886188144 }, { "content": "pub fn transform(\n\n s: Stmt,\n\n transform_info: &SharedTransformInfo,\n\n context: &TransformContext,\n\n) -> Stmt {\n\n let s = match StmtCall::try_from(s) {\n\n Ok(s) => s,\n\n Err(s) => return s,\n\n };\n\n\n\n let mutator_id = transform_info.add_mutation(Mutation::new_spanned(\n\n &context,\n\n \"stmt_call\".to_owned(),\n\n format!(\n\n \"{}\",\n\n context\n\n .original_stmt\n\n .to_token_stream()\n\n .to_string()\n\n .replace(\"\\n\", \" \")\n", "file_path": "mutagen-core/src/mutator/mutator_stmt_call.rs", "rank": 24, "score": 99260.2886188144 }, { "content": "#[cfg_attr(all(test, feature = \"with_mutagen\"), ::mutagen::mutate)]\n\npub fn foo() -> i32 {\n\n 1 + 2\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::foo;\n\n\n\n #[test]\n\n fn test_foo() {\n\n assert_eq!(foo(), 3);\n\n }\n\n}\n", "file_path": "examples/feature-gated/src/lib.rs", "rank": 25, "score": 97706.27390213966 }, { "content": "pub fn run_left(\n\n mutator_id: usize,\n\n original_op: BinopBool,\n\n left: bool,\n\n runtime: impl Deref<Target = MutagenRuntimeConfig>,\n\n) -> Option<bool> {\n\n runtime.covered(mutator_id);\n\n let mutations = MutationBinopBool::possible_mutations(original_op);\n\n let op = runtime\n\n .get_mutation_for_mutator(mutator_id, &mutations)\n\n .map(|m| m.op)\n\n .unwrap_or(original_op);\n\n op.short_circuit_left(left)\n\n}\n\n\n", "file_path": "mutagen-core/src/mutator/mutator_binop_bool.rs", "rank": 26, "score": 97524.67654616343 }, { "content": "#[derive(Copy, Clone, Debug, PartialEq, Eq)]\n\nstruct MutationBinopEq {\n\n op: BinopEq,\n\n}\n\n\n\nimpl MutationBinopEq {\n\n fn possible_mutations(original_op: BinopEq) -> Vec<Self> {\n\n [BinopEq::Eq, BinopEq::Ne]\n\n .iter()\n\n .copied()\n\n .filter(|&op| op != original_op)\n\n .map(|op| MutationBinopEq { op })\n\n .collect()\n\n }\n\n\n\n fn mutate<L: PartialEq<R>, 
R>(self, left: L, right: R) -> bool {\n\n self.op.eq(left, right)\n\n }\n\n\n\n fn to_mutation(self, original_op: &ExprBinopEq, context: &TransformContext) -> Mutation {\n\n Mutation::new_spanned(\n\n &context,\n\n \"binop_eq\".to_owned(),\n\n format!(\"{}\", original_op.op),\n\n format!(\"{}\", self.op),\n\n original_op.span,\n\n )\n\n }\n\n}\n\n\n", "file_path": "mutagen-core/src/mutator/mutator_binop_eq.rs", "rank": 27, "score": 95997.55464038326 }, { "content": "#[derive(Clone, Debug)]\n\nstruct ExprBinopEq {\n\n op: BinopEq,\n\n left: Expr,\n\n right: Expr,\n\n span: Span,\n\n}\n\n\n\nimpl TryFrom<Expr> for ExprBinopEq {\n\n type Error = Expr;\n\n fn try_from(expr: Expr) -> Result<Self, Expr> {\n\n match expr {\n\n Expr::Binary(expr) => match expr.op {\n\n BinOp::Eq(t) => Ok(ExprBinopEq {\n\n op: BinopEq::Eq,\n\n left: *expr.left,\n\n right: *expr.right,\n\n span: t.span(),\n\n }),\n\n BinOp::Ne(t) => Ok(ExprBinopEq {\n\n op: BinopEq::Ne,\n", "file_path": "mutagen-core/src/mutator/mutator_binop_eq.rs", "rank": 28, "score": 95993.55502466703 }, { "content": "#[cfg_attr(test, mutate)]\n\npub fn simple_add() -> i32 {\n\n 1 + 2\n\n}\n\n\n", "file_path": "examples/simple/src/simple_add.rs", "rank": 29, "score": 95863.58569631784 }, { "content": "#[test]\n\nfn fib_4() {\n\n assert_eq!(3, fib(4))\n\n}\n", "file_path": "examples/with-integration-tests-v1/tests/test_fib.rs", "rank": 30, "score": 94399.05021044705 }, { "content": "#[test]\n\nfn fib_1() {\n\n assert_eq!(1, fib(1))\n\n}\n\n\n", "file_path": "examples/with-integration-tests-v1/tests/test_fib.rs", "rank": 31, "score": 94399.05021044705 }, { "content": "#[test]\n\nfn fib_1() {\n\n assert_eq!(1, fib(1))\n\n}\n\n\n", "file_path": "examples/with-integration-tests-v2/tests/test_fib.rs", "rank": 32, "score": 94399.05021044705 }, { "content": "#[test]\n\nfn fib_5() {\n\n assert_eq!(5, fib(5))\n\n}\n", "file_path": "examples/with-integration-tests-v2/tests/test_fib.rs", "rank": 33, "score": 94399.05021044705 }, { 
"content": "#[test]\n\nfn fib_0() {\n\n assert_eq!(0, fib(0))\n\n}\n\n\n", "file_path": "examples/with-integration-tests-v1/tests/test_fib.rs", "rank": 34, "score": 94399.05021044705 }, { "content": "#[test]\n\nfn fib_5() {\n\n assert_eq!(5, fib(5))\n\n}\n", "file_path": "examples/with-integration-tests-v1/tests/test_fib.rs", "rank": 35, "score": 94399.05021044705 }, { "content": "#[test]\n\nfn fib_4() {\n\n assert_eq!(3, fib(4))\n\n}\n", "file_path": "examples/with-integration-tests-v2/tests/test_fib.rs", "rank": 36, "score": 94399.05021044705 }, { "content": "#[test]\n\nfn fib_3() {\n\n assert_eq!(2, fib(3))\n\n}\n\n\n", "file_path": "examples/with-integration-tests-v1/tests/test_fib.rs", "rank": 37, "score": 94399.05021044705 }, { "content": "#[test]\n\nfn fib_3() {\n\n assert_eq!(2, fib(3))\n\n}\n\n\n", "file_path": "examples/with-integration-tests-v2/tests/test_fib.rs", "rank": 38, "score": 94399.05021044705 }, { "content": "#[test]\n\nfn fib_2() {\n\n assert_eq!(1, fib(2))\n\n}\n\n\n", "file_path": "examples/with-integration-tests-v1/tests/test_fib.rs", "rank": 39, "score": 94399.05021044705 }, { "content": "#[test]\n\nfn fib_2() {\n\n assert_eq!(1, fib(2))\n\n}\n\n\n", "file_path": "examples/with-integration-tests-v2/tests/test_fib.rs", "rank": 40, "score": 94399.05021044705 }, { "content": "#[test]\n\nfn fib_0() {\n\n assert_eq!(0, fib(0))\n\n}\n\n\n", "file_path": "examples/with-integration-tests-v2/tests/test_fib.rs", "rank": 41, "score": 94399.05021044705 }, { "content": "#[cfg_attr(test, mutate)]\n\npub fn simple_add_u8() -> u8 {\n\n 1 + 2\n\n}\n\n\n", "file_path": "examples/simple/src/simple_add.rs", "rank": 42, "score": 94127.97362366685 }, { "content": "#[cfg_attr(test, mutate)]\n\npub fn add_repeated_u8() -> u8 {\n\n 1 + 2 + 3 * 2\n\n}\n\n\n", "file_path": "examples/simple/src/simple_add.rs", "rank": 43, "score": 94127.97362366685 }, { "content": "pub fn run<T: Not>(\n\n mutator_id: usize,\n\n val: T,\n\n runtime: impl Deref<Target = 
MutagenRuntimeConfig>,\n\n) -> <T as Not>::Output {\n\n runtime.covered(mutator_id);\n\n if runtime.is_mutation_active(mutator_id) {\n\n val.may_none()\n\n } else {\n\n !val\n\n }\n\n}\n\n\n", "file_path": "mutagen-core/src/mutator/mutator_unop_not.rs", "rank": 44, "score": 92997.20360014401 }, { "content": "#[cfg_attr(test, mutate)]\n\npub fn primetest(n: u32) -> bool {\n\n if n % 2 == 0u32 {\n\n return n == 2;\n\n }\n\n if n == 1 {\n\n return false;\n\n }\n\n let mut k = 3;\n\n while k * k <= n {\n\n if n % k == 0u32 {\n\n return false;\n\n }\n\n k += 2;\n\n }\n\n true\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n\n", "file_path": "examples/simple/src/primetest.rs", "rank": 45, "score": 92400.13998537241 }, { "content": "pub fn run<T: IntMutable>(\n\n mutator_id: usize,\n\n original_lit: T,\n\n runtime: impl Deref<Target = MutagenRuntimeConfig>,\n\n) -> T {\n\n runtime.covered(mutator_id);\n\n let mutations = MutationLitInt::possible_mutations(original_lit.as_u128());\n\n if let Some(m) = runtime.get_mutation_for_mutator(mutator_id, &mutations) {\n\n m.mutate(original_lit)\n\n } else {\n\n original_lit\n\n }\n\n}\n\n\n", "file_path": "mutagen-core/src/mutator/mutator_lit_int.rs", "rank": 46, "score": 88076.78149642752 }, { "content": "#[cfg_attr(test, mutate)]\n\npub fn add_two_u8(x: u8) -> u8 {\n\n x + 2\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n\n\n use super::*;\n\n\n\n #[test]\n\n fn test_simple_add() {\n\n assert_eq!(simple_add(), 3);\n\n }\n\n\n\n #[test]\n\n fn test_simple_add_u8() {\n\n assert_eq!(simple_add_u8(), 3);\n\n }\n\n\n\n #[test]\n\n fn test_add_two_u8() {\n\n assert_eq!(add_two_u8(1), 3);\n\n }\n\n\n\n #[test]\n\n fn test_add_repeated_u8() {\n\n assert_eq!(add_repeated_u8(), 9);\n\n }\n\n}\n", "file_path": "examples/simple/src/simple_add.rs", "rank": 47, "score": 87184.20224417519 }, { "content": "pub fn get_coverage_file() -> Fallible<PathBuf> {\n\n Ok(mutagen_dir()?.join(DEFAULT_COVERAGE_FILENAME))\n\n}\n\n\n", "file_path": 
"mutagen-core/src/comm/mutagen_files.rs", "rank": 48, "score": 86612.25038263158 }, { "content": "/// Finds the file that contains the descriptions of all mutations as written by the procedural macro\n\npub fn get_mutations_file() -> Fallible<PathBuf> {\n\n Ok(mutagen_dir()?.join(DEFAULT_MUTAGEN_FILENAME))\n\n}\n\n\n", "file_path": "mutagen-core/src/comm/mutagen_files.rs", "rank": 49, "score": 86612.25038263158 }, { "content": "pub fn get_mutations_file_json() -> Fallible<PathBuf> {\n\n Ok(mutagen_dir()?.join(JSON_MUTAGEN_FILENAME))\n\n}\n\n\n", "file_path": "mutagen-core/src/comm/mutagen_files.rs", "rank": 50, "score": 85224.07886586174 }, { "content": "/// trait that is used to optimistically remove a negation `!` from an expression\n\n///\n\n/// This trait provides a function `may_none` that passes the input value unchanged\n\n/// If the value cannot be converted to the output type of the negation using `Into`, the optimistic assumption fails.\n\npub trait NotToNone {\n\n type Output;\n\n // do nothing\n\n fn may_none(self) -> Self::Output;\n\n}\n\n\n\nimpl<T> NotToNone for T\n\nwhere\n\n T: Not,\n\n{\n\n type Output = <T as Not>::Output;\n\n\n\n default fn may_none(self) -> <T as Not>::Output {\n\n MutagenRuntimeConfig::get_default().optimistic_assumption_failed();\n\n }\n\n}\n\n\n\nimpl<T> NotToNone for T\n\nwhere\n\n T: Not,\n", "file_path": "mutagen-core/src/mutator/mutator_unop_not.rs", "rank": 51, "score": 84166.81023169508 }, { "content": "/// check if an expression has numeric type.\n\n///\n\n/// This is implemented via a heuristic. An expression has an numeric type if:\n\n/// * it is a numeric literal\n\n/// * it is an binary arithmetic- or bit-operation that has an integer expression on the left side\n\n/// * it is an unary operation with an numeric expression\n\n/// * it is a reference to a numeric expression. This lets us count `*&1` as numeric expression.\n\n/// * it is a block that ends in a numeric expression. 
This lets us count {...; 1} as numeric expression.\n\n/// * it is a if expression with an numeric expression as one of the cases\n\npub fn is_num_expr(e: &syn::Expr) -> bool {\n\n match e {\n\n syn::Expr::Lit(expr) => match expr.lit {\n\n syn::Lit::Int(_) => true,\n\n syn::Lit::Byte(_) => true,\n\n syn::Lit::Float(_) => true,\n\n _ => false,\n\n },\n\n syn::Expr::Binary(expr) => match expr.op {\n\n syn::BinOp::Add(_) => is_num_expr(&expr.left),\n\n syn::BinOp::Sub(_) => is_num_expr(&expr.left),\n\n syn::BinOp::Mul(_) => is_num_expr(&expr.left),\n\n syn::BinOp::Div(_) => is_num_expr(&expr.left),\n\n syn::BinOp::Rem(_) => is_num_expr(&expr.left),\n\n syn::BinOp::BitAnd(_) => is_num_expr(&expr.left),\n\n syn::BinOp::BitOr(_) => is_num_expr(&expr.left),\n\n syn::BinOp::BitXor(_) => is_num_expr(&expr.left),\n\n syn::BinOp::Shl(_) => is_num_expr(&expr.left),\n\n syn::BinOp::Shr(_) => is_num_expr(&expr.left),\n\n _ => false,\n\n },\n\n syn::Expr::Unary(expr) => is_num_expr(&expr.expr),\n\n syn::Expr::Reference(expr) => is_num_expr(&expr.expr),\n\n syn::Expr::Paren(expr) => is_num_expr(&expr.expr),\n\n syn::Expr::Block(expr) => is_num_block(&expr.block),\n\n syn::Expr::If(expr) => is_num_expr_if(&expr),\n\n _ => false,\n\n }\n\n}\n\n\n", "file_path": "mutagen-core/src/transformer/ast_inspect.rs", "rank": 52, "score": 83526.55283753677 }, { "content": "pub fn run_and<L: BitAnd<R>, R>(\n\n mutator_id: usize,\n\n left: L,\n\n right: R,\n\n runtime: impl Deref<Target = MutagenRuntimeConfig>,\n\n) -> <L as BitAnd<R>>::Output {\n\n runtime.covered(mutator_id);\n\n let mutations = MutationBinopBit::possible_mutations(BinopBit::And);\n\n if let Some(m) = runtime.get_mutation_for_mutator(mutator_id, &mutations) {\n\n match m.op {\n\n BinopBit::Or => left.and_may_or(right),\n\n BinopBit::Xor => left.and_may_xor(right),\n\n _ => unreachable!(),\n\n }\n\n } else {\n\n left & right\n\n }\n\n}\n", "file_path": "mutagen-core/src/mutator/mutator_binop_bit.rs", "rank": 53, "score": 
82058.7481737163 }, { "content": "pub fn run_or<L: BitOr<R>, R>(\n\n mutator_id: usize,\n\n left: L,\n\n right: R,\n\n runtime: impl Deref<Target = MutagenRuntimeConfig>,\n\n) -> <L as BitOr<R>>::Output {\n\n runtime.covered(mutator_id);\n\n let mutations = MutationBinopBit::possible_mutations(BinopBit::Or);\n\n if let Some(m) = runtime.get_mutation_for_mutator(mutator_id, &mutations) {\n\n match m.op {\n\n BinopBit::And => left.or_may_and(right),\n\n BinopBit::Xor => left.or_may_xor(right),\n\n _ => unreachable!(),\n\n }\n\n } else {\n\n left | right\n\n }\n\n}\n", "file_path": "mutagen-core/src/mutator/mutator_binop_bit.rs", "rank": 54, "score": 82058.7481737163 }, { "content": "pub fn run<L: PartialOrd<R>, R>(\n\n mutator_id: usize,\n\n left: L,\n\n right: R,\n\n original_op: BinopCmp,\n\n runtime: impl Deref<Target = MutagenRuntimeConfig>,\n\n) -> bool {\n\n runtime.covered(mutator_id);\n\n let mutations = MutationBinopCmp::possible_mutations(original_op);\n\n if let Some(m) = runtime.get_mutation_for_mutator(mutator_id, &mutations) {\n\n m.mutate(left, right)\n\n } else {\n\n original_op.cmp(left, right)\n\n }\n\n}\n\n\n", "file_path": "mutagen-core/src/mutator/mutator_binop_cmp.rs", "rank": 55, "score": 80670.57665694645 }, { "content": "pub fn run_div<L: Div<R>, R>(\n\n mutator_id: usize,\n\n left: L,\n\n right: R,\n\n runtime: impl Deref<Target = MutagenRuntimeConfig>,\n\n) -> <L as Div<R>>::Output {\n\n runtime.covered(mutator_id);\n\n if runtime.is_mutation_active(mutator_id) {\n\n left.may_mul(right)\n\n } else {\n\n left / right\n\n }\n\n}\n\n\n", "file_path": "mutagen-core/src/mutator/mutator_binop_num.rs", "rank": 56, "score": 80670.57665694645 }, { "content": "pub fn run_mul<L: Mul<R>, R>(\n\n mutator_id: usize,\n\n left: L,\n\n right: R,\n\n runtime: impl Deref<Target = MutagenRuntimeConfig>,\n\n) -> <L as Mul<R>>::Output {\n\n runtime.covered(mutator_id);\n\n if runtime.is_mutation_active(mutator_id) {\n\n left.may_div(right)\n\n } else {\n\n left 
* right\n\n }\n\n}\n", "file_path": "mutagen-core/src/mutator/mutator_binop_num.rs", "rank": 57, "score": 80670.57665694645 }, { "content": "pub fn run_sub<L: Sub<R>, R>(\n\n mutator_id: usize,\n\n left: L,\n\n right: R,\n\n runtime: impl Deref<Target = MutagenRuntimeConfig>,\n\n) -> <L as Sub<R>>::Output {\n\n runtime.covered(mutator_id);\n\n if runtime.is_mutation_active(mutator_id) {\n\n left.may_add(right)\n\n } else {\n\n left - right\n\n }\n\n}\n", "file_path": "mutagen-core/src/mutator/mutator_binop_num.rs", "rank": 58, "score": 80670.57665694645 }, { "content": "pub fn run_add<L: Add<R>, R>(\n\n mutator_id: usize,\n\n left: L,\n\n right: R,\n\n runtime: impl Deref<Target = MutagenRuntimeConfig>,\n\n) -> <L as Add<R>>::Output {\n\n runtime.covered(mutator_id);\n\n if runtime.is_mutation_active(mutator_id) {\n\n left.may_sub(right)\n\n } else {\n\n left + right\n\n }\n\n}\n", "file_path": "mutagen-core/src/mutator/mutator_binop_num.rs", "rank": 59, "score": 80670.57665694645 }, { "content": "pub fn run_xor<L: BitXor<R>, R>(\n\n mutator_id: usize,\n\n left: L,\n\n right: R,\n\n runtime: impl Deref<Target = MutagenRuntimeConfig>,\n\n) -> <L as BitXor<R>>::Output {\n\n runtime.covered(mutator_id);\n\n let mutations = MutationBinopBit::possible_mutations(BinopBit::Xor);\n\n if let Some(m) = runtime.get_mutation_for_mutator(mutator_id, &mutations) {\n\n match m.op {\n\n BinopBit::And => left.xor_may_and(right),\n\n BinopBit::Or => left.xor_may_or(right),\n\n _ => unreachable!(),\n\n }\n\n } else {\n\n left ^ right\n\n }\n\n}\n\n\n", "file_path": "mutagen-core/src/mutator/mutator_binop_bit.rs", "rank": 60, "score": 79352.94446797107 }, { "content": "/// a trait for optimistically removing a statement containing a method- or function call.\n\n///\n\n/// This operation is optimistic, since the statement could have the type `!` and can be used in surprising contexts:\n\n///\n\n/// * `let x = {f(return y);}`\n\n/// * `let x = {std::process::abort();}`\n\n///\n\n/// 
Above examples compile and it is not possible to remove the statements without introducing compiler errors.\n\npub trait StmtCallToNone {\n\n fn stmt_call_to_none() -> Self;\n\n}\n\n\n\nimpl<T> StmtCallToNone for T {\n\n default fn stmt_call_to_none() -> Self {\n\n MutagenRuntimeConfig::get_default().optimistic_assumption_failed();\n\n }\n\n}\n\n\n\nimpl StmtCallToNone for () {\n\n fn stmt_call_to_none() -> () {}\n\n}\n\n\n", "file_path": "mutagen-core/src/mutator/mutator_stmt_call.rs", "rank": 61, "score": 78860.14494764677 }, { "content": "pub fn read_items<T: DeserializeOwned>(filepath: &Path) -> Fallible<Vec<T>> {\n\n BufReader::new(File::open(filepath)?)\n\n .lines()\n\n .map(|line| {\n\n serde_json::from_str(&line?).map_err(|e| format_err!(\"mutation format error: {}\", e))\n\n })\n\n .collect()\n\n}\n\n\n", "file_path": "mutagen-core/src/comm/mutagen_files.rs", "rank": 62, "score": 72013.04847281748 }, { "content": "/// build all tests and collect test-suite executables\n\nfn compile_tests(opt: &Options) -> Fallible<Vec<PathBuf>> {\n\n let mut tests: Vec<PathBuf> = Vec::new();\n\n\n\n let mut feature_args: Vec<&str> = vec![];\n\n if let Some(f) = &opt.features {\n\n feature_args.extend(&[\"--features\", f]);\n\n }\n\n if opt.all_features {\n\n feature_args.push(\"--all-features\");\n\n }\n\n\n\n // execute `cargo test --no-run --message-format=json` and collect output\n\n let compile_out = Command::new(\"cargo\")\n\n .args(&[\"test\", \"--no-run\", \"--message-format=json\"])\n\n .args(&feature_args)\n\n .stderr(Stdio::inherit())\n\n .output()?;\n\n if !compile_out.status.success() {\n\n bail!(\"`cargo test --no-run` returned non-zero exit status\");\n\n }\n", "file_path": "mutagen-runner/src/main.rs", "rank": 63, "score": 67195.00062974714 }, { "content": " })\n\n }\n\n #[test]\n\n fn ne_active1() {\n\n MutagenRuntimeConfig::test_with_mutation_id(1, || {\n\n assert_eq!(ne(1, 2), false);\n\n assert_eq!(ne(3, 3), true);\n\n })\n\n }\n\n}\n\n\n\nmod 
eq_but_not_copy {\n\n\n\n use ::mutagen::mutate;\n\n use ::mutagen::MutagenRuntimeConfig;\n\n\n\n #[derive(PartialEq, Eq)]\n\n struct EqButNotCopy;\n\n\n\n #[mutate(conf = local(expected_mutations = 1), mutators = only(binop_eq))]\n", "file_path": "mutagen-selftest/src/mutator/test_binop_eq.rs", "rank": 64, "score": 63643.393479900886 }, { "content": "mod test_eq {\n\n\n\n use ::mutagen::mutate;\n\n use ::mutagen::MutagenRuntimeConfig;\n\n\n\n // simple comparison\n\n #[mutate(conf = local(expected_mutations = 1), mutators = only(binop_eq))]\n\n fn eq(left: i32, right: i32) -> bool {\n\n left == right\n\n }\n\n #[test]\n\n fn eq_inactive() {\n\n MutagenRuntimeConfig::test_without_mutation(|| {\n\n assert_eq!(eq(1, 2), false);\n\n assert_eq!(eq(3, 3), true);\n\n })\n\n }\n\n #[test]\n\n fn eq_active1() {\n\n MutagenRuntimeConfig::test_with_mutation_id(1, || {\n", "file_path": "mutagen-selftest/src/mutator/test_binop_eq.rs", "rank": 65, "score": 63643.317979070904 }, { "content": " assert_eq!(eq(1, 2), true);\n\n assert_eq!(eq(3, 3), false);\n\n })\n\n }\n\n}\n\nmod test_ne {\n\n\n\n use ::mutagen::mutate;\n\n use ::mutagen::MutagenRuntimeConfig;\n\n\n\n // simple comparison\n\n #[mutate(conf = local(expected_mutations = 1), mutators = only(binop_eq))]\n\n fn ne(left: i32, right: i32) -> bool {\n\n left != right\n\n }\n\n #[test]\n\n fn ne_inactive() {\n\n MutagenRuntimeConfig::test_without_mutation(|| {\n\n assert_eq!(ne(1, 2), true);\n\n assert_eq!(ne(3, 3), false);\n", "file_path": "mutagen-selftest/src/mutator/test_binop_eq.rs", "rank": 66, "score": 63642.65065972058 }, { "content": " fn eq(x: &EqButNotCopy, y: &EqButNotCopy) -> bool {\n\n *x == *y\n\n }\n\n #[test]\n\n fn eq_inactive() {\n\n MutagenRuntimeConfig::test_without_mutation(|| {\n\n assert!(eq(&EqButNotCopy, &EqButNotCopy));\n\n })\n\n }\n\n #[test]\n\n fn eq_active1() {\n\n MutagenRuntimeConfig::test_with_mutation_id(1, || {\n\n assert!(!eq(&EqButNotCopy, &EqButNotCopy));\n\n })\n\n }\n\n}\n\n\n\nmod 
divides {\n\n\n\n use ::mutagen::mutate;\n", "file_path": "mutagen-selftest/src/mutator/test_binop_eq.rs", "rank": 67, "score": 63641.339769815684 }, { "content": " use ::mutagen::MutagenRuntimeConfig;\n\n\n\n #[mutate(conf = local(expected_mutations = 1), mutators = only(binop_eq))]\n\n fn divides(x: u32, y: u32) -> bool {\n\n x % y == 0u32\n\n }\n\n\n\n #[test]\n\n fn divides_inactive() {\n\n MutagenRuntimeConfig::test_without_mutation(|| {\n\n assert!(divides(2, 2));\n\n assert!(!divides(3, 4));\n\n })\n\n }\n\n #[test]\n\n fn divides_active() {\n\n MutagenRuntimeConfig::test_with_mutation_id(1, || {\n\n assert!(!divides(2, 2));\n\n assert!(divides(3, 4));\n\n })\n\n }\n\n}\n", "file_path": "mutagen-selftest/src/mutator/test_binop_eq.rs", "rank": 68, "score": 63638.502353752665 }, { "content": "#[derive(StructOpt, Debug)]\n\nstruct Options {\n\n /// Space-separated list of features to activate\n\n #[structopt(long, name = \"FEATURES\")]\n\n features: Option<String>,\n\n\n\n /// Activate all available features\n\n #[structopt(long)]\n\n all_features: bool,\n\n}\n\n\n", "file_path": "mutagen-runner/src/main.rs", "rank": 69, "score": 61598.984032262524 }, { "content": "enum LazyAdd {\n\n Val(u8),\n\n Lazy(Box<LazyAdd>, Box<LazyAdd>),\n\n}\n\n\n\nimpl From<u8> for LazyAdd {\n\n fn from(v: u8) -> Self {\n\n Self::Val(v)\n\n }\n\n}\n\n\n\nimpl std::ops::Add<LazyAdd> for LazyAdd {\n\n type Output = LazyAdd;\n\n fn add(self, rhs: LazyAdd) -> LazyAdd {\n\n LazyAdd::Lazy(Box::new(self), Box::new(rhs))\n\n }\n\n}\n\n\n\n#[cfg_attr(test, mutate)]\n\nimpl LazyAdd {\n", "file_path": "examples/simple/src/lazy_add.rs", "rank": 70, "score": 60311.01183534713 }, { "content": "#[derive(Clone, Debug)]\n\nstruct ExprUnopNot {\n\n expr: Expr,\n\n op_token: syn::UnOp,\n\n}\n\n\n\nimpl TryFrom<Expr> for ExprUnopNot {\n\n type Error = Expr;\n\n fn try_from(expr: Expr) -> Result<Self, Expr> {\n\n match expr {\n\n Expr::Unary(expr) => match expr.op {\n\n UnOp::Not(_) => Ok(ExprUnopNot 
{\n\n expr: *expr.expr,\n\n op_token: expr.op,\n\n }),\n\n _ => Err(Expr::Unary(expr)),\n\n },\n\n e => Err(e),\n\n }\n\n }\n\n}\n\n\n\nimpl syn::spanned::Spanned for ExprUnopNot {\n\n fn span(&self) -> Span {\n\n self.op_token.span()\n\n }\n\n}\n\n\n", "file_path": "mutagen-core/src/mutator/mutator_unop_not.rs", "rank": 71, "score": 58387.21916262817 }, { "content": "#[derive(PartialEq, Eq, Clone, Copy, Debug)]\n\nenum BinopNum {\n\n Add,\n\n Sub,\n\n Mul,\n\n Div,\n\n}\n\n\n\nuse std::fmt;\n\n\n\nimpl fmt::Display for BinopNum {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n match self {\n\n BinopNum::Add => write!(f, \"+\"),\n\n BinopNum::Sub => write!(f, \"-\"),\n\n BinopNum::Mul => write!(f, \"*\"),\n\n BinopNum::Div => write!(f, \"/\"),\n\n }\n\n }\n\n}\n\n\n", "file_path": "mutagen-core/src/mutator/mutator_binop_num.rs", "rank": 72, "score": 58364.941506717005 }, { "content": "#[derive(PartialEq, Eq, Clone, Copy, Debug)]\n\nenum BinopBit {\n\n And,\n\n Or,\n\n Xor,\n\n}\n\n\n\nuse std::fmt;\n\n\n\nimpl fmt::Display for BinopBit {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n match self {\n\n BinopBit::And => write!(f, \"&\"),\n\n BinopBit::Or => write!(f, \"|\"),\n\n BinopBit::Xor => write!(f, \"^\"),\n\n }\n\n }\n\n}\n\n\n\n// specification of the traits `AndToOr`, `OrToAnd`, ...\n\n//\n", "file_path": "mutagen-core/src/mutator/mutator_binop_bit.rs", "rank": 73, "score": 58364.941506717005 }, { "content": "#[derive(Copy, Clone, Debug, PartialEq, Eq)]\n\nenum MutationLitInt {\n\n Relative(i128),\n\n}\n\n\n\nimpl MutationLitInt {\n\n fn possible_mutations(val: u128) -> Vec<Self> {\n\n let mut mutations = vec![];\n\n if val != u128::max_value() {\n\n mutations.push(MutationLitInt::Relative(1));\n\n }\n\n if val != 0 {\n\n mutations.push(MutationLitInt::Relative(-1));\n\n }\n\n mutations\n\n }\n\n\n\n fn mutate<T: IntMutable>(self, val: T) -> T {\n\n match self {\n\n Self::Relative(r) => 
IntMutable::from_u128(val.as_u128().wrapping_add(r as u128)),\n\n }\n", "file_path": "mutagen-core/src/mutator/mutator_lit_int.rs", "rank": 74, "score": 57473.1380889588 }, { "content": "#[derive(Debug, Clone)]\n\nstruct StmtCall {\n\n call: TokenStream,\n\n span: Span,\n\n}\n\n\n\nimpl TryFrom<Stmt> for StmtCall {\n\n type Error = Stmt;\n\n fn try_from(stmt: Stmt) -> Result<Self, Stmt> {\n\n match stmt {\n\n Stmt::Semi(Expr::MethodCall(call), _) => Ok(StmtCall {\n\n span: call.span(),\n\n call: call.into_token_stream(),\n\n }),\n\n Stmt::Semi(Expr::Call(call), _) => Ok(StmtCall {\n\n span: call.span(),\n\n call: call.into_token_stream(),\n\n }),\n\n _ => return Err(stmt),\n\n }\n\n }\n\n}\n\n\n", "file_path": "mutagen-core/src/mutator/mutator_stmt_call.rs", "rank": 75, "score": 57444.99646496256 }, { "content": "#[derive(Copy, Clone, Debug, PartialEq, Eq)]\n\nstruct MutationBinopCmp {\n\n op: BinopCmp,\n\n}\n\n\n\nimpl MutationBinopCmp {\n\n fn possible_mutations(original_op: BinopCmp) -> Vec<Self> {\n\n [BinopCmp::Lt, BinopCmp::Le, BinopCmp::Ge, BinopCmp::Gt]\n\n .iter()\n\n .copied()\n\n .filter(|&op| op != original_op)\n\n .map(|op| MutationBinopCmp { op })\n\n .collect()\n\n }\n\n\n\n fn mutate<L: PartialOrd<R>, R>(self, left: L, right: R) -> bool {\n\n self.op.cmp(left, right)\n\n }\n\n\n\n fn to_mutation(self, original_op: &ExprBinopCmp, context: &TransformContext) -> Mutation {\n\n Mutation::new_spanned(\n\n &context,\n\n \"binop_cmp\".to_owned(),\n\n format!(\"{}\", original_op.op),\n\n format!(\"{}\", self.op),\n\n original_op.span,\n\n )\n\n }\n\n}\n\n\n", "file_path": "mutagen-core/src/mutator/mutator_binop_cmp.rs", "rank": 76, "score": 56561.61946606802 }, { "content": "#[derive(Copy, Clone, Debug, PartialEq, Eq)]\n\nstruct MutationBinopBool {\n\n op: BinopBool,\n\n}\n\n\n\nimpl MutationBinopBool {\n\n fn possible_mutations(original_op: BinopBool) -> Vec<Self> {\n\n [BinopBool::And, BinopBool::Or]\n\n .iter()\n\n .copied()\n\n .filter(|&op| op != 
original_op)\n\n .map(|op| MutationBinopBool { op })\n\n .collect()\n\n }\n\n\n\n fn to_mutation(self, original_op: &ExprBinopBool, context: &TransformContext) -> Mutation {\n\n Mutation::new_spanned(\n\n &context,\n\n \"binop_bool\".to_owned(),\n\n format!(\"{}\", original_op),\n\n format!(\"{}\", self.op),\n\n original_op.span,\n\n )\n\n }\n\n}\n\n\n", "file_path": "mutagen-core/src/mutator/mutator_binop_bool.rs", "rank": 77, "score": 56561.61946606802 }, { "content": "#[derive(Copy, Clone, Debug, PartialEq, Eq)]\n\nstruct MutationBinopNum {\n\n op: BinopNum,\n\n}\n\n\n\nimpl MutationBinopNum {\n\n fn possible_mutations(original_op: BinopNum) -> Vec<Self> {\n\n match original_op {\n\n BinopNum::Add => vec![MutationBinopNum { op: BinopNum::Sub }],\n\n BinopNum::Sub => vec![MutationBinopNum { op: BinopNum::Add }],\n\n BinopNum::Mul => vec![MutationBinopNum { op: BinopNum::Div }],\n\n BinopNum::Div => vec![MutationBinopNum { op: BinopNum::Mul }],\n\n }\n\n }\n\n\n\n fn to_mutation(self, original_expr: &ExprBinopNum, context: &TransformContext) -> Mutation {\n\n Mutation::new_spanned(\n\n &context,\n\n \"binop_num\".to_owned(),\n\n format!(\"{}\", original_expr.op),\n\n format!(\"{}\", self.op),\n\n original_expr.span(),\n\n )\n\n }\n\n}\n\n\n", "file_path": "mutagen-core/src/mutator/mutator_binop_num.rs", "rank": 78, "score": 56561.61946606802 }, { "content": "#[derive(Copy, Clone, Debug, PartialEq, Eq)]\n\nstruct MutationBinopBit {\n\n op: BinopBit,\n\n}\n\n\n\nimpl MutationBinopBit {\n\n fn possible_mutations(original_op: BinopBit) -> Vec<Self> {\n\n [BinopBit::And, BinopBit::Or, BinopBit::Xor]\n\n .iter()\n\n .copied()\n\n .filter(|&op| op != original_op)\n\n .map(|op| MutationBinopBit { op })\n\n .collect()\n\n }\n\n\n\n fn to_mutation(self, original_expr: &ExprBinopBit, context: &TransformContext) -> Mutation {\n\n Mutation::new_spanned(\n\n &context,\n\n \"binop_bit\".to_owned(),\n\n format!(\"{}\", original_expr.op),\n\n format!(\"{}\", self.op),\n\n 
original_expr.span(),\n\n )\n\n }\n\n}\n\n\n", "file_path": "mutagen-core/src/mutator/mutator_binop_bit.rs", "rank": 79, "score": 56561.61946606802 }, { "content": "#[derive(Clone, Debug)]\n\nstruct ExprBinopCmp {\n\n op: BinopCmp,\n\n left: Expr,\n\n right: Expr,\n\n span: Span,\n\n}\n\n\n\nimpl TryFrom<Expr> for ExprBinopCmp {\n\n type Error = Expr;\n\n fn try_from(expr: Expr) -> Result<Self, Expr> {\n\n match expr {\n\n Expr::Binary(expr) => match expr.op {\n\n BinOp::Lt(t) => Ok(ExprBinopCmp {\n\n op: BinopCmp::Lt,\n\n left: *expr.left,\n\n right: *expr.right,\n\n span: t.span(),\n\n }),\n\n BinOp::Le(t) => Ok(ExprBinopCmp {\n\n op: BinopCmp::Le,\n", "file_path": "mutagen-core/src/mutator/mutator_binop_cmp.rs", "rank": 80, "score": 56557.619850351795 }, { "content": "#[derive(Clone, Debug)]\n\nstruct ExprBinopNum {\n\n op: BinopNum,\n\n left: Expr,\n\n right: Expr,\n\n op_token: syn::BinOp,\n\n}\n\n\n\nimpl TryFrom<Expr> for ExprBinopNum {\n\n type Error = Expr;\n\n fn try_from(expr: Expr) -> Result<Self, Expr> {\n\n match expr {\n\n Expr::Binary(expr) => match expr.op {\n\n BinOp::Add(_) => Ok(ExprBinopNum {\n\n op: BinopNum::Add,\n\n left: *expr.left,\n\n right: *expr.right,\n\n op_token: expr.op,\n\n }),\n\n BinOp::Sub(_) => Ok(ExprBinopNum {\n\n op: BinopNum::Sub,\n", "file_path": "mutagen-core/src/mutator/mutator_binop_num.rs", "rank": 81, "score": 56557.619850351795 }, { "content": "#[derive(Clone, Debug)]\n\nstruct ExprLitBool {\n\n value: bool,\n\n span: Span,\n\n}\n\n\n\nimpl TryFrom<Expr> for ExprLitBool {\n\n type Error = Expr;\n\n fn try_from(expr: Expr) -> Result<Self, Expr> {\n\n match expr {\n\n Expr::Lit(ExprLit {\n\n lit: Lit::Bool(LitBool { value, span }),\n\n ..\n\n }) => Ok(ExprLitBool { value, span }),\n\n _ => Err(expr),\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n", "file_path": "mutagen-core/src/mutator/mutator_lit_bool.rs", "rank": 82, "score": 56557.619850351795 }, { "content": "#[derive(Clone, Debug)]\n\nstruct ExprBinopBit 
{\n\n op: BinopBit,\n\n left: Expr,\n\n right: Expr,\n\n op_token: syn::BinOp,\n\n}\n\n\n\nimpl TryFrom<Expr> for ExprBinopBit {\n\n type Error = Expr;\n\n fn try_from(expr: Expr) -> Result<Self, Expr> {\n\n match expr {\n\n Expr::Binary(expr) => match expr.op {\n\n BinOp::BitAnd(_) => Ok(ExprBinopBit {\n\n op: BinopBit::And,\n\n left: *expr.left,\n\n right: *expr.right,\n\n op_token: expr.op,\n\n }),\n\n BinOp::BitOr(_) => Ok(ExprBinopBit {\n\n op: BinopBit::Or,\n", "file_path": "mutagen-core/src/mutator/mutator_binop_bit.rs", "rank": 83, "score": 56557.619850351795 }, { "content": "#[derive(Clone, Debug)]\n\nstruct ExprBinopBool {\n\n op: BinopBool,\n\n left: Expr,\n\n right: Expr,\n\n span: Span,\n\n}\n\n\n\nimpl TryFrom<Expr> for ExprBinopBool {\n\n type Error = Expr;\n\n fn try_from(expr: Expr) -> Result<Self, Expr> {\n\n match expr {\n\n Expr::Binary(expr) => match expr.op {\n\n BinOp::And(t) => Ok(ExprBinopBool {\n\n op: BinopBool::And,\n\n left: *expr.left,\n\n right: *expr.right,\n\n span: t.span(),\n\n }),\n\n BinOp::Or(t) => Ok(ExprBinopBool {\n\n op: BinopBool::Or,\n", "file_path": "mutagen-core/src/mutator/mutator_binop_bool.rs", "rank": 84, "score": 56557.619850351795 }, { "content": "fn main() {\n\n if let Err(err) = run() {\n\n eprintln!();\n\n eprintln!(\"Error!\");\n\n eprintln!(\"{}\", err);\n\n process::exit(1);\n\n }\n\n}\n\n\n\nuse structopt::StructOpt;\n", "file_path": "mutagen-runner/src/main.rs", "rank": 85, "score": 53534.571515033684 }, { "content": "/// run all mutations on all test-executables\n\nfn run_mutations(\n\n progress: &mut Progress,\n\n test_bins: &[TestBinTested],\n\n mutations: Vec<BakedMutation>,\n\n coverage: &CoverageCollection,\n\n) -> Fallible<MutagenReport> {\n\n let mut mutagen_report = MutagenReport::new();\n\n\n\n for m in mutations {\n\n let mutant_status = if coverage.is_covered(m.id()) {\n\n progress.start_mutation_covered(&m)?;\n\n\n\n // run all test binaries\n\n let mut mutant_status = 
MutantStatus::Survived;\n\n for bin in test_bins {\n\n mutant_status = bin.check_mutant(&m)?;\n\n if mutant_status != MutantStatus::Survived {\n\n break;\n\n }\n\n }\n", "file_path": "mutagen-runner/src/main.rs", "rank": 86, "score": 52429.28936919676 }, { "content": "#[test]\n\nfn without_mutation() {\n\n MutagenRuntimeConfig::test_without_mutation(|| {\n\n assert_eq!(MutagenRuntimeConfig::get_default().mutation_id(), None);\n\n })\n\n}\n", "file_path": "mutagen-selftest/src/runtime_config.rs", "rank": 87, "score": 51391.39980586707 }, { "content": "#[test]\n\nfn with_mutation_id_1() {\n\n MutagenRuntimeConfig::test_with_mutation_id(1, || {\n\n assert_eq!(MutagenRuntimeConfig::get_default().mutation_id(), Some(1));\n\n })\n\n}\n", "file_path": "mutagen-selftest/src/runtime_config.rs", "rank": 88, "score": 51391.39980586707 }, { "content": "#[test]\n\n#[should_panic]\n\nfn with_mutation_id_0() {\n\n MutagenRuntimeConfig::with_mutation_id(0);\n\n}\n", "file_path": "mutagen-selftest/src/runtime_config.rs", "rank": 89, "score": 51391.32299678137 }, { "content": "/// struct that collects coverage of mutators.\n\n///\n\n/// It has to be created with a known size.\n\n///\n\n/// The method `hit`, is used for recording coverage hits.\n\nstruct CoverageHitCollector(Vec<AtomicU64>);\n\n\n\nimpl CoverageHitCollector {\n\n /// constructs a HotCoverageCollection for a given number of mutations\n\n fn new(num_mutations: usize) -> Self {\n\n Self((0..=num_mutations).map(|_| AtomicU64::new(0)).collect())\n\n }\n\n\n\n /// records a single coverage hit.\n\n ///\n\n /// Returns true iff this hit was the first for this mutator\n\n fn hit(&self, mutator_id: usize) -> bool {\n\n 0 == self.0[mutator_id].fetch_add(1, Ordering::Relaxed)\n\n }\n\n}\n\n\n\n/// module with functions used for isolated and exhaustive tests of the `#[mutate]` attribute\n\n#[cfg(any(test, feature = \"self_test\"))]\n\nmod test_tools {\n\n\n", "file_path": "mutagen-core/src/runtime_config.rs", "rank": 90, 
"score": 51097.77476557622 }, { "content": "fn run() -> Fallible<()> {\n\n let mutagen_start = Instant::now();\n\n\n\n // drop \"mutagen\" arg in cargo-subcommand mode\n\n let mut args = env::args();\n\n if env::var(\"CARGO\").is_ok() {\n\n // we're invoked by cargo, drop the first arg\n\n args.next();\n\n }\n\n let opt = Options::from_iter(args);\n\n\n\n // build the testsuites and collect mutations\n\n let test_bins = compile_tests(&opt)?;\n\n if test_bins.is_empty() {\n\n bail!(\"no test executable(s) found\");\n\n }\n\n let mutations = read_mutations()?;\n\n let num_mutations = mutations.len();\n\n\n\n let mut progress = Progress::new(mutations.len());\n", "file_path": "mutagen-runner/src/main.rs", "rank": 91, "score": 50560.554165751986 }, { "content": "// trait for operations that mutate integers of any type\n\npub trait IntMutable: Copy {\n\n fn from_u128(val: u128) -> Self;\n\n fn as_u128(self) -> u128;\n\n}\n\n\n\n// implementation for `IntMutable` for all integer types\n\nmacro_rules! 
lit_int_mutables {\n\n { $($suf:ident, $ty:ident,)* } => {\n\n $(\n\n impl IntMutable for $ty {\n\n fn from_u128(val: u128) -> Self {\n\n val as $ty\n\n }\n\n fn as_u128(self) -> u128 {\n\n self as u128\n\n }\n\n }\n\n )*\n\n\n\n }\n", "file_path": "mutagen-core/src/mutator/mutator_lit_int.rs", "rank": 92, "score": 48074.89446043347 }, { "content": "use example_with_integration_tests_v2::fib;\n\n\n\n#[test]\n", "file_path": "examples/with-integration-tests-v2/tests/test_fib.rs", "rank": 93, "score": 44658.19784906282 }, { "content": "use example_with_integration_tests_v1::fib;\n\n\n\n#[test]\n", "file_path": "examples/with-integration-tests-v1/tests/test_fib.rs", "rank": 94, "score": 44658.19784906282 }, { "content": "/// queries `cargo` for the workspace root and locates the directory to write mutagen-specific information\n\nfn mutagen_dir() -> Fallible<PathBuf> {\n\n let metadata = Command::new(\"cargo\").arg(\"metadata\").output()?;\n\n if !metadata.status.success() {\n\n bail!(\"{}\", str::from_utf8(&metadata.stderr)?);\n\n }\n\n let meta_json = json::parse(str::from_utf8(&metadata.stdout)?)?;\n\n let root_dir = Path::new(\n\n meta_json[\"workspace_root\"]\n\n .as_str()\n\n .ok_or_else(|| format_err!(\"cargo metadata misses workspace_root\"))?,\n\n );\n\n Ok(root_dir.join(DEFAULT_MUTAGEN_DIR))\n\n}\n\n\n", "file_path": "mutagen-core/src/comm/mutagen_files.rs", "rank": 95, "score": 44412.0908098547 }, { "content": "/// read all mutations from the given file\n\n///\n\n/// This functions gets the file that describes all mutations performed on the target program and ensures that it exists.\n\n/// The list of mutations is also preserved\n\nfn read_mutations() -> Fallible<Vec<BakedMutation>> {\n\n let mutations_file = comm::get_mutations_file()?;\n\n if !mutations_file.exists() {\n\n bail!(\n\n \"file `target/mutagen/mutations` is not found\\n\\\n\n maybe there are no mutations defined or the attribute `#[mutate]` is not enabled\"\n\n )\n\n }\n\n\n\n let mutations = 
comm::read_items::<BakedMutation>(&mutations_file)?;\n\n\n\n // write the collected mutations\n\n let mutations_map = mutations\n\n .iter()\n\n .map(|m| (m.id(), m.as_ref()))\n\n .collect::<HashMap<_, _>>();\n\n let mutations_writer = BufWriter::new(File::create(comm::get_mutations_file_json()?)?);\n\n serde_json::to_writer(mutations_writer, &mutations_map)?;\n\n\n\n Ok(mutations)\n\n}\n", "file_path": "mutagen-runner/src/main.rs", "rank": 96, "score": 43924.69096020593 }, { "content": "fn is_num_expr_if(expr: &syn::ExprIf) -> bool {\n\n is_num_block(&expr.then_branch)\n\n || match &expr.else_branch {\n\n Some((_, else_expr)) => is_num_expr(else_expr),\n\n _ => false,\n\n }\n\n}\n\n\n", "file_path": "mutagen-core/src/transformer/ast_inspect.rs", "rank": 97, "score": 41232.11840354861 }, { "content": "fn is_num_stmt(stmt: &syn::Stmt) -> bool {\n\n match stmt {\n\n syn::Stmt::Expr(expr) => is_num_expr(&expr),\n\n _ => false,\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n\n\n use super::*;\n\n use syn::parse_quote;\n\n\n\n #[test]\n\n fn num_expr_lit_int() {\n\n let tt = parse_quote! {1};\n\n\n\n assert!(is_num_expr(&tt));\n\n }\n\n\n", "file_path": "mutagen-core/src/transformer/ast_inspect.rs", "rank": 98, "score": 41232.11840354861 }, { "content": "fn is_num_block(block: &syn::Block) -> bool {\n\n match block.stmts.last() {\n\n Some(stmt) => is_num_stmt(&stmt),\n\n _ => false,\n\n }\n\n}\n", "file_path": "mutagen-core/src/transformer/ast_inspect.rs", "rank": 99, "score": 41232.11840354861 } ]
Rust
interface/rust/src/kubernetes_applier.rs
cosmonic/kubernetes-applier
693bbe6f53d42de0bf9a738a8c23f8699538732b
#[allow(unused_imports)] use async_trait::async_trait; #[allow(unused_imports)] use serde::{Deserialize, Serialize}; #[allow(unused_imports)] use std::{borrow::Borrow, borrow::Cow, io::Write, string::ToString}; #[allow(unused_imports)] use wasmbus_rpc::{ cbor::*, common::{ deserialize, message_format, serialize, Context, Message, MessageDispatch, MessageFormat, SendOpts, Transport, }, error::{RpcError, RpcResult}, Timestamp, }; pub const SMITHY_VERSION: &str = "1.0"; #[derive(Clone, Debug, Default, Deserialize, Eq, PartialEq, Serialize)] pub struct DeleteRequest { #[serde(default)] pub group: String, #[serde(default)] pub kind: String, #[serde(default)] pub name: String, #[serde(default, skip_serializing_if = "Option::is_none")] pub namespace: Option<String>, #[serde(default)] pub version: String, } #[doc(hidden)] pub fn encode_delete_request<W: wasmbus_rpc::cbor::Write>( e: &mut wasmbus_rpc::cbor::Encoder<W>, val: &DeleteRequest, ) -> RpcResult<()> { e.map(5)?; e.str("group")?; e.str(&val.group)?; e.str("kind")?; e.str(&val.kind)?; e.str("name")?; e.str(&val.name)?; if let Some(val) = val.namespace.as_ref() { e.str("namespace")?; e.str(val)?; } else { e.null()?; } e.str("version")?; e.str(&val.version)?; Ok(()) } #[doc(hidden)] pub fn decode_delete_request( d: &mut wasmbus_rpc::cbor::Decoder<'_>, ) -> Result<DeleteRequest, RpcError> { let __result = { let mut group: Option<String> = None; let mut kind: Option<String> = None; let mut name: Option<String> = None; let mut namespace: Option<Option<String>> = Some(None); let mut version: Option<String> = None; let is_array = match d.datatype()? 
{ wasmbus_rpc::cbor::Type::Array => true, wasmbus_rpc::cbor::Type::Map => false, _ => { return Err(RpcError::Deser( "decoding struct DeleteRequest, expected array or map".to_string(), )) } }; if is_array { let len = d.array()?.ok_or_else(|| { RpcError::Deser( "decoding struct DeleteRequest: indefinite array not supported".to_string(), ) })?; for __i in 0..(len as usize) { match __i { 0 => group = Some(d.str()?.to_string()), 1 => kind = Some(d.str()?.to_string()), 2 => name = Some(d.str()?.to_string()), 3 => { namespace = if wasmbus_rpc::cbor::Type::Null == d.datatype()? { d.skip()?; Some(None) } else { Some(Some(d.str()?.to_string())) } } 4 => version = Some(d.str()?.to_string()), _ => d.skip()?, } } } else { let len = d.map()?.ok_or_else(|| { RpcError::Deser( "decoding struct DeleteRequest: indefinite map not supported".to_string(), ) })?; for __i in 0..(len as usize) { match d.str()? { "group" => group = Some(d.str()?.to_string()), "kind" => kind = Some(d.str()?.to_string()), "name" => name = Some(d.str()?.to_string()), "namespace" => { namespace = if wasmbus_rpc::cbor::Type::Null == d.datatype()? 
{ d.skip()?; Some(None) } else { Some(Some(d.str()?.to_string())) } } "version" => version = Some(d.str()?.to_string()), _ => d.skip()?, } } } DeleteRequest { group: if let Some(__x) = group { __x } else { return Err(RpcError::Deser( "missing field DeleteRequest.group (#0)".to_string(), )); }, kind: if let Some(__x) = kind { __x } else { return Err(RpcError::Deser( "missing field DeleteRequest.kind (#1)".to_string(), )); }, name: if let Some(__x) = name { __x } else { return Err(RpcError::Deser( "missing field DeleteRequest.name (#2)".to_string(), )); }, namespace: namespace.unwrap(), version: if let Some(__x) = version { __x } else { return Err(RpcError::Deser( "missing field DeleteRequest.version (#4)".to_string(), )); }, } }; Ok(__result) } #[derive(Clone, Debug, Default, Deserialize, Eq, PartialEq, Serialize)] pub struct OperationResponse { #[serde(default, skip_serializing_if = "Option::is_none")] pub error: Option<String>, #[serde(default)] pub succeeded: bool, } #[doc(hidden)] pub fn encode_operation_response<W: wasmbus_rpc::cbor::Write>( e: &mut wasmbus_rpc::cbor::Encoder<W>, val: &OperationResponse, ) -> RpcResult<()> { e.map(2)?; if let Some(val) = val.error.as_ref() { e.str("error")?; e.str(val)?; } else { e.null()?; } e.str("succeeded")?; e.bool(val.succeeded)?; Ok(()) } #[doc(hidden)] pub fn decode_operation_response( d: &mut wasmbus_rpc::cbor::Decoder<'_>, ) -> Result<OperationResponse, RpcError> { let __result = { let mut error: Option<Option<String>> = Some(None); let mut succeeded: Option<bool> = None; let is_array = match d.datatype()? 
{ wasmbus_rpc::cbor::Type::Array => true, wasmbus_rpc::cbor::Type::Map => false, _ => { return Err(RpcError::Deser( "decoding struct OperationResponse, expected array or map".to_string(), )) } }; if is_array { let len = d.array()?.ok_or_else(|| { RpcError::Deser( "decoding struct OperationResponse: indefinite array not supported".to_string(), ) })?; for __i in 0..(len as usize) { match __i { 0 => { error = if wasmbus_rpc::cbor::Type::Null == d.datatype()? { d.skip()?; Some(None) } else { Some(Some(d.str()?.to_string())) } } 1 => succeeded = Some(d.bool()?), _ => d.skip()?, } } } else { let len = d.map()?.ok_or_else(|| { RpcError::Deser( "decoding struct OperationResponse: indefinite map not supported".to_string(), ) })?; for __i in 0..(len as usize) { match d.str()? { "error" => { error = if wasmbus_rpc::cbor::Type::Null == d.datatype()? { d.skip()?; Some(None) } else { Some(Some(d.str()?.to_string())) } } "succeeded" => succeeded = Some(d.bool()?), _ => d.skip()?, } } } OperationResponse { error: error.unwrap(), succeeded: if let Some(__x) = succeeded { __x } else { return Err(RpcError::Deser( "missing field OperationResponse.succeeded (#1)".to_string(), )); }, } }; Ok(__result) } #[async_trait] pub trait KubernetesApplier { fn contract_id() -> &'static str { "cosmonic:kubernetes_applier" } async fn apply(&self, ctx: &Context, arg: &Vec<u8>) -> RpcResult<OperationResponse>; async fn delete(&self, ctx: &Context, arg: &DeleteRequest) -> RpcResult<OperationResponse>; } #[doc(hidden)] #[async_trait] pub trait KubernetesApplierReceiver: MessageDispatch + KubernetesApplier { async fn dispatch<'disp__, 'ctx__, 'msg__>( &'disp__ self, ctx: &'ctx__ Context, message: &Message<'msg__>, ) -> Result<Message<'msg__>, RpcError> { match message.method { "Apply" => { let value: Vec<u8> = wasmbus_rpc::common::deserialize(&message.arg) .map_err(|e| RpcError::Deser(format!("'Blob': {}", e)))?; let resp = KubernetesApplier::apply(self, ctx, &value).await?; let buf = 
wasmbus_rpc::common::serialize(&resp)?; Ok(Message { method: "KubernetesApplier.Apply", arg: Cow::Owned(buf), }) } "Delete" => { let value: DeleteRequest = wasmbus_rpc::common::deserialize(&message.arg) .map_err(|e| RpcError::Deser(format!("'DeleteRequest': {}", e)))?; let resp = KubernetesApplier::delete(self, ctx, &value).await?; let buf = wasmbus_rpc::common::serialize(&resp)?; Ok(Message { method: "KubernetesApplier.Delete", arg: Cow::Owned(buf), }) } _ => Err(RpcError::MethodNotHandled(format!( "KubernetesApplier::{}", message.method ))), } } } #[derive(Debug)] pub struct KubernetesApplierSender<T: Transport> { transport: T, } impl<T: Transport> KubernetesApplierSender<T> { pub fn via(transport: T) -> Self { Self { transport } } pub fn set_timeout(&self, interval: std::time::Duration) { self.transport.set_timeout(interval); } } #[cfg(target_arch = "wasm32")] impl KubernetesApplierSender<wasmbus_rpc::actor::prelude::WasmHost> { pub fn new() -> Self { let transport = wasmbus_rpc::actor::prelude::WasmHost::to_provider( "cosmonic:kubernetes_applier", "default", ) .unwrap(); Self { transport } } pub fn new_with_link(link_name: &str) -> wasmbus_rpc::error::RpcResult<Self> { let transport = wasmbus_rpc::actor::prelude::WasmHost::to_provider( "cosmonic:kubernetes_applier", link_name, )?; Ok(Self { transport }) } } #[async_trait] impl<T: Transport + std::marker::Sync + std::marker::Send> KubernetesApplier for KubernetesApplierSender<T> { #[allow(unused)] async fn apply(&self, ctx: &Context, arg: &Vec<u8>) -> RpcResult<OperationResponse> { let buf = wasmbus_rpc::common::serialize(arg)?; let resp = self .transport .send( ctx, Message { method: "KubernetesApplier.Apply", arg: Cow::Borrowed(&buf), }, None, ) .await?; let value: OperationResponse = wasmbus_rpc::common::deserialize(&resp) .map_err(|e| RpcError::Deser(format!("'{}': OperationResponse", e)))?; Ok(value) } #[allow(unused)] async fn delete(&self, ctx: &Context, arg: &DeleteRequest) -> 
RpcResult<OperationResponse> { let buf = wasmbus_rpc::common::serialize(arg)?; let resp = self .transport .send( ctx, Message { method: "KubernetesApplier.Delete", arg: Cow::Borrowed(&buf), }, None, ) .await?; let value: OperationResponse = wasmbus_rpc::common::deserialize(&resp) .map_err(|e| RpcError::Deser(format!("'{}': OperationResponse", e)))?; Ok(value) } }
#[allow(unused_imports)] use async_trait::async_trait; #[allow(unused_imports)] use serde::{Deserialize, Serialize}; #[allow(unused_imports)] use std::{borrow::Borrow, borrow::Cow, io::Write, string::ToString}; #[allow(unused_imports)] use wasmbus_rpc::{ cbor::*, common::{ deserialize, message_format, serialize, Context, Message, MessageDispatch, MessageFormat, SendOpts, Transport, }, error::{RpcError, RpcResult}, Timestamp, }; pub const SMITHY_VERSION: &str = "1.0"; #[derive(Clone, Debug, Default, Deserialize, Eq, PartialEq, Serialize)] pub struct DeleteRequest { #[serde(default)] pub group: String, #[serde(default)] pub kind: String, #[serde(default)] pub name: String, #[serde(default, skip_serializing_if = "Option::is_none")] pub namespace: Option<String>, #[serde(default)] pub version: String, } #[doc(hidden)] pub fn encode_delete_request<W: wasmbus_rpc::cbor::Write>( e: &mut wasmbus_rpc::cbor::Encoder<W>, val: &DeleteRequest, ) -> RpcResult<()> { e.map(5)?; e.str("group")?; e.str(&val.group)?; e.str("kind")?; e.str(&val.kind)?; e.str("name")?; e.str(&val.name)?; if let Some(val) = val.namespace.as_ref() { e.str("namespace")?; e.str(val)?; } else { e.null()?; } e.str("version")?; e.str(&val.version)?; Ok(()) } #[doc(hidden)] pub fn decode_delete_request( d: &mut wasmbus_rpc::cbor::Decoder<'_>, ) -> Result<DeleteRequest, RpcError> { let __result = { let mut group: Option<String> = None; let mut kind: Option<String> = None; let mut name: Option<String> = None; let mut namespace: Option<Option<String>> = Some(None); let mut version: Option<String> = None; let is_array = match d.datatype()? 
{ wasmbus_rpc::cbor::Type::Array => true, wasmbus_rpc::cbor::Type::Map => false, _ => { return Err(RpcError::Deser( "decoding struct DeleteRequest, expected array or map".to_string(), )) } }; if is_array { let len = d.array()?.ok_or_else(|| { RpcError::Deser( "decoding struct DeleteRequest: indefinite array not supported".to_string(), ) })?; for __i in 0..(len as usize) { match __i { 0 => group = Some(d.str()?.to_string()), 1 => kind = Some(d.str()?.to_string()), 2 => name = Some(d.str()?.to_string()), 3 => { namespace = if wasmbus_rpc::cbor::Type::Null == d.datatype()? { d.skip()?; Some(None) } else { Some(Some(d.str()?.to_string())) } } 4 => version = Some(d.str()?.to_string()), _ => d.skip()?, } } } else { let len = d.map()?.ok_or_else(|| { RpcError::Deser( "decoding struct DeleteRequest: indefinite map not supported".to_string(), ) })?; for __i in 0..(len as usize) {
} } DeleteRequest { group: if let Some(__x) = group { __x } else { return Err(RpcError::Deser( "missing field DeleteRequest.group (#0)".to_string(), )); }, kind: if let Some(__x) = kind { __x } else { return Err(RpcError::Deser( "missing field DeleteRequest.kind (#1)".to_string(), )); }, name: if let Some(__x) = name { __x } else { return Err(RpcError::Deser( "missing field DeleteRequest.name (#2)".to_string(), )); }, namespace: namespace.unwrap(), version: if let Some(__x) = version { __x } else { return Err(RpcError::Deser( "missing field DeleteRequest.version (#4)".to_string(), )); }, } }; Ok(__result) } #[derive(Clone, Debug, Default, Deserialize, Eq, PartialEq, Serialize)] pub struct OperationResponse { #[serde(default, skip_serializing_if = "Option::is_none")] pub error: Option<String>, #[serde(default)] pub succeeded: bool, } #[doc(hidden)] pub fn encode_operation_response<W: wasmbus_rpc::cbor::Write>( e: &mut wasmbus_rpc::cbor::Encoder<W>, val: &OperationResponse, ) -> RpcResult<()> { e.map(2)?; if let Some(val) = val.error.as_ref() { e.str("error")?; e.str(val)?; } else { e.null()?; } e.str("succeeded")?; e.bool(val.succeeded)?; Ok(()) } #[doc(hidden)] pub fn decode_operation_response( d: &mut wasmbus_rpc::cbor::Decoder<'_>, ) -> Result<OperationResponse, RpcError> { let __result = { let mut error: Option<Option<String>> = Some(None); let mut succeeded: Option<bool> = None; let is_array = match d.datatype()? { wasmbus_rpc::cbor::Type::Array => true, wasmbus_rpc::cbor::Type::Map => false, _ => { return Err(RpcError::Deser( "decoding struct OperationResponse, expected array or map".to_string(), )) } }; if is_array { let len = d.array()?.ok_or_else(|| { RpcError::Deser( "decoding struct OperationResponse: indefinite array not supported".to_string(), ) })?; for __i in 0..(len as usize) { match __i { 0 => { error = if wasmbus_rpc::cbor::Type::Null == d.datatype()? 
{ d.skip()?; Some(None) } else { Some(Some(d.str()?.to_string())) } } 1 => succeeded = Some(d.bool()?), _ => d.skip()?, } } } else { let len = d.map()?.ok_or_else(|| { RpcError::Deser( "decoding struct OperationResponse: indefinite map not supported".to_string(), ) })?; for __i in 0..(len as usize) { match d.str()? { "error" => { error = if wasmbus_rpc::cbor::Type::Null == d.datatype()? { d.skip()?; Some(None) } else { Some(Some(d.str()?.to_string())) } } "succeeded" => succeeded = Some(d.bool()?), _ => d.skip()?, } } } OperationResponse { error: error.unwrap(), succeeded: if let Some(__x) = succeeded { __x } else { return Err(RpcError::Deser( "missing field OperationResponse.succeeded (#1)".to_string(), )); }, } }; Ok(__result) } #[async_trait] pub trait KubernetesApplier { fn contract_id() -> &'static str { "cosmonic:kubernetes_applier" } async fn apply(&self, ctx: &Context, arg: &Vec<u8>) -> RpcResult<OperationResponse>; async fn delete(&self, ctx: &Context, arg: &DeleteRequest) -> RpcResult<OperationResponse>; } #[doc(hidden)] #[async_trait] pub trait KubernetesApplierReceiver: MessageDispatch + KubernetesApplier { async fn dispatch<'disp__, 'ctx__, 'msg__>( &'disp__ self, ctx: &'ctx__ Context, message: &Message<'msg__>, ) -> Result<Message<'msg__>, RpcError> { match message.method { "Apply" => { let value: Vec<u8> = wasmbus_rpc::common::deserialize(&message.arg) .map_err(|e| RpcError::Deser(format!("'Blob': {}", e)))?; let resp = KubernetesApplier::apply(self, ctx, &value).await?; let buf = wasmbus_rpc::common::serialize(&resp)?; Ok(Message { method: "KubernetesApplier.Apply", arg: Cow::Owned(buf), }) } "Delete" => { let value: DeleteRequest = wasmbus_rpc::common::deserialize(&message.arg) .map_err(|e| RpcError::Deser(format!("'DeleteRequest': {}", e)))?; let resp = KubernetesApplier::delete(self, ctx, &value).await?; let buf = wasmbus_rpc::common::serialize(&resp)?; Ok(Message { method: "KubernetesApplier.Delete", arg: Cow::Owned(buf), }) } _ => 
Err(RpcError::MethodNotHandled(format!( "KubernetesApplier::{}", message.method ))), } } } #[derive(Debug)] pub struct KubernetesApplierSender<T: Transport> { transport: T, } impl<T: Transport> KubernetesApplierSender<T> { pub fn via(transport: T) -> Self { Self { transport } } pub fn set_timeout(&self, interval: std::time::Duration) { self.transport.set_timeout(interval); } } #[cfg(target_arch = "wasm32")] impl KubernetesApplierSender<wasmbus_rpc::actor::prelude::WasmHost> { pub fn new() -> Self { let transport = wasmbus_rpc::actor::prelude::WasmHost::to_provider( "cosmonic:kubernetes_applier", "default", ) .unwrap(); Self { transport } } pub fn new_with_link(link_name: &str) -> wasmbus_rpc::error::RpcResult<Self> { let transport = wasmbus_rpc::actor::prelude::WasmHost::to_provider( "cosmonic:kubernetes_applier", link_name, )?; Ok(Self { transport }) } } #[async_trait] impl<T: Transport + std::marker::Sync + std::marker::Send> KubernetesApplier for KubernetesApplierSender<T> { #[allow(unused)] async fn apply(&self, ctx: &Context, arg: &Vec<u8>) -> RpcResult<OperationResponse> { let buf = wasmbus_rpc::common::serialize(arg)?; let resp = self .transport .send( ctx, Message { method: "KubernetesApplier.Apply", arg: Cow::Borrowed(&buf), }, None, ) .await?; let value: OperationResponse = wasmbus_rpc::common::deserialize(&resp) .map_err(|e| RpcError::Deser(format!("'{}': OperationResponse", e)))?; Ok(value) } #[allow(unused)] async fn delete(&self, ctx: &Context, arg: &DeleteRequest) -> RpcResult<OperationResponse> { let buf = wasmbus_rpc::common::serialize(arg)?; let resp = self .transport .send( ctx, Message { method: "KubernetesApplier.Delete", arg: Cow::Borrowed(&buf), }, None, ) .await?; let value: OperationResponse = wasmbus_rpc::common::deserialize(&resp) .map_err(|e| RpcError::Deser(format!("'{}': OperationResponse", e)))?; Ok(value) } }
match d.str()? { "group" => group = Some(d.str()?.to_string()), "kind" => kind = Some(d.str()?.to_string()), "name" => name = Some(d.str()?.to_string()), "namespace" => { namespace = if wasmbus_rpc::cbor::Type::Null == d.datatype()? { d.skip()?; Some(None) } else { Some(Some(d.str()?.to_string())) } } "version" => version = Some(d.str()?.to_string()), _ => d.skip()?, }
if_condition
[ { "content": "fn ensure_no_path(item: &Option<String>, entity: &str, name: &str) -> Result<(), RpcError> {\n\n if item.is_some() {\n\n return Err(RpcError::ProviderInit(format!(\n\n \"{} {} {}\",\n\n CERT_PATH_ERROR, entity, name\n\n )));\n\n }\n\n Ok(())\n\n}\n", "file_path": "applier/src/main.rs", "rank": 0, "score": 106034.18504852353 }, { "content": "fn get_port(values: HashMap<String, String>) -> RpcResult<i32> {\n\n let port = if let Some(p) = values.get(PORT_KEY) {\n\n p.parse()\n\n .map_err(|_| RpcError::InvalidParameter(\"Port value is malformed\".to_string()))?\n\n } else if let Some(p) = values.get(ADDRESS_KEY) {\n\n let addr: SocketAddr = p\n\n .parse()\n\n .map_err(|_| RpcError::InvalidParameter(\"Address value is malformed\".to_string()))?;\n\n addr.port() as i32\n\n } else {\n\n // The default port from the HTTP server is 8080, so we are going to default it here as well\n\n 8080\n\n };\n\n\n\n Ok(port)\n\n}\n", "file_path": "service-applier/src/lib.rs", "rank": 2, "score": 84122.15326753998 }, { "content": "fn unwrap_the_thingz<'a>(thing: &'a serde_json::Value, key: &str) -> RpcResult<&'a str> {\n\n thing\n\n .get(key)\n\n .ok_or_else(|| RpcError::InvalidParameter(format!(\"Event does not have key {}\", key)))?\n\n .as_str()\n\n .ok_or_else(|| RpcError::InvalidParameter(format!(\"Event does not have key {}\", key)))\n\n}\n\n\n\n#[async_trait]\n\nimpl MessageSubscriber for ServiceApplierActor {\n\n async fn handle_message(&self, ctx: &Context, msg: &SubMessage) -> RpcResult<()> {\n\n let raw: serde_json::Value = serde_json::from_slice(&msg.body)\n\n .map_err(|e| RpcError::Deser(format!(\"Invalid JSON data in message: {}\", e)))?;\n\n let evt = EventWrapper { raw };\n\n\n\n let event_type = evt.ty()?;\n\n match event_type {\n\n LINKDEF_SET_EVENT_TYPE if evt.contract_id()? 
== EXPECTED_CONTRACT_ID => {\n\n debug!(\"Found new link definition for HTTP server\");\n\n handle_apply(ctx, evt.linkdef()?).await\n", "file_path": "service-applier/src/lib.rs", "rank": 6, "score": 60219.75399065418 }, { "content": "#[derive(Default, Clone, Provider)]\n\n#[services(KubernetesApplier)]\n\nstruct ApplierProvider {\n\n clients: Arc<RwLock<HashMap<String, Client>>>,\n\n}\n\n\n\nimpl ProviderDispatch for ApplierProvider {}\n\n#[async_trait]\n\nimpl ProviderHandler for ApplierProvider {\n\n #[instrument(level = \"debug\", skip(self, ld), fields(actor_id = %ld.actor_id))]\n\n async fn put_link(&self, ld: &LinkDefinition) -> Result<bool, RpcError> {\n\n debug!(\"Got link request\");\n\n // Normalize keys to lowercase\n\n let values: HashMap<String, String> = ld\n\n .values\n\n .iter()\n\n .map(|(k, v)| (k.to_lowercase(), v.to_owned()))\n\n .collect();\n\n\n\n // Attempt to load the config. If nothing it passed attempt to infer it from the pod or the\n\n // default kubeconfig path\n\n let config = if let Some(p) = values.get(CONFIG_FILE_KEY) {\n", "file_path": "applier/src/main.rs", "rank": 8, "score": 39119.4163398955 }, { "content": "struct EventWrapper {\n\n raw: serde_json::Value,\n\n}\n\n\n\n// TODO: How do we handle configuring existing links/deleting links that no longer exist? A re-sync event?\n\n\n\nimpl EventWrapper {\n\n fn ty(&self) -> RpcResult<&str> {\n\n unwrap_the_thingz(&self.raw, \"type\")\n\n }\n\n\n\n fn contract_id(&self) -> RpcResult<&str> {\n\n let data = self.raw.get(DATA_KEY).ok_or_else(|| {\n\n RpcError::InvalidParameter(format!(\"Event does not have key {}\", DATA_KEY))\n\n })?;\n\n unwrap_the_thingz(data, \"contract_id\")\n\n }\n\n\n\n /// Returns the linkdef values by deserializing them. This allows for lazy\n\n /// deserialization only when the type and contract ID match. 
This will normalize the value keys\n", "file_path": "service-applier/src/lib.rs", "rank": 9, "score": 38140.38119530276 }, { "content": "#[derive(Debug, Default, Actor, HealthResponder)]\n\n#[services(Actor, MessageSubscriber)]\n\nstruct ServiceApplierActor {}\n\n\n", "file_path": "service-applier/src/lib.rs", "rank": 10, "score": 37253.587902252584 }, { "content": "// main (via provider_main) initializes the threaded tokio executor,\n\n// listens to lattice rpcs, handles actor links,\n\n// and returns only when it receives a shutdown message\n\n//\n\nfn main() -> Result<(), Box<dyn std::error::Error>> {\n\n tracing_subscriber::fmt()\n\n .with_writer(std::io::stderr)\n\n .with_env_filter(tracing_subscriber::EnvFilter::from_default_env())\n\n .with_ansi(atty::is(atty::Stream::Stderr))\n\n .init();\n\n info!(\"Starting provider process\");\n\n provider_main(ApplierProvider::default())?;\n\n\n\n info!(\"Applier provider exiting\");\n\n Ok(())\n\n}\n\n\n\n/// applier capability provider implementation\n", "file_path": "applier/src/main.rs", "rank": 12, "score": 25493.144353358588 }, { "content": "fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n weld_codegen::rust_build(CONFIG)?;\n\n Ok(())\n\n}\n", "file_path": "interface/rust/build.rs", "rank": 13, "score": 25488.130255374646 }, { "content": "# Kubernetes applier\n\n\n\nThis repo contains the interface, provider, and basic service actor for a Kubernetes applier. This\n\nis mainly intended for use with those who need to connect existing Kubernetes services to services\n\nin wasmCloud. Please see each individual directory for more information\n", "file_path": "README.md", "rank": 14, "score": 16219.41620770366 }, { "content": "### Testing\n\n\n\nBefore running the test, you need to have a valid kubeconfig pointing at a running Kubernetes\n\ncluster (we recommend using [kind](https://kind.sigs.k8s.io/)).\n\n\n\nFor ease of testing, we use NATS in a docker image. 
The tests can be run manually by running `cargo\n\ntest --tests` if you wish to setup your own NATS server. Otherwise, you can just run `make test` to\n\nrun all tests.\n\n\n\n#### Troubleshooting\n\n\n\nFor maximum compatibility, we use rustls for the TLS stack. However, this can cause issues with\n\nkubeconfigs that contain a server IP address rather than a FQDN (such as those created by `kind`).\n\nIf you see an error about an unrecognized domain name, make sure the server entry in your kubeconfig\n\nis using a domain name (e.g. switching `127.0.0.1` to `localhost`).\n", "file_path": "applier/README.md", "rank": 15, "score": 15700.75457664632 }, { "content": "# Kubernetes Applier Capability Provider\n\n\n\nThis is a capability provider implementation of the `cosmonic:kubernetes_applier` contract. Its\n\npurpose is to take arbitrary manifests from an actor and do the equivalent of a `kubectl apply` to\n\ncreate the object.\n\n\n\n## Using the provider\n\n\n\nTODO: Put in the OCI reference here once we push\n\n\n\nThe only configuration required for linking to this provider is a valid kubeconfig. There are 3 ways\n\nof doing this:\n\n\n\n- The default (if no config is specified) will attempt to infer the kubeconfig from the default\n\n location (e.g. `$HOME/.kube/config`) or, if it is running in a pod, from the environment variables\n\n in the pod. This option is great for local testing and for running this provider within a host\n\n running in a pod.\n\n- The `config_b64` key: The value of this key should be the base64 encoded kubeconfig the provider\n\n should use. Please note that this kubeconfig should have all certs and tokens embedded within the\n\n kubeconfig (i.e. `client-certificate-data`). If any file paths are used, the link will be\n\n rejected.\n\n- The `config_file` key: A specific path where the kubeconfig should be loaded from. 
This option\n\n does allow file paths and is recommended when you have full control over the host and are storing\n\n the kubeconfig in a location other than the default\n\n\n\n## Contributing\n\n\n\nWe welcome all contributions! If you would like to submit changes, please open a [Pull\n\nRequest](https://github.com/cosmonic/kubernetes-applier/pulls) and one of the maintainers will\n\nreview it\n\n\n\n### Prerequisites\n\n\n\nIn order to build this module, you will need to have the following tools installed:\n\n\n\n- `make`\n\n- [`wash`](https://wasmcloud.dev/overview/installation/#install-wash)\n\n- `jq`\n\n\n\n### Building\n\n\n\nTo build the binary, simply run `make build`. To build and sign the provider for use with a\n\nwasmCloud host, run `make`\n\n\n", "file_path": "applier/README.md", "rank": 16, "score": 15697.530914440891 }, { "content": "# Interface for the Factorial service, wasmcloud:example:factorial\n\n\n\nThis is an interface for a simple service that calculates\n\nthe fatorial of a whole number.\n", "file_path": "interface/README.md", "rank": 17, "score": 15693.57639155914 }, { "content": "# Kubernetes Service Applier Actor\n\n\n\nThis actor is for use in Kubernetes <-> wasmCloud compatibility. It works by listening on the\n\nwasmCloud event messaging topic for new linkdefs between actors and `wasmcloud:httpserver`\n\ncontracts. When one of these appear a service will be created to point at the proper port exposed on\n\nthe httpserver. 
When the link is deleted, the service will be removed.\n\n\n\nThis is also meant to be used as a template for an actor when using `wash new actor` for those who\n\nwish to customize what resources are created on link definitions.\n\n\n\n## How to use\n\n\n\nIn order to use this actor, you'll need the NATS `wasmcloud:messaging` provider and the [Kubernetes\n\nApplier Provider](../applier):\n\n\n\n```console\n\n$ wash ctl start provider wasmcloud.azurecr.io/applier:0.2.0\n\n$ wash ctl start provider wasmcloud.azurecr.io/nats_messaging:0.11.5\n\n```\n\n\n\nOnce you have started the providers, you can start the actor:\n\n\n\n```console\n\n$ wash ctl start actor wasmcloud.azurecr.io/service_applier:0.2.0\n\n```\n\n\n\nThen you'll need to link the providers to the actor. For instructions on how you can configure the\n\napplier provider, see [its README](../applier/README.md). For the NATS provider, you will need to\n\nconnect to the same NATS cluster that your wasmcloud hosts are connected to. This can be specified\n\nwith the `URI` parameter (e.g. `URI=nats://localhost:4222`). You'll also need to set which\n\nsubscription to listen to: `SUBSCRIPTION=wasmbus.evt.<lattice-prefix>`, where lattice prefix is the\n\nsame prefix you specified for your hosts, by default, this is `default` (so your configuration would\n\nbe `SUBSCRIPTION=wasmbus.evt.default`);\n\n\n\nNOTE: All `Service`s will be created will be in the default namespace of the kubeconfig you use for\n\nthe link definition between this actor and the applier provider. However, this is often desired\n\nbehavior as you can run this actor on a host inside of Kubernetes, which means you can use service\n\naccount credentials. 
By default, these credentials are scoped to the namespace where the hosts are\n\nrunning, which is where the `Service` should be at anyway\n\n\n", "file_path": "service-applier/README.md", "rank": 18, "score": 15210.462666233234 }, { "content": "## See it in action\n\n\n\nThe easiest way to see this in action is to start the httpserver provider\n\n(wasmcloud.azurecr.io/httpserver:0.14.6) and the echo actor (wasmcloud.azurecr.io/echo:0.3.2) and\n\nthen link them (you can do this in washboard as well if you prefer a GUI):\n\n\n\n```console\n\n$ wash ctl link put MBCFOPM6JW2APJLXJD3Z5O4CN7CPYJ2B4FTKLJUR5YR5MITIU7HD3WD5 VAG3QITQQ2ODAOWB5TTQSDJ53XK3SHBEIFNK4AYJ5RKAX2UNSCAPHA5M wasmcloud:httpserver 'address=0.0.0.0:8081'\n\n⡃⠀ Defining link between MBCFOPM6JW2APJLXJD3Z5O4CN7CPYJ2B4FTKLJUR5YR5MITIU7HD3WD5 and VAG3QITQQ2ODAOWB5TTQSDJ53XK3SHBEIFNK4AYJ5RKAX2UNSCAPHA5M ... \n\nPublished link (MBCFOPM6JW2APJLXJD3Z5O4CN7CPYJ2B4FTKLJUR5YR5MITIU7HD3WD5) <-> (VAG3QITQQ2ODAOWB5TTQSDJ53XK3SHBEIFNK4AYJ5RKAX2UNSCAPHA5M) successfully\n\n```\n\n\n\nThen you can see that the service was created by running:\n\n\n\n```console\n\n$ kubectl get svc\n\nNAME TYPE CLUSTER-IP EXTERNAL-IP PORT(S) AGE\n\nkubernetes ClusterIP 10.96.0.1 <none> 443/TCP 4d1h\n\nmbcfopm6jw2apjlxjd3z5o4cn7cpyj2b4ftkljur5yr5mitiu7hd3wd5 ClusterIP 10.96.170.75 <none> 8081/TCP 10s\n\n```\n\n\n\nThe service name is the lowercased actor ID, so you can easily identify which actor it is pointing\n\nat.\n", "file_path": "service-applier/README.md", "rank": 19, "score": 15206.657991303697 }, { "content": "### Requirements for Hosts running in Kubernetes\n\n\n\nIf you'd like your existing applications running in Kubernetes to be able to connect to applications\n\nrunning in wasmCloud, we recommend creating a \"routing tier\" of wasmCloud hosts. This means you will\n\nhave one `Deployment` of pods running wasmCloud hosts that are just for running actors and other\n\nproviders. 
You will then have a second `Deployment` of pods running wasmCloud hosts that all have\n\nthe HTTP server provider running on them. Each of these pods should have the label and value\n\n`wasmcloud.dev/route-to=true` on them in order to have traffic routed to them. Essentially, the\n\n`Service`s created by this actor direct traffic to those HTTP servers, all of which will have the\n\nport you configured in your link definition available. Once the traffic has hit those HTTP servers,\n\nit will be transmitted to actors running in the lattice, whether those are running inside or outside\n\nof Kubernetes. A simple diagram is below:\n\n\n\n```\n\n┌──────────────────────────────────┐ ┌─────────┐\n\n│ Kubernetes │ │ │ Other\n\n│ │ │ │ ┌────┐\n\n│ ┌────────┐ │ │ │ │ │\n\n│ │Service │ │ │ ├─► │\n\n│ │ │ │ │ │ └────┘\n\n│ ┌──────┴┬───────┼───────┐ │ │ │\n\n│ │ │ │ │ │ │ │ ┌────┐\n\n│ ┌──▼─┐ ┌──▼─┐ ┌──▼─┐ ┌──▼─┐ │ │ │ │ │\n\n│ │ │ │ │ │ │ │ │ │ │ ├─► │\n\n│ │ │ │ │ │ │ │ │ │ │ │ └────┘\n\n│ └─┬──┘ └────┘ └────┤ └──┬─┘ │ │ │\n\n│ │ Router Hosts │ │ │ │ │ ┌────┐\n\n│ │ │ └─────┴────┼─► Lattice │ │ │\n\n│ │ │ │ │ ├─► │\n\n│ └──────►└─────────────────────► │ │ └────┘\n\n│ │ │ │\n\n│ ┌────┐ ┌────┐ ┌────┐ ┌────┐ │ │ │ ┌────┐\n\n│ │ │ │ │ │ │ │ │ │ │ │ │ │\n\n│ │ │ │ │ │ │ │ ◄──┼─┤ ├─► │\n\n│ └────┘ └────┘ └────┘ └────┘ │ │ │ └────┘\n\n│ Normal Hosts │ │ │\n\n│ │ │ │ ┌────┐\n\n│ │ │ │ │ │\n\n│ │ │ ├─► │\n\n│ │ │ │ └────┘\n\n└──────────────────────────────────┘ └─────────┘ Hosts\n\n```\n\n\n", "file_path": "service-applier/README.md", "rank": 20, "score": 15203.996012972399 }, { "content": " })?\n\n .as_str();\n\n\n\n tracing::span::Span::current().record(\"object_name\", &tracing::field::display(obj_name));\n\n\n\n let type_data = object.types.as_ref().ok_or_else(|| {\n\n RpcError::InvalidParameter(\n\n \"The given manifest does not contain type information\".to_string(),\n\n )\n\n })?;\n\n // Decompose api_version into the parts we need to type the request\n\n let (group, version) = 
match type_data.api_version.split_once('/') {\n\n Some((g, v)) => (g.to_owned(), v.to_owned()),\n\n None => (String::new(), type_data.api_version.to_owned()),\n\n };\n\n let gvk = GroupVersionKind {\n\n group,\n\n version,\n\n kind: type_data.kind.clone(),\n\n };\n", "file_path": "applier/src/main.rs", "rank": 28, "score": 19.526795082282597 }, { "content": " })\n\n }\n\n\n\n #[instrument(level = \"debug\", skip(self, ctx), fields(actor_id = ?ctx.actor))]\n\n async fn delete(&self, ctx: &Context, arg: &DeleteRequest) -> RpcResult<OperationResponse> {\n\n let client = self.get_client(ctx).await?;\n\n\n\n let resource = ApiResource::from_gvk(&GroupVersionKind {\n\n group: arg.group.clone(),\n\n version: arg.version.clone(),\n\n kind: arg.kind.clone(),\n\n });\n\n\n\n let api: Api<DynamicObject> = if let Some(ns) = arg.namespace.as_ref() {\n\n Api::namespaced_with(client, ns.as_str(), &resource)\n\n } else {\n\n Api::default_namespaced_with(client, &resource)\n\n };\n\n debug!(\"Attempting to delete object\");\n\n match api\n", "file_path": "applier/src/main.rs", "rank": 29, "score": 17.087223571798756 }, { "content": " kind: Service::KIND.to_owned(),\n\n version: Service::VERSION.to_owned(),\n\n name: svc_name,\n\n namespace: None,\n\n },\n\n )\n\n .await?;\n\n\n\n if !resp.succeeded {\n\n return Err(RpcError::ActorHandler(format!(\n\n \"Unable to delete kubernetes service: {}\",\n\n resp.error.unwrap_or_default()\n\n )));\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "service-applier/src/lib.rs", "rank": 31, "score": 16.33617073004221 }, { "content": " .expect(\"foo label doesn't exist\"),\n\n \"happy\",\n\n \"Label value should be set correctly\"\n\n );\n\n\n\n let resp = client\n\n .delete(\n\n &ctx,\n\n &DeleteRequest {\n\n group: String::new(),\n\n kind: \"Service\".into(),\n\n version: \"v1\".into(),\n\n name: svc_name.into(),\n\n ..Default::default()\n\n },\n\n )\n\n .await?;\n\n assert!(resp.succeeded, \"Delete should have succeeded\");\n\n if 
api.get(svc_name).await.is_ok() {\n\n panic!(\"Service {} should be deleted\", svc_name)\n", "file_path": "applier/tests/applier_test.rs", "rank": 32, "score": 15.53381483341123 }, { "content": "use std::{\n\n collections::{BTreeMap, HashMap},\n\n net::SocketAddr,\n\n};\n\n\n\nuse k8s_openapi::{\n\n api::core::v1::{Service, ServicePort, ServiceSpec},\n\n apimachinery::pkg::{apis::meta::v1::ObjectMeta, util::intstr::IntOrString},\n\n Resource,\n\n};\n\nuse kubernetes_applier_interface::{DeleteRequest, KubernetesApplier, KubernetesApplierSender};\n\nuse wasmbus_rpc::{actor::prelude::*, core::LinkDefinition};\n\nuse wasmcloud_interface_logging::debug;\n\nuse wasmcloud_interface_messaging::{MessageSubscriber, MessageSubscriberReceiver, SubMessage};\n\n\n\nconst LINKDEF_SET_EVENT_TYPE: &str = \"com.wasmcloud.lattice.linkdef_set\";\n\nconst LINKDEF_DELETED_EVENT_TYPE: &str = \"com.wasmcloud.lattice.linkdef_deleted\";\n\nconst EXPECTED_CONTRACT_ID: &str = \"wasmcloud:httpserver\";\n\n\n\nconst DATA_KEY: &str = \"data\";\n\nconst ADDRESS_KEY: &str = \"address\";\n\nconst PORT_KEY: &str = \"port\";\n\nconst LABEL_PREFIX: &str = \"wasmcloud.dev\";\n\n\n\n#[derive(Debug, Default, Actor, HealthResponder)]\n\n#[services(Actor, MessageSubscriber)]\n", "file_path": "service-applier/src/lib.rs", "rank": 33, "score": 14.843523765147967 }, { "content": " tokio::time::sleep(std::time::Duration::from_secs(3)).await;\n\n\n\n let actor_id = prov.origin().public_key();\n\n // create client and ctx\n\n let client = KubernetesApplierSender::via(prov);\n\n let ctx = Context {\n\n actor: Some(actor_id),\n\n ..Default::default()\n\n };\n\n\n\n let resp = client\n\n .delete(\n\n &ctx,\n\n &DeleteRequest {\n\n group: String::new(),\n\n kind: \"Service\".into(),\n\n version: \"v1\".into(),\n\n name: svc_name.into(),\n\n ..Default::default()\n\n },\n\n )\n\n .await?;\n\n assert!(resp.succeeded, \"Delete should have succeeded\");\n\n\n\n Ok(())\n\n}\n", "file_path": 
"applier/tests/applier_test.rs", "rank": 34, "score": 14.549795085645094 }, { "content": " .delete(arg.name.as_str(), &DeleteParams::default())\n\n .await\n\n {\n\n // If it is ok or returns not found, that means we are ok\n\n Ok(_) => Ok(OperationResponse {\n\n succeeded: true,\n\n error: None,\n\n }),\n\n Err(kube::Error::Api(e)) if e.code == 404 => Ok(OperationResponse {\n\n succeeded: true,\n\n error: None,\n\n }),\n\n Err(e) => Ok(OperationResponse {\n\n succeeded: false,\n\n error: Some(e.to_string()),\n\n }),\n\n }\n\n }\n\n}\n\n\n", "file_path": "applier/src/main.rs", "rank": 36, "score": 13.97034410613178 }, { "content": " async fn delete_link(&self, actor_id: &str) {\n\n self.clients.write().await.remove(actor_id);\n\n }\n\n}\n\n\n\n#[async_trait]\n\nimpl KubernetesApplier for ApplierProvider {\n\n #[instrument(level = \"debug\", skip(self, ctx, arg), fields(actor_id = ?ctx.actor, object_name = tracing::field::Empty))]\n\n async fn apply(&self, ctx: &Context, arg: &Vec<u8>) -> RpcResult<OperationResponse> {\n\n trace!(body_len = arg.len(), \"Decoding object for apply\");\n\n let object: DynamicObject = serde_yaml::from_slice(arg).map_err(|e| {\n\n RpcError::InvalidParameter(format!(\"Unable to parse data as kubernetes object: {}\", e))\n\n })?;\n\n\n\n let obj_name = object\n\n .metadata\n\n .name\n\n .as_ref()\n\n .ok_or_else(|| {\n\n RpcError::InvalidParameter(\"The given object is missing a name\".to_string())\n", "file_path": "applier/src/main.rs", "rank": 37, "score": 13.944944719808777 }, { "content": " let hc = prov.health_check().await;\n\n check!(hc.is_ok())?;\n\n Ok(())\n\n}\n\n\n\nconst VALID_MANIFEST: &str = r#\"apiVersion: v1\n\nkind: Service\n\nmetadata:\n\n name: foo-applier-test-happy\n\n labels:\n\n wasmcloud.dev/test: \"true\"\n\nspec:\n\n selector:\n\n app.kubernetes.io/name: foo-applier\n\n ports:\n\n - protocol: TCP\n\n port: 8080\n\n targetPort: 8080\"#;\n\n\n\nconst VALID_MANIFEST_WITH_LABELS: &str = r#\"apiVersion: v1\n", 
"file_path": "applier/tests/applier_test.rs", "rank": 39, "score": 12.998416366197155 }, { "content": " let resource = ApiResource::from_gvk(&gvk);\n\n\n\n trace!(?gvk, \"Inferred object type from data\");\n\n\n\n let client = self.get_client(ctx).await?;\n\n\n\n let api: Api<DynamicObject> = if let Some(ns) = object.metadata.namespace.as_ref() {\n\n Api::namespaced_with(client, ns.as_str(), &resource)\n\n } else {\n\n Api::default_namespaced_with(client, &resource)\n\n };\n\n\n\n debug!(\"Attempting to apply object to api\");\n\n\n\n trace!(\"Checking if object already exists\");\n\n let exists = match api.get(obj_name).await {\n\n Ok(_) => true,\n\n Err(kube::Error::Api(e)) if e.code == 404 => false,\n\n // TODO: retries in case of flakiness?\n\n Err(e) => {\n", "file_path": "applier/src/main.rs", "rank": 40, "score": 12.138918559704464 }, { "content": " api.create(\n\n &PostParams {\n\n field_manager: Some(FIELD_MANAGER.to_string()),\n\n ..Default::default()\n\n },\n\n &object,\n\n )\n\n .await\n\n };\n\n\n\n if let Err(e) = resp {\n\n return Ok(OperationResponse {\n\n succeeded: false,\n\n error: Some(e.to_string()),\n\n });\n\n }\n\n\n\n Ok(OperationResponse {\n\n succeeded: true,\n\n error: None,\n", "file_path": "applier/src/main.rs", "rank": 41, "score": 11.860116284523908 }, { "content": " // We can't put in the full contract ID because it contains `:`, which isn't allowed in k8s\n\n labels.insert(\n\n format!(\"{}/{}\", LABEL_PREFIX, \"contract\"),\n\n // SAFETY: We can unwrap because the contract ID is something we own and we know it has a `:`\n\n EXPECTED_CONTRACT_ID.rsplit_once(':').unwrap().1.to_owned(),\n\n );\n\n labels.insert(format!(\"{}/{}\", LABEL_PREFIX, \"link-name\"), ld.link_name);\n\n labels.insert(\n\n format!(\"{}/{}\", LABEL_PREFIX, \"provider-id\"),\n\n ld.provider_id,\n\n );\n\n\n\n let mut selector = BTreeMap::new();\n\n // Select pods that have a label of `wasmcloud.dev/route-to=true`\n\n selector.insert(\n\n format!(\"{}/{}\", 
LABEL_PREFIX, \"route-to\"),\n\n \"true\".to_string(),\n\n );\n\n\n\n debug!(\n", "file_path": "service-applier/src/lib.rs", "rank": 42, "score": 11.738925355749217 }, { "content": "\n\n Ok(())\n\n}\n\n\n\nconst INVALID_UPDATE_MANIFEST: &str = r#\"apiVersion: v1\n\nkind: Service\n\nmetadata:\n\n name: foo-applier-test-happy\n\n labels:\n\n wasmcloud.dev/test: \"true\"\n\nspec:\n\n selector:\n\n app.kubernetes.io/name: foo-applier\n\n ports:\n\n - protocol: TCP\n\n port: 8080\n\n targetPort: 8080\n\n totallyNotValid: bar\"#;\n\n\n\n/// Test that an invalid update fails\n", "file_path": "applier/tests/applier_test.rs", "rank": 43, "score": 11.660386232891316 }, { "content": " }\n\n LINKDEF_DELETED_EVENT_TYPE if evt.contract_id()? == EXPECTED_CONTRACT_ID => {\n\n debug!(\"Link definition for HTTP server deleted\");\n\n handle_delete(ctx, evt.linkdef()?).await\n\n }\n\n _ => {\n\n debug!(\"Skipping non-linkdef event {}\", event_type);\n\n Ok(())\n\n }\n\n }\n\n }\n\n}\n\n\n\nasync fn handle_apply(ctx: &Context, ld: LinkDefinition) -> RpcResult<()> {\n\n let sender = KubernetesApplierSender::new();\n\n let port = get_port(ld.values)?;\n\n let svc_name = ld.actor_id.to_lowercase();\n\n\n\n let mut labels = BTreeMap::new();\n\n labels.insert(format!(\"{}/{}\", LABEL_PREFIX, \"actor-id\"), ld.actor_id);\n", "file_path": "service-applier/src/lib.rs", "rank": 45, "score": 10.899521014693345 }, { "content": " }\n\n Ok(())\n\n}\n\n\n\n// TODO: Test base64 config and file path config once https://github.com/wasmCloud/wasmcloud-test/issues/6 is fixed\n\n\n\nconst INVALID_MANIFEST: &str = r#\"apiVersion: v1\n\nkind: NotReal\n\nmetadata:\n\n name: foo-applier-test-invalid\n\n labels:\n\n wasmcloud.dev/test: \"true\"\n\nspec:\n\n selector:\n\n app.kubernetes.io/name: foo-applier\n\n ports:\n\n - protocol: TCP\n\n port: 8080\n\n targetPort: 8080\n\n totallyNotValid: bar\"#;\n", "file_path": "applier/tests/applier_test.rs", "rank": 46, "score": 10.637196424622786 }, { "content": "//! 
Kubernetes applier capability provider\n\n//!\n\n//!\n\nuse kube::{\n\n api::{DeleteParams, DynamicObject, PatchParams, PostParams},\n\n config::{KubeConfigOptions, Kubeconfig},\n\n core::{params::Patch, ApiResource, GroupVersionKind},\n\n Api, Client, Config,\n\n};\n\nuse kubernetes_applier_interface::{\n\n DeleteRequest, KubernetesApplier, KubernetesApplierReceiver, OperationResponse,\n\n};\n\nuse tokio::sync::RwLock;\n\nuse tracing::{debug, info, instrument, trace};\n\nuse wasmbus_rpc::provider::prelude::*;\n\n\n\nuse std::collections::HashMap;\n\nuse std::sync::Arc;\n\n\n\n/// Loading a kubeconfig from a file\n", "file_path": "applier/src/main.rs", "rank": 47, "score": 10.62527030122915 }, { "content": " )));\n\n }\n\n\n\n Ok(())\n\n}\n\n\n\nasync fn handle_delete(ctx: &Context, ld: LinkDefinition) -> RpcResult<()> {\n\n let sender = KubernetesApplierSender::new();\n\n let svc_name = ld.actor_id.to_lowercase();\n\n\n\n debug!(\n\n \"Deleting Kubernetes service with name {} from related linkdef {}-{}-{}\",\n\n svc_name, ld.actor_id, ld.provider_id, ld.link_name\n\n );\n\n\n\n let resp = sender\n\n .delete(\n\n ctx,\n\n &DeleteRequest {\n\n group: Service::GROUP.to_owned(),\n", "file_path": "service-applier/src/lib.rs", "rank": 48, "score": 10.4840859277782 }, { "content": " \"Applying new kubernetes resource with name {}, listening on port {}, with labels {:?}, and selecting pods with labels matching {:?}\",\n\n svc_name,\n\n port,\n\n labels,\n\n selector\n\n );\n\n\n\n // NOTE: If you have more than one type of contract you are handling, you'll likely want to have\n\n // some sort of data store that maps a unique service name to the full link definition. 
For\n\n // here, you can only have one linkdef of this type for an actor, so we just use the lowercased\n\n // actor key\n\n let resp = sender\n\n .apply(\n\n ctx,\n\n &serde_yaml::to_vec(&Service {\n\n metadata: ObjectMeta {\n\n name: Some(svc_name),\n\n labels: Some(labels),\n\n ..Default::default()\n\n },\n", "file_path": "service-applier/src/lib.rs", "rank": 51, "score": 9.303621263618101 }, { "content": " spec: Some(ServiceSpec {\n\n selector: Some(selector),\n\n ports: Some(vec![ServicePort {\n\n protocol: Some(\"TCP\".to_string()),\n\n port,\n\n target_port: Some(IntOrString::Int(port)),\n\n ..Default::default()\n\n }]),\n\n ..Default::default()\n\n }),\n\n ..Default::default()\n\n })\n\n .expect(\"Unable to serialize Service to yaml. This is programmer error\"),\n\n )\n\n .await?;\n\n\n\n if !resp.succeeded {\n\n return Err(RpcError::ActorHandler(format!(\n\n \"Unable to apply kubernetes service: {}\",\n\n resp.error.unwrap_or_default()\n", "file_path": "service-applier/src/lib.rs", "rank": 52, "score": 8.97117602125688 }, { "content": " RpcError::ProviderInit(format!(\n\n \"Unable to create client from loaded kubeconfig: {}\",\n\n e\n\n ))\n\n })?;\n\n\n\n // NOTE: In the future, we may want to improve this with a retry\n\n client.apiserver_version().await.map_err(|e| {\n\n RpcError::ProviderInit(format!(\n\n \"Unable to connect to the Kubernetes API server: {}\",\n\n e\n\n ))\n\n })?;\n\n tracing::trace!(\"Successfully connected to server\");\n\n\n\n let mut clients = self.clients.write().await;\n\n clients.insert(ld.actor_id.clone(), client);\n\n Ok(true)\n\n }\n\n\n", "file_path": "applier/src/main.rs", "rank": 53, "score": 8.672158523979917 }, { "content": " /// to lowercase\n\n fn linkdef(&self) -> RpcResult<LinkDefinition> {\n\n let value = self.raw.get(DATA_KEY).ok_or_else(|| {\n\n RpcError::InvalidParameter(format!(\"Event does not have key {}\", DATA_KEY))\n\n })?;\n\n\n\n let mut ld: LinkDefinition =\n\n 
serde_json::from_value(value.to_owned()).map_err(|e| RpcError::Deser(e.to_string()))?;\n\n\n\n ld.values = ld\n\n .values\n\n .into_iter()\n\n .map(|(k, v)| (k.to_lowercase(), v))\n\n .collect();\n\n\n\n Ok(ld)\n\n }\n\n}\n\n\n", "file_path": "service-applier/src/lib.rs", "rank": 54, "score": 8.357484130594113 }, { "content": " let path = p.to_owned();\n\n debug!(%path, \"Loading kubeconfig from file\");\n\n let conf = tokio::task::spawn_blocking(move || Kubeconfig::read_from(path))\n\n .await\n\n .map_err(|e| {\n\n RpcError::ProviderInit(format!(\n\n \"Internal error occured while loading kubeconfig: {}\",\n\n e\n\n ))\n\n })?\n\n .map_err(|e| format!(\"Invalid kubeconfig from file {}: {}\", p, e))?;\n\n Config::from_custom_kubeconfig(conf, &KubeConfigOptions::default())\n\n .await\n\n .map_err(|e| {\n\n RpcError::ProviderInit(format!(\"Invalid kubeconfig from file {}: {}\", p, e))\n\n })?\n\n } else if let Some(raw) = values.get(CONFIG_B64_KEY) {\n\n debug!(\"Loading config from base64 encoded string\");\n\n let decoded = base64::decode(raw).map_err(|e| {\n\n RpcError::ProviderInit(format!(\"Invalid base64 config given: {}\", e))\n", "file_path": "applier/src/main.rs", "rank": 55, "score": 8.21809681211076 }, { "content": "impl ApplierProvider {\n\n async fn get_client(&self, ctx: &Context) -> RpcResult<Client> {\n\n let actor_id = ctx.actor.as_ref().ok_or_else(|| {\n\n RpcError::InvalidParameter(\"Actor ID does not exist on request\".to_string())\n\n })?;\n\n Ok(self\n\n .clients\n\n .read()\n\n .await\n\n .get(actor_id.as_str())\n\n .ok_or_else(|| {\n\n RpcError::InvalidParameter(format!(\"No link registered for actor {}\", actor_id))\n\n })?\n\n .clone())\n\n }\n\n}\n\n\n", "file_path": "applier/src/main.rs", "rank": 56, "score": 7.850563695630269 }, { "content": " let ctx = Context {\n\n actor: Some(actor_id),\n\n ..Default::default()\n\n };\n\n\n\n let resp = client\n\n .apply(&ctx, &INVALID_UPDATE_MANIFEST.as_bytes().to_vec())\n\n .await?;\n\n 
assert!(!resp.succeeded, \"Update should not have succeeded\");\n\n assert!(resp.error.is_some(), \"Error message should be set\");\n\n\n\n Ok(())\n\n}\n\n\n\n/// Test that a non-existent delete succeeds\n\nasync fn nonexistent_delete(_opt: &TestOptions) -> RpcResult<()> {\n\n let prov = test_provider().await;\n\n let svc_name = \"foo-applier-test-noexist\";\n\n\n\n // The test scaffolding doesn't wait for an ack from the link, so wait for a bit\n", "file_path": "applier/tests/applier_test.rs", "rank": 57, "score": 7.847074028833733 }, { "content": "kind: Service\n\nmetadata:\n\n name: foo-applier-test-happy\n\n labels:\n\n wasmcloud.dev/test: \"true\"\n\n foo: happy\n\nspec:\n\n selector:\n\n app.kubernetes.io/name: foo-applier\n\n ports:\n\n - protocol: TCP\n\n port: 8080\n\n targetPort: 8080\"#;\n\n\n\n/// Test the happy path of creating updating and deleting\n\nasync fn create_update_delete_happy_path(_opt: &TestOptions) -> RpcResult<()> {\n\n let prov = test_provider().await;\n\n let svc_name = \"foo-applier-test-happy\";\n\n\n\n let client = kube::Client::try_default()\n", "file_path": "applier/tests/applier_test.rs", "rank": 58, "score": 7.3229421065051765 }, { "content": " return Ok(OperationResponse {\n\n succeeded: false,\n\n error: Some(format!(\"Unable to fetch object from API: {}\", e)),\n\n })\n\n }\n\n };\n\n\n\n let resp = if exists {\n\n trace!(\"Object already exists, attempting server-side apply\");\n\n api.patch(\n\n obj_name,\n\n &PatchParams {\n\n field_manager: Some(FIELD_MANAGER.to_string()),\n\n ..Default::default()\n\n },\n\n &Patch::Apply(&object),\n\n )\n\n .await\n\n } else {\n\n trace!(\"Object does not exist, creating\");\n", "file_path": "applier/src/main.rs", "rank": 59, "score": 7.237977343710764 }, { "content": " .await\n\n .expect(\"Unable to get client\");\n\n let api: Api<Service> = Api::default_namespaced(client);\n\n\n\n // The test scaffolding doesn't wait for an ack from the link, so wait for a bit\n\n 
tokio::time::sleep(std::time::Duration::from_secs(3)).await;\n\n\n\n let actor_id = prov.origin().public_key();\n\n // create client and ctx\n\n let client = KubernetesApplierSender::via(prov);\n\n let ctx = Context {\n\n actor: Some(actor_id),\n\n ..Default::default()\n\n };\n\n\n\n let resp = client\n\n .apply(&ctx, &VALID_MANIFEST.as_bytes().to_vec())\n\n .await?;\n\n assert!(resp.succeeded, \"Create should have succeeded\");\n\n\n", "file_path": "applier/tests/applier_test.rs", "rank": 60, "score": 6.334501341818283 }, { "content": " // Validate service exists\n\n api.get(svc_name)\n\n .await\n\n .unwrap_or_else(|_| panic!(\"Service {} does not exist\", svc_name));\n\n\n\n let resp = client\n\n .apply(&ctx, &VALID_MANIFEST_WITH_LABELS.as_bytes().to_vec())\n\n .await?;\n\n assert!(resp.succeeded, \"Update should have succeeded\");\n\n\n\n let svc = api\n\n .get(svc_name)\n\n .await\n\n .unwrap_or_else(|_| panic!(\"Service {} does not exist\", svc_name));\n\n\n\n assert_eq!(\n\n svc.metadata\n\n .labels\n\n .expect(\"Should have labels present\")\n\n .get(\"foo\")\n", "file_path": "applier/tests/applier_test.rs", "rank": 62, "score": 6.256080737761538 }, { "content": "async fn invalid_update(_opt: &TestOptions) -> RpcResult<()> {\n\n let prov = test_provider().await;\n\n\n\n let client = kube::Client::try_default()\n\n .await\n\n .expect(\"Unable to get client\");\n\n let api: Api<Service> = Api::default_namespaced(client);\n\n\n\n let valid: Service = serde_yaml::from_str(VALID_MANIFEST).unwrap();\n\n // Create a good service first\n\n api.create(&PostParams::default(), &valid)\n\n .await\n\n .expect(\"Should be able to create valid service\");\n\n\n\n // The test scaffolding doesn't wait for an ack from the link, so wait for a bit\n\n tokio::time::sleep(std::time::Duration::from_secs(3)).await;\n\n\n\n let actor_id = prov.origin().public_key();\n\n // create client and ctx\n\n let client = KubernetesApplierSender::via(prov);\n", "file_path": 
"applier/tests/applier_test.rs", "rank": 63, "score": 6.16409160319421 }, { "content": "const CONFIG_FILE_KEY: &str = \"config_file\";\n\n/// Passing a kubeconfig as a base64 encoding string. This config should contain embedded\n\n/// certificates rather than paths to certificates\n\nconst CONFIG_B64_KEY: &str = \"config_b64\";\n\n\n\nconst CERT_PATH_ERROR: &str =\n\n \"Certificate and key paths are not allowed for base64 encoded configs. Offending entry:\";\n\nconst FIELD_MANAGER: &str = \"kubernetes-applier-provider\";\n\n\n\n// main (via provider_main) initializes the threaded tokio executor,\n\n// listens to lattice rpcs, handles actor links,\n\n// and returns only when it receives a shutdown message\n\n//\n", "file_path": "applier/src/main.rs", "rank": 64, "score": 6.021811596129151 }, { "content": " &user.name,\n\n )?;\n\n ensure_no_path(&user.auth_info.client_key, \"client_key\", &user.name)?;\n\n ensure_no_path(&user.auth_info.token_file, \"token_file\", &user.name)?;\n\n }\n\n Config::from_custom_kubeconfig(conf, &KubeConfigOptions::default())\n\n .await\n\n .map_err(|e| {\n\n RpcError::ProviderInit(format!(\"Invalid kubeconfig from base64: {}\", e))\n\n })?\n\n } else {\n\n debug!(\"No config given, inferring config from environment\");\n\n // If no config was manually specified we try to infer it from local pod variables or\n\n // the default kubeconfig path\n\n Config::infer().await.map_err(|e| RpcError::ProviderInit(format!(\"No config given and unable to infer config from environment or default config file: {}\", e)))?\n\n };\n\n\n\n tracing::trace!(?config, \"Attempting to create client and connect to server\");\n\n // Now create the client and make sure it works\n\n let client = Client::try_from(config).map_err(|e| {\n", "file_path": "applier/src/main.rs", "rank": 65, "score": 5.720717963102567 }, { "content": "//! 
The Rust interface for interacting with a kubernetes applier\n\n\n\n#[allow(clippy::ptr_arg)]\n\nmod kubernetes_applier;\n\npub use kubernetes_applier::*;\n", "file_path": "interface/rust/src/lib.rs", "rank": 66, "score": 5.164826101131759 }, { "content": "\n\n/// Test that an invalid create fails\n\nasync fn invalid_create(_opt: &TestOptions) -> RpcResult<()> {\n\n let prov = test_provider().await;\n\n // The test scaffolding doesn't wait for an ack from the link, so wait for a bit\n\n tokio::time::sleep(std::time::Duration::from_secs(3)).await;\n\n\n\n let actor_id = prov.origin().public_key();\n\n // create client and ctx\n\n let client = KubernetesApplierSender::via(prov);\n\n let ctx = Context {\n\n actor: Some(actor_id),\n\n ..Default::default()\n\n };\n\n\n\n let resp = client\n\n .apply(&ctx, &INVALID_MANIFEST.as_bytes().to_vec())\n\n .await?;\n\n assert!(!resp.succeeded, \"Create should not have succeeded\");\n\n assert!(resp.error.is_some(), \"Error message should be set\");\n", "file_path": "applier/tests/applier_test.rs", "rank": 67, "score": 4.370488055633656 }, { "content": " })?;\n\n // NOTE: We do not support multiple yaml documents in the same file. 
We shouldn't need\n\n // this, but if we do, we can borrow some of the logic from the `kube` crate\n\n let conf: Kubeconfig = serde_yaml::from_slice(&decoded).map_err(|e| {\n\n RpcError::ProviderInit(format!(\"Invalid kubeconfig data given: {}\", e))\n\n })?;\n\n // Security: check that cert paths are not set as they could access certs on the host\n\n // runtime\n\n trace!(\"Ensuring base64 encoded config does not contain paths\");\n\n for cluster in conf.clusters.iter() {\n\n ensure_no_path(\n\n &cluster.cluster.certificate_authority,\n\n \"cluster\",\n\n &cluster.name,\n\n )?;\n\n }\n\n for user in conf.auth_infos.iter() {\n\n ensure_no_path(\n\n &user.auth_info.client_certificate,\n\n \"client_certificate\",\n", "file_path": "applier/src/main.rs", "rank": 68, "score": 4.293072176961507 }, { "content": " invalid_create,\n\n invalid_update,\n\n nonexistent_delete\n\n );\n\n print_test_results(&res);\n\n\n\n let passed = res.iter().filter(|tr| tr.passed).count();\n\n let total = res.len();\n\n assert_eq!(passed, total, \"{} passed out of {}\", passed, total);\n\n\n\n // try to let the provider shut down gracefully\n\n let provider = test_provider().await;\n\n let _ = provider.shutdown().await;\n\n}\n\n\n\n/// test that health check returns healthy\n\nasync fn health_check(_opt: &TestOptions) -> RpcResult<()> {\n\n let prov = test_provider().await;\n\n\n\n // health check\n", "file_path": "applier/tests/applier_test.rs", "rank": 70, "score": 3.4280627998804003 }, { "content": "use k8s_openapi::api::core::v1::Service;\n\nuse kube::{api::PostParams, Api};\n\nuse kubernetes_applier_interface::*;\n\nuse wasmbus_rpc::{error::RpcError, provider::prelude::*};\n\nuse wasmcloud_test_util::{\n\n check,\n\n cli::print_test_results,\n\n provider_test::test_provider,\n\n testing::{TestOptions, TestResult},\n\n};\n\n#[allow(unused_imports)]\n\nuse wasmcloud_test_util::{run_selected, run_selected_spawn};\n\n\n\n#[tokio::test]\n\nasync fn run_all() {\n\n let opts = 
TestOptions::default();\n\n let res = run_selected_spawn!(\n\n &opts,\n\n health_check,\n\n create_update_delete_happy_path,\n", "file_path": "applier/tests/applier_test.rs", "rank": 71, "score": 3.147350976138452 }, { "content": "// build.rs - build smithy models into rust sources at compile tile\n\n\n\n// path to codegen.toml relative to location of Cargo.toml\n\nconst CONFIG: &str = \"../codegen.toml\";\n\n\n", "file_path": "interface/rust/build.rs", "rank": 72, "score": 2.3106926597576947 } ]
Rust
src/command.rs
mwyrebski/drawerrs
1fc30e533f6af3e8435b5e763149d5c4643a77d0
use crate::canvas::Point; #[derive(PartialEq, PartialOrd, Eq, Ord, Debug, Hash)] pub enum Command { Line { from: Point, to: Point }, Rectangle { p1: Point, p2: Point }, Circle { p: Point, r: usize }, Canvas { width: usize, height: usize }, Char(char), Read(String), Save(String), Info, Show, Quit, } fn try_parse_usize(s: &str) -> Result<usize, String> { s.parse::<usize>().map_err(|e| e.to_string()) } impl Command { pub fn from(input: String) -> Result<Command, String> { let split: Vec<String> = input .trim() .split_whitespace() .enumerate() .map(|(i, s)| { if i == 0 { s.to_uppercase() } else { s.to_string() } }) .collect(); let split: Vec<&str> = split.iter().map(String::as_ref).collect(); if split.is_empty() { return Err("empty input".to_string()); } let cmd = match split.as_slice() { ["LINE", x1, y1, x2, y2] => Command::Line { from: Point(try_parse_usize(x1)?, try_parse_usize(y1)?), to: Point(try_parse_usize(x2)?, try_parse_usize(y2)?), }, ["RECT", x1, y1, x2, y2] | ["RECTANGLE", x1, y1, x2, y2] => Command::Rectangle { p1: Point(try_parse_usize(x1)?, try_parse_usize(y1)?), p2: Point(try_parse_usize(x2)?, try_parse_usize(y2)?), }, ["CIRC", x, y, r] | ["CIRCLE", x, y, r] => Command::Circle { p: Point(try_parse_usize(x)?, try_parse_usize(y)?), r: try_parse_usize(r)?, }, ["CANV", width, height] | ["CANVAS", width, height] => Command::Canvas { width: try_parse_usize(width)?, height: try_parse_usize(height)?, }, ["CHAR", ch] => Command::Char(ch.parse::<char>().map_err(|e| e.to_string())?), ["READ", filename] => Command::Read(filename.to_string()), ["SAVE", filename] => Command::Save(filename.to_string()), ["INFO"] => Command::Info, ["SHOW"] => Command::Show, ["QUIT"] => Command::Quit, _ => return Err("unknown command".to_string()), }; Ok(cmd) } } #[cfg(test)] mod tests { use super::*; fn command_variations(input: &str) -> Vec<String> { let capitalize = |s: &str| { let mut c = s.chars(); c.next().unwrap().to_uppercase().chain(c).collect() }; let mut vec: Vec<String> = 
Vec::new(); for s in &[ input.to_ascii_lowercase(), input.to_ascii_uppercase(), capitalize(input), ] { vec.push(s.clone()); vec.push(format!(" {} ", s.clone())); vec.push(format!("\t{}\t\n", s.clone())); vec.push(s.split_whitespace().collect::<Vec<&str>>().join("\t")); } vec } #[test] fn line_is_parsed() { let expected = Command::Line { from: Point(1, 2), to: Point(3, 4), }; for input in command_variations("line 1 2 3 4") { let cmd = Command::from(input).unwrap(); assert_eq!(expected, cmd); } } #[test] fn rect_is_parsed() { let expected = Command::Rectangle { p1: Point(1, 2), p2: Point(3, 4), }; for input in [ &command_variations("rect 1 2 3 4")[..], &command_variations("rectangle 1 2 3 4")[..], ] .concat() { let cmd = Command::from(input.to_string()).unwrap(); assert_eq!(expected, cmd); } } #[test] fn circle_is_parsed() { let expected = Command::Circle { p: Point(1, 2), r: 3, }; for input in [ &command_variations("circ 1 2 3")[..], &command_variations("circle 1 2 3")[..], ] .concat() { let cmd = Command::from(input.to_string()).unwrap(); assert_eq!(expected, cmd); } } #[test] fn canvas_is_parsed() { let expected = Command::Canvas { width: 100, height: 200, }; for input in [ &command_variations("canv 100 200")[..], &command_variations("canvas 100 200")[..], ] .concat() { let cmd = Command::from(input.to_string()).unwrap(); assert_eq!(expected, cmd); } } #[test] fn char_is_parsed() { let expected = Command::Char('*'); for input in command_variations("char *") { let cmd = Command::from(input.to_string()).unwrap(); assert_eq!(expected, cmd); } } #[test] fn read_is_parsed() { for input in &[ ("file_name", "read\tfile_name "), ("FileName", " READ FileName"), ("File_NAME", "Read \tFile_NAME \n"), ] { let expected = Command::Read(input.0.to_string()); let cmd = Command::from(input.1.to_string()).unwrap(); assert_eq!(expected, cmd); } } #[test] fn save_is_parsed() { for input in &[ ("file_name", "save\tfile_name "), ("FileName", " SAVE FileName"), ("File_NAME", "Save 
\tFile_NAME \n"), ] { let expected = Command::Save(input.0.to_string()); let cmd = Command::from(input.1.to_string()).unwrap(); assert_eq!(expected, cmd); } } #[test] fn info_is_parsed() { for input in command_variations("info") { let expected = Command::Info; let cmd = Command::from(input.to_string()).unwrap(); assert_eq!(expected, cmd); } } #[test] fn show_is_parsed() { for input in command_variations("show") { let expected = Command::Show; let cmd = Command::from(input.to_string()).unwrap(); assert_eq!(expected, cmd); } } #[test] fn quit_is_parsed() { for input in command_variations("quit") { let expected = Command::Quit; let cmd = Command::from(input.to_string()).unwrap(); assert_eq!(expected, cmd); } } }
use crate::canvas::Point; #[derive(PartialEq, PartialOrd, Eq, Ord, Debug, Hash)] pub enum Command { Line { from: Point, to: Point }, Rectangle { p1: Point, p2: Point }, Circle { p: Point, r: usize }, Canvas { width: usize, height: usize }, Char(char), Read(String), Save(String), Info, Show, Quit, } fn try_parse_usize(s: &str) -> Result<usize, String> { s.parse::<usize>().map_err(|e| e.to_string()) } impl Command { pub fn from(input: String) -> Result<Command, String> { let split: Vec<String> = input .trim() .split_whitespace() .enumerate() .map(|(i, s)| { if i == 0 { s.to_uppercase() } else { s.to_string() } }) .collect(); let split: Vec<&str> = split.iter().map(String::as_ref).collect(); if split.is_empty() { return Err("empty input".to_string()); } let cmd = match split.as_slice() { ["LINE", x1, y1, x2, y2] => Command::Line { from: Point(try_parse_usize(x1)?, try_parse_usize(y1)?), to: Point(try_parse_usize(x2)?, try_parse_usize(y2)?), }, ["RECT", x1, y1, x2, y2] | ["RECTANGLE", x1, y1, x2, y2] => Command::Rectangle { p1: Point(try_parse_usize(x1)?, try_parse_usize(y1)?), p2: Point(try_parse_usize(x2)?, try_parse_usize(y2)?), }, ["
} #[cfg(test)] mod tests { use super::*; fn command_variations(input: &str) -> Vec<String> { let capitalize = |s: &str| { let mut c = s.chars(); c.next().unwrap().to_uppercase().chain(c).collect() }; let mut vec: Vec<String> = Vec::new(); for s in &[ input.to_ascii_lowercase(), input.to_ascii_uppercase(), capitalize(input), ] { vec.push(s.clone()); vec.push(format!(" {} ", s.clone())); vec.push(format!("\t{}\t\n", s.clone())); vec.push(s.split_whitespace().collect::<Vec<&str>>().join("\t")); } vec } #[test] fn line_is_parsed() { let expected = Command::Line { from: Point(1, 2), to: Point(3, 4), }; for input in command_variations("line 1 2 3 4") { let cmd = Command::from(input).unwrap(); assert_eq!(expected, cmd); } } #[test] fn rect_is_parsed() { let expected = Command::Rectangle { p1: Point(1, 2), p2: Point(3, 4), }; for input in [ &command_variations("rect 1 2 3 4")[..], &command_variations("rectangle 1 2 3 4")[..], ] .concat() { let cmd = Command::from(input.to_string()).unwrap(); assert_eq!(expected, cmd); } } #[test] fn circle_is_parsed() { let expected = Command::Circle { p: Point(1, 2), r: 3, }; for input in [ &command_variations("circ 1 2 3")[..], &command_variations("circle 1 2 3")[..], ] .concat() { let cmd = Command::from(input.to_string()).unwrap(); assert_eq!(expected, cmd); } } #[test] fn canvas_is_parsed() { let expected = Command::Canvas { width: 100, height: 200, }; for input in [ &command_variations("canv 100 200")[..], &command_variations("canvas 100 200")[..], ] .concat() { let cmd = Command::from(input.to_string()).unwrap(); assert_eq!(expected, cmd); } } #[test] fn char_is_parsed() { let expected = Command::Char('*'); for input in command_variations("char *") { let cmd = Command::from(input.to_string()).unwrap(); assert_eq!(expected, cmd); } } #[test] fn read_is_parsed() { for input in &[ ("file_name", "read\tfile_name "), ("FileName", " READ FileName"), ("File_NAME", "Read \tFile_NAME \n"), ] { let expected = 
Command::Read(input.0.to_string()); let cmd = Command::from(input.1.to_string()).unwrap(); assert_eq!(expected, cmd); } } #[test] fn save_is_parsed() { for input in &[ ("file_name", "save\tfile_name "), ("FileName", " SAVE FileName"), ("File_NAME", "Save \tFile_NAME \n"), ] { let expected = Command::Save(input.0.to_string()); let cmd = Command::from(input.1.to_string()).unwrap(); assert_eq!(expected, cmd); } } #[test] fn info_is_parsed() { for input in command_variations("info") { let expected = Command::Info; let cmd = Command::from(input.to_string()).unwrap(); assert_eq!(expected, cmd); } } #[test] fn show_is_parsed() { for input in command_variations("show") { let expected = Command::Show; let cmd = Command::from(input.to_string()).unwrap(); assert_eq!(expected, cmd); } } #[test] fn quit_is_parsed() { for input in command_variations("quit") { let expected = Command::Quit; let cmd = Command::from(input.to_string()).unwrap(); assert_eq!(expected, cmd); } } }
CIRC", x, y, r] | ["CIRCLE", x, y, r] => Command::Circle { p: Point(try_parse_usize(x)?, try_parse_usize(y)?), r: try_parse_usize(r)?, }, ["CANV", width, height] | ["CANVAS", width, height] => Command::Canvas { width: try_parse_usize(width)?, height: try_parse_usize(height)?, }, ["CHAR", ch] => Command::Char(ch.parse::<char>().map_err(|e| e.to_string())?), ["READ", filename] => Command::Read(filename.to_string()), ["SAVE", filename] => Command::Save(filename.to_string()), ["INFO"] => Command::Info, ["SHOW"] => Command::Show, ["QUIT"] => Command::Quit, _ => return Err("unknown command".to_string()), }; Ok(cmd) }
function_block-function_prefix_line
[ { "content": "fn open_input_reader() -> Box<dyn BufRead> {\n\n let args: Vec<String> = env::args().skip(1).collect();\n\n match (args.get(0), args.get(1)) {\n\n (Some(opt), Some(filename)) if opt == \"-r\" => {\n\n Box::new(BufReader::new(fs::File::open(filename).unwrap()))\n\n }\n\n _ => Box::new(BufReader::new(io::stdin())),\n\n }\n\n}\n", "file_path": "src/main.rs", "rank": 1, "score": 25665.3918127075 }, { "content": "fn main() -> io::Result<()> {\n\n println!(\"drawerrs {}\", VERSION);\n\n println!(\"\");\n\n let mut canvas = Canvas::new(20, 10);\n\n let mut setchar = '*';\n\n let mut reader = open_input_reader();\n\n\n\n loop {\n\n print!(\"> \");\n\n io::stdout().flush()?;\n\n\n\n let mut line = String::new();\n\n reader.read_line(&mut line).unwrap();\n\n if line.is_empty() {\n\n println!(\"EOF. Quitting\");\n\n break;\n\n }\n\n\n\n match Command::from(line) {\n\n Ok(cmd) => match cmd {\n", "file_path": "src/main.rs", "rank": 2, "score": 17385.52570124019 }, { "content": " let data = vec![vec![' '; width]; height];\n\n Canvas {\n\n width,\n\n height,\n\n data,\n\n }\n\n }\n\n pub fn set(&mut self, x: usize, y: usize, ch: char) {\n\n self.data[y][x] = ch;\n\n }\n\n pub fn setp(&mut self, p: Point, ch: char) {\n\n self.data[p.1][p.0] = ch;\n\n }\n\n pub fn info(&self) -> String {\n\n format!(\"Canvas size: {}x{}\", self.width, self.height)\n\n }\n\n}\n\n\n\n#[derive(Copy, Clone, PartialEq, PartialOrd, Eq, Ord, Debug, Hash)]\n\npub struct Point(pub usize, pub usize);\n", "file_path": "src/canvas.rs", "rank": 3, "score": 16436.585756944995 }, { "content": "use std::fmt;\n\n\n\npub struct Canvas {\n\n width: usize,\n\n height: usize,\n\n data: Vec<Vec<char>>,\n\n}\n\n\n\nimpl fmt::Display for Canvas {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n let result = self.data.iter().fold(String::new(), |acc, row| {\n\n let line = row.into_iter().collect::<String>();\n\n format!(\"{}{}\\n\", acc, line)\n\n });\n\n write!(f, \"{}\", result)\n\n 
}\n\n}\n\n\n\nimpl Canvas {\n\n pub fn new(width: usize, height: usize) -> Canvas {\n", "file_path": "src/canvas.rs", "rank": 4, "score": 16433.86640997509 }, { "content": "\n\nimpl Point {\n\n pub fn as_f64(self) -> (f64, f64) {\n\n (self.0 as f64, self.1 as f64)\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n impl Canvas {\n\n fn get(&self, x: usize, y: usize) -> char {\n\n self.data[y][x]\n\n }\n\n }\n\n #[test]\n\n fn canvas_to_string() {\n\n let c = Canvas::new(4, 2);\n\n assert_eq!(\" \\n \\n\", c.to_string());\n", "file_path": "src/canvas.rs", "rank": 5, "score": 16431.25148938201 }, { "content": " }\n\n\n\n #[test]\n\n fn creates_canvas_with_new() {\n\n Canvas::new(2, 2);\n\n }\n\n #[test]\n\n fn new_matches_size() {\n\n let c = Canvas::new(5, 3);\n\n assert_eq!(5, c.width);\n\n assert_eq!(3, c.height);\n\n }\n\n #[test]\n\n fn set_matches_the_position() {\n\n let mut c = Canvas::new(5, 5);\n\n c.set(4, 2, '*');\n\n assert_eq!('*', c.get(4, 2));\n\n }\n\n #[test]\n\n fn get_works_on_extremes() {\n\n let c = Canvas::new(10, 5);\n\n for &(x, y) in [(0, 0), (9, 0), (9, 4), (0, 4)].iter() {\n\n assert_eq!(' ', c.get(x, y));\n\n }\n\n }\n\n}\n", "file_path": "src/canvas.rs", "rank": 6, "score": 16427.119826560265 }, { "content": " Command::Line { from, to } => {\n\n let Point(x1, y1) = from;\n\n let Point(x2, y2) = to;\n\n let (fx1, fy1) = from.as_f64();\n\n let (fx2, fy2) = to.as_f64();\n\n let m = (fy2 - fy1) / (fx2 - fx1);\n\n\n\n let form = |x, y| y as f64 - fy1 == m * (x as f64 - fx1);\n\n\n\n canvas.setp(from, setchar);\n\n canvas.setp(to, setchar);\n\n for x in x1..=x2 {\n\n for y in y1..=y2 {\n\n if form(x, y) {\n\n canvas.set(x, y, setchar);\n\n }\n\n }\n\n }\n\n }\n\n Command::Rectangle { p1, p2 } => {\n", "file_path": "src/main.rs", "rank": 17, "score": 15.545361167160651 }, { "content": "drawerrs\n\n========\n\n\n\n**_This project is work in progress._**\n\n\n\nDraw ASCII art using simple commands (with 
rust).\n\n\n\nBuilding\n\n--------\n\n\n\nThe program needs [Rust](https://www.rust-lang.org/) and uses cargo to build.\n\n\n\nTo build it, execute:\n\n\n\n cargo build\n\n\n\nThere are tests that can be run with:\n\n\n\n cargo test\n\n\n\nRunning\n\n-------\n\n\n\nYou can run it using cargo:\n\n\n\n cargo run\n\n\n\ndrawerrs stars in interactive mode waiting for commands.\n\n\n\nProviding arguments `-r <filename>` allows to read instructions from a file.\n\nIt can also be run with cargo:\n\n\n\n cargo run -- -r <filename>\n\n\n\nCommands\n\n--------\n\n\n\nAll commands can be used both in interactive mode or when reading from a file.\n\nThe following commands are supported:\n\n\n\n| Command | Description\n\n| ------------------------ | -------------------------------\n\n| CHAR ch | Use `ch` character when drawing\n\n| CANV[AS] width height | Create new canvas with the declared size\n\n| CIRC[LE] x y r | Draw a circle in point `(x, y)` with radius of the size `r`\n\n| LINE x1 y1 x2 y2 | Draw a line from point `(x1, y1)` to point `(x2, y2)`\n\n| RECT[ANGLE] x1 y1 x2 y2 | Draw a rectangle from left top on `(x1, y1)` to right bottom on `(x2, y2)`\n\n| INFO | Display short info about canvas size\n\n| SHOW | Display the drawing on the screen\n\n| READ filename | Read and execute instructions from the provided file\n\n| SAVE filename | Save ASCII drawing to the provided file\n\n| QUIT | Quit program\n\n\n\nLicense\n\n=======\n\n\n\n[MIT](https://opensource.org/licenses/MIT)\n", "file_path": "README.md", "rank": 18, "score": 14.86672558785002 }, { "content": " let Point(x1, y1) = p1;\n\n let Point(x2, y2) = p2;\n\n for x in x1..=x2 {\n\n for y in y1..=y2 {\n\n if x == x1 || x == x2 || y == y1 || y == y2 {\n\n canvas.set(x, y, setchar);\n\n }\n\n }\n\n }\n\n }\n\n Command::Circle { p, r } => {\n\n let Point(x, y) = p;\n\n let fr = r as f64;\n\n let circle = |x, y| {\n\n let fx = x as f64 - fr / 2.0;\n\n let fy = y as f64 - fr / 2.0;\n\n let dist = ((fx - fr).powi(2) + (fy 
- fr).powi(2)).sqrt();\n\n dist > fr - 0.5 && dist < fr + 0.5\n\n };\n\n\n", "file_path": "src/main.rs", "rank": 19, "score": 14.80225922280682 }, { "content": " for x in x - r - 1..=x + r + 1 {\n\n for y in y - r - 1..=y + r + 1 {\n\n if circle(x, y) {\n\n canvas.set(x, y, setchar);\n\n }\n\n }\n\n }\n\n }\n\n Command::Canvas { width, height } => {\n\n canvas = Canvas::new(width, height);\n\n println!(\"New canvas size {}x{}\", width, height);\n\n }\n\n Command::Char(ch) => {\n\n setchar = ch;\n\n println!(\"Will use new char '{}'\", setchar);\n\n }\n\n Command::Read(_filename) => {}\n\n Command::Save(filename) => {\n\n std::fs::write(filename, canvas.to_string())?;\n\n }\n", "file_path": "src/main.rs", "rank": 20, "score": 9.702947620010569 }, { "content": " Command::Info => println!(\"{}\", canvas.info()),\n\n Command::Show => {\n\n print!(\"{}\", canvas);\n\n }\n\n Command::Quit => {\n\n println!(\"Quitting...\");\n\n break;\n\n }\n\n },\n\n Err(e) => println!(\"Error: {}\", e),\n\n }\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 21, "score": 8.578299899217775 }, { "content": "mod canvas;\n\nmod command;\n\n\n\nuse crate::canvas::Canvas;\n\nuse crate::canvas::Point;\n\nuse crate::command::Command;\n\nuse std::env;\n\nuse std::fs;\n\nuse std::io;\n\nuse std::io::BufRead;\n\nuse std::io::BufReader;\n\nuse std::io::Write;\n\n\n\nconst VERSION: &'static str = env!(\"CARGO_PKG_VERSION\");\n\n\n", "file_path": "src/main.rs", "rank": 22, "score": 7.139766269982873 } ]
Rust
epp-client/src/epp/request/contact/update.rs
djc/epp-client
d06d404c12ea4d887b1106007fe56ed18600423a
use epp_client_macros::*; use crate::epp::object::data::{AuthInfo, ContactStatus, Phone, PostalInfo}; use crate::epp::object::{ElementName, EppObject, StringValue, StringValueTrait}; use crate::epp::request::Command; use crate::epp::response::contact::info::EppContactInfoResponse; use crate::epp::xml::EPP_CONTACT_XMLNS; use crate::error; use serde::{Deserialize, Serialize}; pub type EppContactUpdate = EppObject<Command<ContactUpdate>>; #[derive(Serialize, Deserialize, Debug)] pub struct ContactChangeInfo { #[serde(rename = "postalInfo")] postal_info: Option<PostalInfo>, voice: Option<Phone>, fax: Option<Phone>, email: Option<StringValue>, #[serde(rename = "authInfo")] auth_info: Option<AuthInfo>, } #[derive(Serialize, Deserialize, Debug)] pub struct StatusList { status: Vec<ContactStatus>, } #[derive(Serialize, Deserialize, Debug)] pub struct ContactUpdateData { xmlns: String, id: StringValue, #[serde(rename = "add")] add_statuses: Option<StatusList>, #[serde(rename = "rem")] remove_statuses: Option<StatusList>, #[serde(rename = "chg")] change_info: Option<ContactChangeInfo>, } #[derive(Serialize, Deserialize, Debug, ElementName)] #[element_name(name = "update")] pub struct ContactUpdate { #[serde(rename = "update")] contact: ContactUpdateData, } impl EppContactUpdate { pub fn new(id: &str, client_tr_id: &str) -> EppContactUpdate { let contact_update = ContactUpdate { contact: ContactUpdateData { xmlns: EPP_CONTACT_XMLNS.to_string(), id: id.to_string_value(), add_statuses: None, remove_statuses: None, change_info: None, }, }; EppObject::build(Command::<ContactUpdate>::new(contact_update, client_tr_id)) } pub fn set_info( &mut self, email: &str, postal_info: PostalInfo, voice: Phone, auth_password: &str, ) { self.data.command.contact.change_info = Some(ContactChangeInfo { email: Some(email.to_string_value()), postal_info: Some(postal_info), voice: Some(voice), auth_info: Some(AuthInfo::new(auth_password)), fax: None, }); } pub fn set_fax(&mut self, fax: Phone) { 
match &mut self.data.command.contact.change_info { Some(ref mut info) => info.fax = Some(fax), _ => (), } } pub fn add(&mut self, statuses: Vec<ContactStatus>) { self.data.command.contact.add_statuses = Some(StatusList { status: statuses }); } pub fn remove(&mut self, statuses: Vec<ContactStatus>) { self.data.command.contact.remove_statuses = Some(StatusList { status: statuses }); } pub fn load_from_epp_contact_info( &mut self, contact_info: EppContactInfoResponse, ) -> Result<(), error::Error> { match contact_info.data.res_data { Some(res_data) => { self.data.command.contact.change_info = Some(ContactChangeInfo { email: Some(res_data.info_data.email.clone()), postal_info: Some(res_data.info_data.postal_info.clone()), voice: Some(res_data.info_data.voice.clone()), fax: res_data.info_data.fax.clone(), auth_info: None, }); Ok(()) } None => Err(error::Error::Other( "No res_data in EppContactInfoResponse object".to_string(), )), } } }
use epp_client_macros::*; use crate::epp::object::data::{AuthInfo, ContactStatus, Phone, PostalInfo}; use crate::epp::object::{ElementName, EppObject, StringValue, StringValueTrait}; use crate::epp::request::Command; use crate::epp::response::contact::info::EppContactInfoResponse; use crate::epp::xml::EPP_CONTACT_XMLNS; use crate::error; use serde::{Deserialize, Serialize}; pub type EppContactUpdate = EppObject<Command<ContactUpdate>>; #[derive(Serialize, Deserialize, Debug)] pub struct ContactChangeInfo { #[serde(rename = "postalInfo")] postal_info: Option<PostalInfo>, voice: Option<Phone>, fax: Option<Phone>, email: Option<StringValue>, #[serde(rename = "authInfo")] auth_info: Option<AuthInfo>, } #[derive(Serialize, Deserialize, Debug)] pub struct StatusList { status: Vec<ContactStatus>, } #[derive(Serialize, Deserialize, Debug)] pub struct ContactUpdateData { xmlns: String, id: StringValue, #[serde(rename = "add")] add_statuses: Option<StatusList>, #[serde(rename = "rem")] remove_statuses: Option<StatusList>, #[serde(rename = "chg")] change_info: Option<ContactChangeInfo>, } #[derive(Serialize, Deserialize, Debug, ElementName)] #[element_name(name = "update")] pub struct ContactUpdate { #[serde(rename = "update")] contact: ContactUpdateData, } impl EppContactUpdate { pub fn new(id: &str, client_tr_id: &str) -> EppContactUpdate {
EppObject::build(Command::<ContactUpdate>::new(contact_update, client_tr_id)) } pub fn set_info( &mut self, email: &str, postal_info: PostalInfo, voice: Phone, auth_password: &str, ) { self.data.command.contact.change_info = Some(ContactChangeInfo { email: Some(email.to_string_value()), postal_info: Some(postal_info), voice: Some(voice), auth_info: Some(AuthInfo::new(auth_password)), fax: None, }); } pub fn set_fax(&mut self, fax: Phone) { match &mut self.data.command.contact.change_info { Some(ref mut info) => info.fax = Some(fax), _ => (), } } pub fn add(&mut self, statuses: Vec<ContactStatus>) { self.data.command.contact.add_statuses = Some(StatusList { status: statuses }); } pub fn remove(&mut self, statuses: Vec<ContactStatus>) { self.data.command.contact.remove_statuses = Some(StatusList { status: statuses }); } pub fn load_from_epp_contact_info( &mut self, contact_info: EppContactInfoResponse, ) -> Result<(), error::Error> { match contact_info.data.res_data { Some(res_data) => { self.data.command.contact.change_info = Some(ContactChangeInfo { email: Some(res_data.info_data.email.clone()), postal_info: Some(res_data.info_data.postal_info.clone()), voice: Some(res_data.info_data.voice.clone()), fax: res_data.info_data.fax.clone(), auth_info: None, }); Ok(()) } None => Err(error::Error::Other( "No res_data in EppContactInfoResponse object".to_string(), )), } } }
let contact_update = ContactUpdate { contact: ContactUpdateData { xmlns: EPP_CONTACT_XMLNS.to_string(), id: id.to_string_value(), add_statuses: None, remove_statuses: None, change_info: None, }, };
assignment_statement
[ { "content": "/// Basic client TRID generation function. Mainly used for testing. Users of the library should use their own clTRID generation function.\n\npub fn generate_client_tr_id(username: &str) -> Result<String, Box<dyn Error>> {\n\n let timestamp = SystemTime::now().duration_since(SystemTime::UNIX_EPOCH)?;\n\n Ok(format!(\"{}:{}\", username, timestamp.as_secs()))\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Debug, PartialEq, ElementName)]\n\n#[element_name(name = \"hello\")]\n\n/// Type corresponding to the <hello> tag in an EPP XML hello request\n\npub struct Hello;\n\n\n\nimpl EppHello {\n\n /// Creates a new Epp Hello request\n\n pub fn new() -> EppHello {\n\n EppObject::build(Hello {})\n\n }\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Debug, PartialEq, ElementName)]\n\n#[element_name(name = \"login\")]\n\n/// Type corresponding to the &lt;login&gt; tag in an EPP XML login request\n", "file_path": "epp-client/src/epp/request.rs", "rank": 0, "score": 172097.18060409706 }, { "content": "/// A function to generate a simple client TRID. 
Should only be used for testing, library users\n\n/// should generate a client TRID according to their own requirements\n\npub fn default_client_tr_id_fn(client: &EppClient) -> String {\n\n let timestamp = match SystemTime::now().duration_since(SystemTime::UNIX_EPOCH) {\n\n Ok(time) => time,\n\n Err(e) => panic!(\"Error in client TRID gen function: {}\", e)\n\n };\n\n format!(\"{}:{}\", &client.username(), timestamp.as_secs())\n\n}\n\n\n\nimpl EppClient {\n\n /// Fetches the username used in the registry connection\n\n pub fn username(&self) -> String {\n\n self.credentials.0.to_string()\n\n }\n\n\n\n // pub fn set_client_tr_id_fn<F>(&mut self, func: F)\n\n // where F: Fn(&EppClient) -> String + Send + Sync + 'static {\n\n // self.client_tr_id_fn = Arc::new(func);\n\n // }\n\n\n\n /// Creates a new EppClient object and does an EPP Login to a given registry to become ready\n", "file_path": "epp-client/src/connection/client.rs", "rank": 1, "score": 154767.75314135966 }, { "content": "/// Reads EPP XML requests and responses from the test/resources directory to run tests on\n\nfn get_xml(path: &str) -> Result<String, Box<dyn Error>> {\n\n let ws_regex = Regex::new(r\"[\\s]{2,}\")?;\n\n\n\n let mut f = File::open(format!(\"{}/{}\", RESOURCES_DIR, path))?;\n\n let mut buf = String::new();\n\n\n\n f.read_to_string(&mut buf)?;\n\n if buf.len() > 0 {\n\n let mat = Regex::new(r\"\\?>\").unwrap().find(&buf.as_str()).unwrap();\n\n let start = mat.end();\n\n buf = format!(\n\n \"{}\\r\\n{}\",\n\n &buf[..start],\n\n ws_regex.replace_all(&buf[start..], \"\")\n\n );\n\n }\n\n Ok(buf)\n\n}\n", "file_path": "epp-client/src/tests/mod.rs", "rank": 2, "score": 114478.55602598701 }, { "content": "/// Trait for StringValue type to add easier conversion from str and String\n\npub trait StringValueTrait {\n\n fn to_string_value(&self) -> StringValue;\n\n}\n\n\n\nimpl StringValueTrait for &str {\n\n fn to_string_value(&self) -> StringValue {\n\n StringValue(self.to_string())\n\n 
}\n\n}\n\n\n\nimpl StringValueTrait for String {\n\n fn to_string_value(&self) -> StringValue {\n\n StringValue(self.to_string())\n\n }\n\n}\n\n\n", "file_path": "epp-client/src/epp/object.rs", "rank": 3, "score": 90337.24137687156 }, { "content": "#[proc_macro_derive(ElementName, attributes(element_name))]\n\npub fn element_name_derive(input: TokenStream) -> TokenStream {\n\n let ast = syn::parse(input).expect(\"Error while parsing ElementName macro input\");\n\n\n\n element_name_macro(&ast)\n\n}\n", "file_path": "epp-client-macros/src/lib.rs", "rank": 4, "score": 76624.34871409333 }, { "content": "//! Types for EPP contact update response\n\n\n\nuse crate::epp::response::EppCommandResponse;\n\n\n\n/// Type that represents the &lt;epp&gt; tag for the EPP XML contact update response\n\npub type EppContactUpdateResponse = EppCommandResponse;\n", "file_path": "epp-client/src/epp/response/contact/update.rs", "rank": 13, "score": 70442.29587614647 }, { "content": "#[derive(Serialize, Deserialize, Debug, PartialEq)]\n\nstruct FlattenedServiceMenu {\n\n pub version: StringValue,\n\n pub lang: StringValue,\n\n #[serde(rename = \"objURI\")]\n\n pub obj_uris: Vec<StringValue>,\n\n #[serde(rename = \"svcExtension\")]\n\n pub svc_ext: Option<ServiceExtension>,\n\n}\n\n\n\nimpl<'de> Deserialize<'de> for ServiceMenu {\n\n /// Deserializes the <svcMenu> data to the `ServiceMenu` type\n\n fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>\n\n where\n\n D: Deserializer<'de>,\n\n {\n\n let flattened_svc_menu = FlattenedServiceMenu::deserialize(deserializer)?;\n\n\n\n let svc_menu = ServiceMenu {\n\n options: Options {\n\n version: flattened_svc_menu.version,\n", "file_path": "epp-client/src/epp/response.rs", "rank": 14, "score": 49294.364186344625 }, { "content": "/// Trait to be implemented by serializers. 
Currently the only included serializer is `quick-xml`\n\npub trait EppXml {\n\n type Output: Debug;\n\n\n\n fn serialize(&self) -> Result<String, Box<dyn Error>>;\n\n fn deserialize(epp_xml: &str) -> Result<Self::Output, error::Error>;\n\n}\n", "file_path": "epp-client/src/epp/xml.rs", "rank": 15, "score": 43928.58663658459 }, { "content": "/// Trait to set correct value for xml tags when tags are being generated from generic types\n\npub trait ElementName {\n\n fn element_name(&self) -> &'static str;\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Debug, PartialEq, ElementName)]\n\n#[element_name(name = \"empty\")]\n\n/// An empty placeholder tag. To be refactored to something more compliant later.\n\npub struct EmptyTag;\n\n\n\n/// An EPP XML Document that is used either as an EPP XML request or\n\n/// an EPP XML response\n\n#[derive(Deserialize, Debug, PartialEq)]\n\n#[serde(rename = \"epp\")]\n\npub struct EppObject<T: ElementName> {\n\n /// XML namespace for the &lt;epp&gt; tag\n\n pub xmlns: String,\n\n /// Schema namespace for the &lt;epp&gt; tag\n\n #[serde(rename = \"xmlns:xsi\")]\n\n pub xmlns_xsi: String,\n\n /// Schema location attribute for &lt;epp&gt;\n", "file_path": "epp-client/src/epp/object.rs", "rank": 16, "score": 43927.909115451155 }, { "content": "fn element_name_macro(ast: &syn::DeriveInput) -> TokenStream {\n\n let name = &ast.ident;\n\n let mut elem_name = ast.ident.to_string();\n\n let (impl_generics, type_generics, _) = &ast.generics.split_for_impl();\n\n\n\n if ast.attrs.len() > 0 {\n\n let attribute = &ast.attrs[0];\n\n match attribute.parse_meta() {\n\n Ok(syn::Meta::List(meta)) => {\n\n if meta.nested.len() > 0 {\n\n elem_name = match &meta.nested[0] {\n\n syn::NestedMeta::Meta(syn::Meta::NameValue(v)) => match &v.lit {\n\n syn::Lit::Str(lit) => lit.value(),\n\n _ => panic!(\"Invalid element_name attribute\"),\n\n },\n\n _ => panic!(\"Invalid element_name attribute\"),\n\n };\n\n } else {\n\n panic!(\"Invalid element_name 
attribute\");\n\n }\n", "file_path": "epp-client-macros/src/lib.rs", "rank": 17, "score": 38712.89222090345 }, { "content": "//! Types for EPP contact responses\n\n\n\npub mod check;\n\npub mod create;\n\npub mod delete;\n\npub mod info;\n\npub mod update;\n", "file_path": "epp-client/src/epp/response/contact.rs", "rank": 18, "score": 35915.9856012392 }, { "content": "//! Types for EPP contact requests\n\n\n\npub mod check;\n\npub mod create;\n\npub mod delete;\n\npub mod info;\n\npub mod update;\n", "file_path": "epp-client/src/epp/request/contact.rs", "rank": 19, "score": 35915.9856012392 }, { "content": "pub type EppHostUpdate = EppObject<Command<HostUpdate>>;\n\n\n\n/// Type for data under the &lt;chg&gt; tag\n\n#[derive(Serialize, Deserialize, Debug)]\n\npub struct HostChangeInfo {\n\n /// The new name for the host\n\n pub name: StringValue,\n\n}\n\n\n\n/// Type for data under the &lt;add&gt; and &lt;rem&gt; tags\n\n#[derive(Serialize, Deserialize, Debug)]\n\npub struct HostAddRemove {\n\n /// The IP addresses to be added to or removed from the host\n\n #[serde(rename = \"addr\")]\n\n pub addresses: Option<Vec<HostAddr>>,\n\n /// The statuses to be added to or removed from the host\n\n #[serde(rename = \"status\")]\n\n pub statuses: Option<Vec<HostStatus>>,\n\n}\n\n\n", "file_path": "epp-client/src/epp/request/host/update.rs", "rank": 20, "score": 35354.994453227926 }, { "content": "/// println!(\"{:?}\", response);\n\n/// }\n\n/// ```\n\npub type EppDomainUpdate = EppObject<Command<DomainUpdate<HostObjList>>>;\n\n/// Type that represents the &lt;epp&gt; request for domain &lt;update&gt; command\n\n/// with &lt;hostAttr&gt; elements in the request for &lt;ns&gt; list\n\npub type EppDomainUpdateWithHostAttr = EppObject<Command<DomainUpdate<HostAttrList>>>;\n\n\n\n/// Type for elements under the &lt;chg&gt; tag for domain update\n\n#[derive(Serialize, Deserialize, Debug)]\n\npub struct DomainChangeInfo {\n\n /// The new registrant contact for the domain\n\n pub 
registrant: Option<StringValue>,\n\n /// The new auth info for the domain\n\n #[serde(rename = \"authInfo\")]\n\n pub auth_info: Option<AuthInfo>,\n\n}\n\n\n\n/// Type for elements under the &lt;add&gt; and &lt;rem&gt; tags for domain update\n\n#[derive(Serialize, Deserialize, Debug)]\n", "file_path": "epp-client/src/epp/request/domain/update.rs", "rank": 21, "score": 35354.93235559244 }, { "content": "/// Type for data under the host &lt;update&gt; tag\n\n#[derive(Serialize, Deserialize, Debug)]\n\npub struct HostUpdateData {\n\n /// XML namespace for host commands\n\n xmlns: String,\n\n /// The name of the host\n\n name: StringValue,\n\n /// The IP addresses and statuses to be added to the host\n\n add: Option<HostAddRemove>,\n\n /// The IP addresses and statuses to be removed from the host\n\n #[serde(rename = \"rem\")]\n\n remove: Option<HostAddRemove>,\n\n /// The host details that need to be updated\n\n #[serde(rename = \"chg\")]\n\n change_info: Option<HostChangeInfo>,\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Debug, ElementName)]\n\n#[element_name(name = \"update\")]\n\n/// Type for EPP XML &lt;update&gt; command for hosts\n", "file_path": "epp-client/src/epp/request/host/update.rs", "rank": 22, "score": 35354.73514840344 }, { "content": "pub struct DomainAddRemove<T> {\n\n /// The list of nameservers to add or remove\n\n /// Type T can be either a `HostObjList` or `HostAttrList`\n\n #[serde(rename = \"ns\")]\n\n pub ns: Option<T>,\n\n /// The list of contacts to add to or remove from the domain\n\n #[serde(rename = \"contact\")]\n\n pub contacts: Option<Vec<DomainContact>>,\n\n /// The list of statuses to add to or remove from the domain\n\n #[serde(rename = \"status\")]\n\n pub statuses: Option<Vec<DomainStatus>>,\n\n}\n\n\n\n/// Type for elements under the &lt;update&gt; tag for domain update\n\n#[derive(Serialize, Deserialize, Debug)]\n\npub struct DomainUpdateData<T> {\n\n /// XML namespace for domain commands\n\n pub xmlns: String,\n\n /// The name 
of the domain to update\n\n pub name: StringValue,\n", "file_path": "epp-client/src/epp/request/domain/update.rs", "rank": 23, "score": 35352.53552370286 }, { "content": " /// `DomainAddRemove` Object containing the list of elements to be added\n\n /// to the domain\n\n pub add: Option<DomainAddRemove<T>>,\n\n /// `DomainAddRemove` Object containing the list of elements to be removed\n\n /// from the domain\n\n #[serde(rename = \"rem\")]\n\n pub remove: Option<DomainAddRemove<T>>,\n\n /// The data under the &lt;chg&gt; tag for domain update\n\n #[serde(rename = \"chg\")]\n\n pub change_info: Option<DomainChangeInfo>,\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Debug, ElementName)]\n\n#[element_name(name = \"update\")]\n\n/// Type for EPP XML &lt;update&gt; command for domains\n\npub struct DomainUpdate<T> {\n\n #[serde(rename = \"update\")]\n\n pub domain: DomainUpdateData<T>,\n\n}\n\n\n", "file_path": "epp-client/src/epp/request/domain/update.rs", "rank": 24, "score": 35351.18873551146 }, { "content": "//! 
Types for EPP domain check request\n\n\n\nuse epp_client_macros::*;\n\n\n\nuse crate::epp::object::data::{AuthInfo, DomainContact, DomainStatus, HostAttrList, HostObjList};\n\nuse crate::epp::object::{ElementName, EppObject, StringValue, StringValueTrait};\n\nuse crate::epp::request::Command;\n\nuse crate::epp::xml::EPP_DOMAIN_XMLNS;\n\nuse serde::{Deserialize, Serialize};\n\n\n\n/// Type that represents the &lt;epp&gt; request for domain &lt;update&gt; command\n\n/// with &lt;hostObj&gt; elements in the request for &lt;ns&gt; list\n\n///\n\n/// ## Usage\n\n///\n\n/// ```ignore\n\n/// use epp_client::EppClient;\n\n/// use epp_client::epp::object::data::{DomainStatus, DomainContact};\n\n/// use epp_client::epp::{EppDomainUpdate, EppDomainUpdateResponse, DomainAddRemove};\n\n/// use epp_client::epp::generate_client_tr_id;\n", "file_path": "epp-client/src/epp/request/domain/update.rs", "rank": 25, "score": 35347.829573613875 }, { "content": "impl EppDomainUpdate {\n\n /// Creates a new EppObject for domain update corresponding to the &lt;epp&gt; tag in EPP XML\n\n /// with the &lt;ns&gt; tag containing &lt;hostObj&gt; tags\n\n pub fn new(name: &str, client_tr_id: &str) -> EppDomainUpdate {\n\n EppObject::build(Command::<DomainUpdate<HostObjList>>::new(\n\n DomainUpdate {\n\n domain: DomainUpdateData {\n\n xmlns: EPP_DOMAIN_XMLNS.to_string(),\n\n name: name.to_string_value(),\n\n add: None,\n\n remove: None,\n\n change_info: None,\n\n },\n\n },\n\n client_tr_id,\n\n ))\n\n }\n\n\n\n /// Sets the data for the &lt;chg&gt; tag\n\n pub fn info(&mut self, info: DomainChangeInfo) {\n", "file_path": "epp-client/src/epp/request/domain/update.rs", "rank": 26, "score": 35346.892764631564 }, { "content": "//! 
Types for EPP host update request\n\n\n\nuse epp_client_macros::*;\n\n\n\nuse crate::epp::object::data::{HostAddr, HostStatus};\n\nuse crate::epp::object::{ElementName, EppObject, StringValue, StringValueTrait};\n\nuse crate::epp::request::Command;\n\nuse crate::epp::xml::EPP_HOST_XMLNS;\n\nuse serde::{Deserialize, Serialize};\n\n\n\n/// Type that represents the &lt;epp&gt; request for host &lt;update&gt; command\n\n///\n\n/// ## Usage\n\n///\n\n/// ```ignore\n\n/// use epp_client::EppClient;\n\n/// use epp_client::epp::object::StringValueTrait;\n\n/// use epp_client::epp::object::data::{HostAddr, HostStatus};\n\n/// use epp_client::epp::{EppHostUpdate, EppHostUpdateResponse, HostAddRemove, HostChangeInfo};\n\n/// use epp_client::epp::generate_client_tr_id;\n", "file_path": "epp-client/src/epp/request/host/update.rs", "rank": 27, "score": 35346.06860119882 }, { "content": "pub struct HostUpdate {\n\n /// The instance holding the data for the host to be updated\n\n #[serde(rename = \"update\")]\n\n host: HostUpdateData,\n\n}\n\n\n\nimpl EppHostUpdate {\n\n /// Creates a new EppObject for host update corresponding to the &lt;epp&gt; tag in EPP XML\n\n pub fn new(name: &str, client_tr_id: &str) -> EppHostUpdate {\n\n EppObject::build(Command::<HostUpdate>::new(\n\n HostUpdate {\n\n host: HostUpdateData {\n\n xmlns: EPP_HOST_XMLNS.to_string(),\n\n name: name.to_string_value(),\n\n add: None,\n\n remove: None,\n\n change_info: None,\n\n },\n\n },\n\n client_tr_id,\n", "file_path": "epp-client/src/epp/request/host/update.rs", "rank": 28, "score": 35345.427246237414 }, { "content": "/// ])\n\n/// };\n\n///\n\n/// let remove = DomainAddRemove {\n\n/// ns: None,\n\n/// contacts: Some(vec![\n\n/// DomainContact {\n\n/// contact_type: \"billing\".to_string(),\n\n/// id: \"eppdev-contact-2\".to_string()\n\n/// }\n\n/// ]),\n\n/// statuses: None,\n\n/// };\n\n///\n\n/// domain_update.add(add);\n\n/// domain_update.remove(remove);\n\n///\n\n/// // send it to the registry and 
receive a response of type EppDomainUpdateResponse\n\n/// let response = client.transact::<_, EppDomainUpdateResponse>(&domain_update).await.unwrap();\n\n///\n", "file_path": "epp-client/src/epp/request/domain/update.rs", "rank": 29, "score": 35344.90590685568 }, { "content": " self.data.command.domain.change_info = Some(info);\n\n }\n\n\n\n /// Sets the data for the &lt;add&gt; tag\n\n pub fn add(&mut self, add: DomainAddRemove<HostObjList>) {\n\n self.data.command.domain.add = Some(add);\n\n }\n\n\n\n /// Sets the data for the &lt;rem&gt; tag\n\n pub fn remove(&mut self, remove: DomainAddRemove<HostObjList>) {\n\n self.data.command.domain.remove = Some(remove);\n\n }\n\n}\n\n\n\nimpl EppDomainUpdateWithHostAttr {\n\n /// Creates a new EppObject for domain update corresponding to the &lt;epp&gt; tag in EPP XML\n\n /// with the &lt;ns&gt; tag containing &lt;hostAttr&gt; tags\n\n pub fn new(name: &str, client_tr_id: &str) -> EppDomainUpdateWithHostAttr {\n\n EppObject::build(Command::<DomainUpdate<HostAttrList>>::new(\n\n DomainUpdate {\n", "file_path": "epp-client/src/epp/request/domain/update.rs", "rank": 30, "score": 35344.55267034101 }, { "content": " domain: DomainUpdateData {\n\n xmlns: EPP_DOMAIN_XMLNS.to_string(),\n\n name: name.to_string_value(),\n\n add: None,\n\n remove: None,\n\n change_info: None,\n\n },\n\n },\n\n client_tr_id,\n\n ))\n\n }\n\n\n\n /// Sets the data for the &lt;chg&gt; tag\n\n pub fn info(&mut self, info: DomainChangeInfo) {\n\n self.data.command.domain.change_info = Some(info);\n\n }\n\n\n\n /// Sets the data for the &lt;add&gt; tag\n\n pub fn add(&mut self, add: DomainAddRemove<HostAttrList>) {\n\n self.data.command.domain.add = Some(add);\n\n }\n\n\n\n /// Sets the data for the &lt;rem&gt; tag\n\n pub fn remove(&mut self, remove: DomainAddRemove<HostAttrList>) {\n\n self.data.command.domain.remove = Some(remove);\n\n }\n\n}\n", "file_path": "epp-client/src/epp/request/domain/update.rs", "rank": 31, "score": 35343.551809464705 }, { 
"content": " ))\n\n }\n\n\n\n /// Sets the data for the &lt;chg&gt; element of the host update\n\n pub fn info(&mut self, info: HostChangeInfo) {\n\n self.data.command.host.change_info = Some(info);\n\n }\n\n\n\n /// Sets the data for the &lt;add&gt; element of the host update\n\n pub fn add(&mut self, add: HostAddRemove) {\n\n self.data.command.host.add = Some(add);\n\n }\n\n\n\n /// Sets the data for the &lt;rem&gt; element of the host update\n\n pub fn remove(&mut self, remove: HostAddRemove) {\n\n self.data.command.host.remove = Some(remove);\n\n }\n\n}\n", "file_path": "epp-client/src/epp/request/host/update.rs", "rank": 32, "score": 35340.838985779315 }, { "content": "///\n\n/// #[tokio::main]\n\n/// async fn main() {\n\n/// // Create an instance of EppClient, specifying the name of the registry as in\n\n/// // the config file\n\n/// let mut client = match EppClient::new(\"verisign\").await {\n\n/// Ok(client) => client,\n\n/// Err(e) => panic!(\"Failed to create EppClient: {}\", e)\n\n/// };\n\n///\n\n/// // Create an EppDomainUpdate instance\n\n/// let mut domain_update = EppDomainUpdate::new(\"eppdev-100.com\", generate_client_tr_id(&client).as_str());\n\n///\n\n/// let add = DomainAddRemove {\n\n/// ns: None,\n\n/// contacts: None,\n\n/// statuses: Some(vec![\n\n/// DomainStatus {\n\n/// status: \"clientUpdateProhibited\".to_string()\n\n/// }\n", "file_path": "epp-client/src/epp/request/domain/update.rs", "rank": 33, "score": 35340.40715216085 }, { "content": "//! Types for EPP domain update response\n\n\n\nuse crate::epp::response::EppCommandResponse;\n\n\n\n/// Type that represents the &lt;epp&gt; tag for the EPP XML domain update response\n\npub type EppDomainUpdateResponse = EppCommandResponse;\n", "file_path": "epp-client/src/epp/response/domain/update.rs", "rank": 34, "score": 35336.87008338131 }, { "content": "//! 
Types for EPP host check response\n\n\n\nuse crate::epp::response::EppCommandResponse;\n\n\n\n/// Type that represents the &lt;epp&gt; tag for the EPP XML host update response\n\npub type EppHostUpdateResponse = EppCommandResponse;\n", "file_path": "epp-client/src/epp/response/host/update.rs", "rank": 35, "score": 35336.49932374832 }, { "content": "///\n\n/// let remove = HostAddRemove {\n\n/// addresses: Some(vec![\n\n/// HostAddr::new(\"v6\", \"2404:6800:4001:801::200e\")\n\n/// ]),\n\n/// statuses: None,\n\n/// };\n\n///\n\n/// host_update.add(add);\n\n/// host_update.remove(remove);\n\n///\n\n/// // Send a &lt;chg&gt; section as well\n\n/// host_update.info(HostChangeInfo { name: \"ns2.eppdev-101.com\".to_string_value() });\n\n///\n\n/// // send it to the registry and receive a response of type EppHostUpdateResponse\n\n/// let response = client.transact::<_, EppHostUpdateResponse>(&host_update).await.unwrap();\n\n///\n\n/// println!(\"{:?}\", response);\n\n/// }\n\n/// ```\n", "file_path": "epp-client/src/epp/request/host/update.rs", "rank": 36, "score": 35336.48926202584 }, { "content": "///\n\n/// #[tokio::main]\n\n/// async fn main() {\n\n/// // Create an instance of EppClient, specifying the name of the registry as in\n\n/// // the config file\n\n/// let mut client = match EppClient::new(\"verisign\").await {\n\n/// Ok(client) => client,\n\n/// Err(e) => panic!(\"Failed to create EppClient: {}\", e)\n\n/// };\n\n///\n\n/// // Create an EppHostUpdate instance\n\n/// let mut host_update = EppHostUpdate::new(\"ns1.eppdev-101.com\", generate_client_tr_id(&client).as_str());\n\n///\n\n/// /// Prepare the add and remove sections for the update\n\n/// let add = HostAddRemove {\n\n/// addresses: Some(vec![\n\n/// HostAddr::new(\"v4\", \"177.34.126.17\")\n\n/// ]),\n\n/// statuses: None\n\n/// };\n", "file_path": "epp-client/src/epp/request/host/update.rs", "rank": 37, "score": 35335.71083022299 }, { "content": " /// XML namespace for contact commands\n\n xmlns: 
String,\n\n /// Contact &lt;id&gt; tag\n\n id: StringValue,\n\n /// Contact &lt;postalInfo&gt; tag\n\n #[serde(rename = \"postalInfo\")]\n\n postal_info: data::PostalInfo,\n\n /// Contact &lt;voice&gt; tag\n\n voice: data::Phone,\n\n /// Contact &lt;fax&gt; tag,\n\n fax: Option<data::Phone>,\n\n /// Contact &lt;email&gt; tag\n\n email: StringValue,\n\n /// Contact &lt;authInfo&gt; tag\n\n #[serde(rename = \"authInfo\")]\n\n auth_info: data::AuthInfo,\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Debug, ElementName)]\n\n#[element_name(name = \"create\")]\n", "file_path": "epp-client/src/epp/request/contact/create.rs", "rank": 38, "score": 35134.61059698696 }, { "content": " /// The contact id\n\n pub id: StringValue,\n\n /// The contact ROID\n\n pub roid: StringValue,\n\n /// The list of contact statuses\n\n #[serde(rename = \"status\")]\n\n pub statuses: Vec<ContactStatus>,\n\n /// The postal info for the contact\n\n #[serde(rename = \"postalInfo\")]\n\n pub postal_info: PostalInfo,\n\n /// The voice data for the contact\n\n pub voice: Phone,\n\n /// The fax data for the contact\n\n pub fax: Option<Phone>,\n\n /// The email for the contact\n\n pub email: StringValue,\n\n /// The epp user to whom the contact belongs\n\n #[serde(rename = \"clID\")]\n\n pub client_id: StringValue,\n\n /// The epp user who created the contact\n", "file_path": "epp-client/src/epp/response/contact/info.rs", "rank": 39, "score": 35133.355423630914 }, { "content": "//! 
Types for EPP contact check response\n\n\n\nuse serde::{Deserialize, Serialize};\n\n\n\nuse crate::epp::object::{EppObject, StringValue};\n\nuse crate::epp::response::CommandResponse;\n\n\n\n/// Type that represents the &lt;epp&gt; tag for the EPP XML contact check response\n\npub type EppContactCheckResponse = EppObject<CommandResponse<ContactCheckResult>>;\n\n\n\n/// Type that represents the &lt;id&gt; tag for contact check response\n\n#[derive(Serialize, Deserialize, Debug)]\n\npub struct ContactCheck {\n\n /// The text of the &lt;id&gt; tag\n\n #[serde(rename = \"$value\")]\n\n pub id: StringValue,\n\n /// The avail attr on the &lt;id&gt; tag\n\n #[serde(rename = \"avail\")]\n\n pub available: u16,\n\n}\n", "file_path": "epp-client/src/epp/response/contact/check.rs", "rank": 40, "score": 35133.038680355196 }, { "content": "/// Type for EPP XML &lt;create&gt; command for contacts\n\npub struct ContactCreate {\n\n /// Data for &lt;create&gt; command for contact\n\n #[serde(rename = \"create\")]\n\n pub contact: Contact,\n\n}\n\n\n\nimpl EppContactCreate {\n\n /// Creates a new EppObject for contact create corresponding to the &lt;epp&gt; tag in EPP XML\n\n pub fn new(\n\n id: &str,\n\n email: &str,\n\n postal_info: data::PostalInfo,\n\n voice: data::Phone,\n\n auth_password: &str,\n\n client_tr_id: &str,\n\n ) -> EppContactCreate {\n\n let contact_create = ContactCreate {\n\n contact: Contact {\n\n xmlns: EPP_CONTACT_XMLNS.to_string(),\n", "file_path": "epp-client/src/epp/request/contact/create.rs", "rank": 41, "score": 35133.010575993 }, { "content": "/// \"eppdev-contact-100\",\n\n/// \"[email protected]\",\n\n/// postal_info,\n\n/// voice,\n\n/// \"epP4uthd#v\",\n\n/// generate_client_tr_id(&client).as_str()\n\n/// );\n\n/// contact_create.set_fax(fax);\n\n///\n\n/// // send it to the registry and receive a response of type EppContactCreateResponse\n\n/// let response = client.transact::<_, 
EppContactCreateResponse>(&contact_create).await.unwrap();\n\n///\n\n/// println!(\"{:?}\", response);\n\n/// }\n\n/// ```\n\npub type EppContactCreate = EppObject<Command<ContactCreate>>;\n\n\n\n/// Type for elements under the contact &lt;create&gt; tag\n\n#[derive(Serialize, Deserialize, Debug)]\n\npub struct Contact {\n", "file_path": "epp-client/src/epp/request/contact/create.rs", "rank": 42, "score": 35132.14829094766 }, { "content": "\n\n/// Type containing the data for the &lt;delete&gt; tag for contacts\n\n#[derive(Serialize, Deserialize, Debug)]\n\npub struct ContactDeleteData {\n\n /// XML namespace for the &lt;delete&gt; command for contacts\n\n xmlns: String,\n\n /// The id of the contact to be deleted\n\n id: StringValue,\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Debug, ElementName)]\n\n#[element_name(name = \"delete\")]\n\n/// The &lt;delete&gt; type for the contact delete EPP command\n\npub struct ContactDelete {\n\n #[serde(rename = \"delete\")]\n\n /// The data for the &lt;delete&gt; tag for a contact delete command\n\n contact: ContactDeleteData,\n\n}\n\n\n\nimpl EppContactDelete {\n", "file_path": "epp-client/src/epp/request/contact/delete.rs", "rank": 43, "score": 35131.56187436828 }, { "content": " id: id.to_string_value(),\n\n postal_info: postal_info,\n\n voice: voice,\n\n fax: None,\n\n email: email.to_string_value(),\n\n auth_info: data::AuthInfo::new(auth_password),\n\n },\n\n };\n\n\n\n EppObject::build(Command::<ContactCreate>::new(contact_create, client_tr_id))\n\n }\n\n\n\n /// Sets the &lt;fax&gt; data for the request\n\n pub fn set_fax(&mut self, fax: data::Phone) {\n\n self.data.command.contact.fax = Some(fax);\n\n }\n\n}\n", "file_path": "epp-client/src/epp/request/contact/create.rs", "rank": 44, "score": 35131.37301050746 }, { "content": "//! 
Types for EPP contact create response\n\n\n\nuse serde::{Deserialize, Serialize};\n\n\n\nuse crate::epp::object::{EppObject, StringValue};\n\nuse crate::epp::response::CommandResponse;\n\n\n\n/// Type that represents the &lt;epp&gt; tag for the EPP XML contact create response\n\npub type EppContactCreateResponse = EppObject<CommandResponse<ContactCreateResult>>;\n\n\n\n/// Type that represents the &lt;creData&gt; tag for contact create response\n\n#[derive(Serialize, Deserialize, Debug)]\n\npub struct ContactCreateData {\n\n /// XML namespace for contact response data\n\n #[serde(rename = \"xmlns:contact\")]\n\n xmlns: String,\n\n /// XML schema location for contact response data\n\n #[serde(rename = \"xsi:schemaLocation\")]\n\n schema_location: String,\n\n /// The contact id\n", "file_path": "epp-client/src/epp/response/contact/create.rs", "rank": 45, "score": 35131.01794908736 }, { "content": "//! Types for EPP contact info response\n\n\n\nuse serde::{Deserialize, Serialize};\n\n\n\nuse crate::epp::object::data::{AuthInfo, ContactStatus, Phone, PostalInfo};\n\nuse crate::epp::object::{EppObject, StringValue};\n\nuse crate::epp::response::CommandResponse;\n\n\n\n/// Type that represents the &lt;epp&gt; tag for the EPP XML contact info response\n\npub type EppContactInfoResponse = EppObject<CommandResponse<ContactInfoResult>>;\n\n\n\n/// Type that represents the &lt;infData&gt; tag for contact check response\n\n#[derive(Serialize, Deserialize, Debug)]\n\npub struct ContactInfoData {\n\n /// XML namespace for contact response data\n\n #[serde(rename = \"xmlns:contact\")]\n\n xmlns: String,\n\n /// XML schema location for contact response data\n\n #[serde(rename = \"xsi:schemaLocation\")]\n\n schema_location: String,\n", "file_path": "epp-client/src/epp/response/contact/info.rs", "rank": 46, "score": 35130.86210967342 }, { "content": " #[serde(rename = \"crID\")]\n\n pub creator_id: StringValue,\n\n /// The creation date\n\n #[serde(rename = \"crDate\")]\n\n pub 
created_at: StringValue,\n\n /// The epp user who last updated the contact\n\n #[serde(rename = \"upID\")]\n\n pub updater_id: Option<StringValue>,\n\n /// The last update date\n\n #[serde(rename = \"upDate\")]\n\n pub updated_at: Option<StringValue>,\n\n /// The contact transfer date\n\n #[serde(rename = \"trDate\")]\n\n pub transferred_at: Option<StringValue>,\n\n /// The contact auth info\n\n #[serde(rename = \"authInfo\")]\n\n pub auth_info: Option<AuthInfo>,\n\n}\n\n\n\n/// Type that represents the &lt;resData&gt; tag for contact info response\n\n#[derive(Serialize, Deserialize, Debug)]\n\npub struct ContactInfoResult {\n\n /// Data under the &lt;infData&gt; tag\n\n #[serde(rename = \"infData\")]\n\n pub info_data: ContactInfoData,\n\n}\n", "file_path": "epp-client/src/epp/response/contact/info.rs", "rank": 47, "score": 35130.271155627874 }, { "content": "\n\n/// Type that represents the &lt;cd&gt; tag for contact check response\n\n#[derive(Serialize, Deserialize, Debug)]\n\npub struct ContactCheckDataItem {\n\n /// Data under the &lt;id&gt; tag\n\n #[serde(rename = \"id\")]\n\n pub contact: ContactCheck,\n\n /// The reason for (un)availability\n\n pub reason: Option<StringValue>,\n\n}\n\n\n\n/// Type that represents the &lt;chkData&gt; tag for contact check response\n\n#[derive(Serialize, Deserialize, Debug)]\n\npub struct ContactCheckData {\n\n /// XML namespace for contact response data\n\n #[serde(rename = \"xmlns:contact\")]\n\n xmlns: String,\n\n /// XML schema location for contact response data\n\n #[serde(rename = \"xsi:schemaLocation\")]\n\n schema_location: String,\n", "file_path": "epp-client/src/epp/response/contact/check.rs", "rank": 48, "score": 35129.82220254819 }, { "content": "\n\n/// Type that represents the &lt;check&gt; command for contact transactions\n\n#[derive(Serialize, Deserialize, Debug)]\n\npub struct ContactList {\n\n /// The XML namespace for the contact &lt;check&gt;\n\n xmlns: String,\n\n /// The list of contact ids to check for 
availability\n\n #[serde(rename = \"id\")]\n\n pub contact_ids: Vec<StringValue>,\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Debug, ElementName)]\n\n#[element_name(name = \"check\")]\n\n/// The &lt;command&gt; type for contact check command\n\npub struct ContactCheck {\n\n /// The &lt;check&gt; tag for the contact check command\n\n #[serde(rename = \"check\")]\n\n list: ContactList,\n\n}\n\n\n", "file_path": "epp-client/src/epp/request/contact/check.rs", "rank": 49, "score": 35129.683506030444 }, { "content": "/// ```\n\npub type EppContactInfo = EppObject<Command<ContactInfo>>;\n\n\n\n/// Type for elements under the contact &lt;info&gt; tag\n\n#[derive(Serialize, Deserialize, Debug)]\n\npub struct ContactInfoData {\n\n /// XML namespace for contact commands\n\n xmlns: String,\n\n /// The contact id for the info command\n\n id: StringValue,\n\n /// The &lt;authInfo&gt; data\n\n #[serde(rename = \"authInfo\")]\n\n auth_info: AuthInfo,\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Debug, ElementName)]\n\n#[element_name(name = \"info\")]\n\n/// Type for EPP XML &lt;info&gt; command for contacts\n\npub struct ContactInfo {\n\n /// Data for &lt;info&gt; command for contact\n", "file_path": "epp-client/src/epp/request/contact/info.rs", "rank": 50, "score": 35129.45275322254 }, { "content": " pub id: StringValue,\n\n #[serde(rename = \"crDate\")]\n\n /// The contact creation date\n\n pub created_at: StringValue,\n\n}\n\n\n\n/// Type that represents the &lt;resData&gt; tag for contact create response\n\n#[derive(Serialize, Deserialize, Debug)]\n\npub struct ContactCreateResult {\n\n /// Data under the &lt;creData&gt; tag\n\n #[serde(rename = \"creData\")]\n\n pub create_data: ContactCreateData,\n\n}\n", "file_path": "epp-client/src/epp/response/contact/create.rs", "rank": 51, "score": 35127.52876956654 }, { "content": "//! 
Types for EPP contact create request\n\n\n\nuse epp_client_macros::*;\n\n\n\nuse crate::epp::object::data;\n\nuse crate::epp::object::{ElementName, EppObject, StringValue, StringValueTrait};\n\nuse crate::epp::request::Command;\n\nuse crate::epp::xml::EPP_CONTACT_XMLNS;\n\nuse serde::{Deserialize, Serialize};\n\n\n\n/// Type that represents the &lt;epp&gt; request for contact &lt;create&gt; command\n\n///\n\n/// ## Usage\n\n///\n\n/// ```ignore\n\n/// use epp_client::EppClient;\n\n/// use epp_client::epp::object::data::{Address, Phone, PostalInfo};\n\n/// use epp_client::epp::{EppContactCreate, EppContactCreateResponse};\n\n/// use epp_client::epp::generate_client_tr_id;\n\n///\n", "file_path": "epp-client/src/epp/request/contact/create.rs", "rank": 52, "score": 35126.111812550196 }, { "content": "//! Types for EPP contact delete request\n\n\n\nuse epp_client_macros::*;\n\n\n\nuse crate::epp::object::{ElementName, EppObject, StringValue, StringValueTrait};\n\nuse crate::epp::request::Command;\n\nuse crate::epp::xml::EPP_CONTACT_XMLNS;\n\nuse serde::{Deserialize, Serialize};\n\n\n\n/// Type for the &lt;epp&gt; request for contact &lt;delete&gt; command\n\n///\n\n/// ## Usage\n\n///\n\n/// ```ignore\n\n/// use epp_client::EppClient;\n\n/// use epp_client::epp::{EppContactDelete, EppContactDeleteResponse};\n\n/// use epp_client::epp::generate_client_tr_id;\n\n///\n\n/// #[tokio::main]\n\n/// async fn main() {\n", "file_path": "epp-client/src/epp/request/contact/delete.rs", "rank": 53, "score": 35124.07740755372 }, { "content": "//! 
Types for EPP contact check request\n\n\n\nuse epp_client_macros::*;\n\n\n\nuse crate::epp::object::{ElementName, EppObject, StringValue, StringValueTrait};\n\nuse crate::epp::request::Command;\n\nuse crate::epp::xml::EPP_CONTACT_XMLNS;\n\nuse serde::{Deserialize, Serialize};\n\n\n\n/// Type that represents the &lt;epp&gt; request for contact &lt;check&gt; command\n\n///\n\n/// ## Usage\n\n///\n\n/// ```rust\n\n/// use epp_client::EppClient;\n\n/// use epp_client::epp::{EppContactCheck, EppContactCheckResponse};\n\n/// use epp_client::epp::generate_client_tr_id;\n\n///\n\n/// #[tokio::main]\n\n/// async fn main() {\n", "file_path": "epp-client/src/epp/request/contact/check.rs", "rank": 54, "score": 35124.005387134755 }, { "content": "//! Types for EPP contact info request\n\n\n\nuse epp_client_macros::*;\n\n\n\nuse crate::epp::object::data::AuthInfo;\n\nuse crate::epp::object::{ElementName, EppObject, StringValue, StringValueTrait};\n\nuse crate::epp::request::Command;\n\nuse crate::epp::xml::EPP_CONTACT_XMLNS;\n\nuse serde::{Deserialize, Serialize};\n\n\n\n/// Type for the &lt;epp&gt; request for contact &lt;info&gt; command\n\n///\n\n/// ## Usage\n\n///\n\n/// ```ignore\n\n/// use epp_client::EppClient;\n\n/// use epp_client::epp::{EppContactInfo, EppContactInfoResponse};\n\n/// use epp_client::epp::generate_client_tr_id;\n\n///\n\n/// #[tokio::main]\n", "file_path": "epp-client/src/epp/request/contact/info.rs", "rank": 55, "score": 35123.86114271783 }, { "content": "impl EppContactCheck {\n\n /// Creates an EppObject corresponding to the &lt;epp&gt; tag with data for a contact check request\n\n pub fn new(contact_ids: Vec<&str>, client_tr_id: &str) -> EppContactCheck {\n\n let contact_ids = contact_ids\n\n .iter()\n\n .filter_map(|d| Some(d.to_string_value()))\n\n .collect::<Vec<StringValue>>();\n\n\n\n let contact_check = ContactCheck {\n\n list: ContactList {\n\n xmlns: EPP_CONTACT_XMLNS.to_string(),\n\n contact_ids: contact_ids,\n\n },\n\n };\n\n\n\n 
EppObject::build(Command::<ContactCheck>::new(contact_check, client_tr_id))\n\n }\n\n}\n", "file_path": "epp-client/src/epp/request/contact/check.rs", "rank": 56, "score": 35122.83648627083 }, { "content": " #[serde(rename = \"info\")]\n\n info: ContactInfoData,\n\n}\n\n\n\nimpl EppContactInfo {\n\n /// Creates a new EppObject for contact info corresponding to the &lt;epp&gt; tag in EPP XML\n\n pub fn new(id: &str, auth_password: &str, client_tr_id: &str) -> EppContactInfo {\n\n let contact_info = ContactInfo {\n\n info: ContactInfoData {\n\n xmlns: EPP_CONTACT_XMLNS.to_string(),\n\n id: id.to_string_value(),\n\n auth_info: AuthInfo::new(auth_password),\n\n },\n\n };\n\n\n\n EppObject::build(Command::<ContactInfo>::new(contact_info, client_tr_id))\n\n }\n\n}\n", "file_path": "epp-client/src/epp/request/contact/info.rs", "rank": 57, "score": 35122.289262149185 }, { "content": " /// Data under the &lt;cd&gt; tag\n\n #[serde(rename = \"cd\")]\n\n pub contact_list: Vec<ContactCheckDataItem>,\n\n}\n\n\n\n/// Type that represents the &lt;resData&gt; tag for contact check response\n\n#[derive(Serialize, Deserialize, Debug)]\n\npub struct ContactCheckResult {\n\n /// Data under the &lt;chkData&gt; tag\n\n #[serde(rename = \"chkData\")]\n\n pub check_data: ContactCheckData,\n\n}\n", "file_path": "epp-client/src/epp/response/contact/check.rs", "rank": 58, "score": 35121.13966147113 }, { "content": " /// Creates a new EppObject for contact delete corresponding to the &lt;epp&gt; tag in EPP XML\n\n pub fn new(id: &str, client_tr_id: &str) -> EppContactDelete {\n\n let contact_delete = ContactDelete {\n\n contact: ContactDeleteData {\n\n xmlns: EPP_CONTACT_XMLNS.to_string(),\n\n id: id.to_string_value(),\n\n },\n\n };\n\n\n\n EppObject::build(Command::<ContactDelete>::new(contact_delete, client_tr_id))\n\n }\n\n}\n", "file_path": "epp-client/src/epp/request/contact/delete.rs", "rank": 59, "score": 35120.12606805236 }, { "content": "/// #[tokio::main]\n\n/// async fn main() 
{\n\n/// // Create an instance of EppClient, specifying the name of the registry as in\n\n/// // the config file\n\n/// let mut client = match EppClient::new(\"verisign\").await {\n\n/// Ok(client) => client,\n\n/// Err(e) => panic!(\"Failed to create EppClient: {}\", e)\n\n/// };\n\n///\n\n/// // Create the address, postal_info, voice instances\n\n/// let street = vec![\"58\", \"Orchid Road\"];\n\n/// let address = Address::new(street, \"New York\", \"New York\", \"392374\", \"US\");\n\n/// let postal_info = PostalInfo::new(\"int\", \"John Doe\", \"Acme Widgets\", address);\n\n/// let mut voice = Phone::new(\"+1.47237942\");\n\n/// voice.set_extension(\"123\");\n\n/// let mut fax = Phone::new(\"+1.86698799\");\n\n/// fax.set_extension(\"677\");\n\n///\n\n/// // Create an EppContactCreate instance\n\n/// let mut contact_create = EppContactCreate::new(\n", "file_path": "epp-client/src/epp/request/contact/create.rs", "rank": 60, "score": 35116.12407363889 }, { "content": "//! Types for EPP contact delete response\n\n\n\nuse crate::epp::response::EppCommandResponse;\n\n\n\n/// Type that represents the &lt;epp&gt; tag for the EPP XML contact delete response\n\npub type EppContactDeleteResponse = EppCommandResponse;\n", "file_path": "epp-client/src/epp/response/contact/delete.rs", "rank": 61, "score": 35115.905990869454 }, { "content": "/// // Create an instance of EppClient, specifying the name of the registry as in\n\n/// // the config file\n\n/// let mut client = match EppClient::new(\"verisign\").await {\n\n/// Ok(client) => client,\n\n/// Err(e) => panic!(\"Failed to create EppClient: {}\", e)\n\n/// };\n\n///\n\n/// // Create an EppContactDelete instance\n\n/// let contact_delete = EppContactDelete::new(\n\n/// \"eppdev-contact-100\",\n\n/// generate_client_tr_id(&client).as_str()\n\n/// );\n\n///\n\n/// // send it to the registry and receive a response of type EppContactDeleteResponse\n\n/// let response = client.transact::<_, 
EppContactDeleteResponse>(&contact_delete).await.unwrap();\n\n///\n\n/// println!(\"{:?}\", response);\n\n/// }\n\n/// ```\n\npub type EppContactDelete = EppObject<Command<ContactDelete>>;\n", "file_path": "epp-client/src/epp/request/contact/delete.rs", "rank": 62, "score": 35113.225383776946 }, { "content": "/// // Create an instance of EppClient, specifying the name of the registry as in\n\n/// // the config file\n\n/// let mut client = match EppClient::new(\"verisign\").await {\n\n/// Ok(client) => client,\n\n/// Err(e) => panic!(\"Failed to create EppClient: {}\", e)\n\n/// };\n\n///\n\n/// // Create an EppContactCheck instance\n\n/// let contact_check = EppContactCheck::new(\n\n/// vec![\"epp-client-c1\", \"epp-client-c2\"],\n\n/// generate_client_tr_id(&client).as_str()\n\n/// );\n\n///\n\n/// // send it to the registry and receive a response of type EppContactCheckResponse\n\n/// let response = client.transact::<_, EppContactCheckResponse>(&contact_check).await.unwrap();\n\n///\n\n/// println!(\"{:?}\", response);\n\n/// }\n\n/// ```\n\npub type EppContactCheck = EppObject<Command<ContactCheck>>;\n", "file_path": "epp-client/src/epp/request/contact/check.rs", "rank": 63, "score": 35112.94881081185 }, { "content": "/// async fn main() {\n\n/// // Create an instance of EppClient, specifying the name of the registry as in\n\n/// // the config file\n\n/// let mut client = match EppClient::new(\"verisign\").await {\n\n/// Ok(client) => client,\n\n/// Err(e) => panic!(\"Failed to create EppClient: {}\", e)\n\n/// };\n\n///\n\n/// // Create an EppContactInfo instance\n\n/// let contact_info = EppContactInfo::new(\n\n/// \"eppdev-contact-100\",\n\n/// \"epP4uthd#v\",\n\n/// generate_client_tr_id(&client).as_str()\n\n/// );\n\n///\n\n/// // send it to the registry and receive a response of type EppContactInfoResponse\n\n/// let response = client.transact::<_, EppContactInfoResponse>(&contact_info).await.unwrap();\n\n///\n\n/// println!(\"{:?}\", response);\n\n/// 
}\n", "file_path": "epp-client/src/epp/request/contact/info.rs", "rank": 64, "score": 35110.833041637474 }, { "content": "pub struct ContactStatus {\n\n /// The status name, represented by the 's' attr on &lt;status&gt; tags\n\n #[serde(rename = \"s\")]\n\n pub status: String,\n\n}\n\n\n\n/// The data for &lt;voice&gt; and &lt;fax&gt; types on domain transactions\n\n#[derive(Serialize, Deserialize, Debug, Clone)]\n\npub struct Phone {\n\n /// The inner text on the &lt;voice&gt; and &lt;fax&gt; tags\n\n #[serde(rename = \"$value\")]\n\n pub number: String,\n\n /// The value of the 'x' attr on &lt;voice&gt; and &lt;fax&gt; tags\n\n #[serde(rename = \"x\")]\n\n pub extension: Option<String>,\n\n}\n\n\n\n/// The &lt;addr&gt; type on contact transactions\n\n#[derive(Serialize, Deserialize, Debug, Clone)]\n\npub struct Address {\n", "file_path": "epp-client/src/epp/object/data.rs", "rank": 65, "score": 32.98572136241967 }, { "content": "//! Data types common to EPP Requests and Responses\n\n\n\npub mod data;\n\n\n\nuse epp_client_macros::*;\n\nuse serde::{ser::SerializeStruct, Deserialize, Serialize, Serializer};\n\nuse std::fmt::Display;\n\n\n\nuse crate::epp::xml::{EPP_XMLNS, EPP_XMLNS_XSI, EPP_XSI_SCHEMA_LOCATION};\n\n\n\n/// Wraps String for easier serialization to and from values that are inner text\n\n/// for tags rather than attributes\n\n#[derive(Serialize, Deserialize, Debug, PartialEq, Clone)]\n\npub struct StringValue(String);\n\n\n\nimpl Default for StringValue {\n\n fn default() -> Self {\n\n Self(String::from(\"\"))\n\n }\n\n}\n\n\n\nimpl Display for StringValue {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n write!(f, \"{}\", self.0)\n\n }\n\n}\n\n\n\n/// Trait for StringValue type to add easier conversion from str and String\n", "file_path": "epp-client/src/epp/object.rs", "rank": 66, "score": 32.60686139055375 }, { "content": "}\n\n\n\n/// The &lt;contact&gt; type on domain creation and update 
requests\n\n#[derive(Serialize, Deserialize, Debug)]\n\npub struct DomainContact {\n\n /// The contact id\n\n #[serde(rename = \"$value\")]\n\n pub id: String,\n\n /// The contact type attr (usually admin, billing, or tech in most registries)\n\n #[serde(rename = \"type\")]\n\n pub contact_type: String,\n\n}\n\n\n\n/// The &lt;period&gt; type for registration, renewal or transfer on domain transactions\n\n#[derive(Serialize, Deserialize, Debug)]\n\npub struct Period {\n\n /// The interval (usually 'y' indicating years)\n\n unit: String,\n\n /// The length of the registration, renewal or transfer period (usually in years)\n\n #[serde(rename = \"$value\")]\n", "file_path": "epp-client/src/epp/object/data.rs", "rank": 67, "score": 31.84122144872508 }, { "content": "//! Common data types included in EPP Requests and Responses\n\n\n\nuse crate::epp::object::{StringValue, StringValueTrait};\n\nuse serde::{Deserialize, Serialize};\n\n\n\n/// The &lt;status&gt; attribute on EPP XML for domain transactions\n\npub type DomainStatus = ContactStatus;\n\n/// The &lt;status&gt; attribute on EPP XML for host transactions\n\npub type HostStatus = ContactStatus;\n\n\n\n/// The &lt;hostAddr&gt; types domain or host transactions\n\n#[derive(Serialize, Deserialize, Debug)]\n\npub struct HostAddr {\n\n #[serde(rename = \"ip\")]\n\n pub ip_version: Option<String>,\n\n #[serde(rename = \"$value\")]\n\n pub address: String,\n\n}\n\n\n\nimpl HostAddr {\n", "file_path": "epp-client/src/epp/object/data.rs", "rank": 68, "score": 27.12401081992683 }, { "content": " state.serialize_field(\"xsi:schemaLocation\", &self.xsi_schema_location)?;\n\n state.serialize_field(data_name, &self.data)?;\n\n state.end()\n\n }\n\n}\n\n\n\n/// The <option> type in EPP XML login requests\n\n#[derive(Serialize, Deserialize, Debug, PartialEq)]\n\n#[serde(rename = \"options\")]\n\npub struct Options {\n\n /// The EPP version being used\n\n pub version: StringValue,\n\n /// The language that will be used during EPP 
transactions\n\n pub lang: StringValue,\n\n}\n\n\n\nimpl Options {\n\n /// Creates an Options object with version and lang data\n\n pub fn build(version: &str, lang: &str) -> Options {\n\n Options {\n", "file_path": "epp-client/src/epp/object.rs", "rank": 69, "score": 26.953636622268817 }, { "content": "//! Types for EPP requests\n\n\n\npub mod contact;\n\npub mod domain;\n\npub mod host;\n\npub mod message;\n\n\n\nuse serde::{ser::SerializeStruct, ser::Serializer, Deserialize, Serialize};\n\nuse std::error::Error;\n\nuse std::time::SystemTime;\n\n\n\nuse crate::epp::object::{\n\n ElementName, EmptyTag, EppObject, Extension, Options, ServiceExtension, Services, StringValue,\n\n StringValueTrait,\n\n};\n\nuse crate::epp::xml::{EPP_CONTACT_XMLNS, EPP_DOMAIN_XMLNS, EPP_HOST_XMLNS, EPP_LANG, EPP_VERSION};\n\nuse epp_client_macros::*;\n\n\n\n/// Type corresponding to the &lt;command&gt; tag in an EPP XML request\n\n/// without an &lt;extension&gt; tag\n", "file_path": "epp-client/src/epp/request.rs", "rank": 70, "score": 26.821222037948825 }, { "content": "pub struct Login {\n\n /// The username to use for the login\n\n #[serde(rename(serialize = \"clID\", deserialize = \"clID\"))]\n\n username: StringValue,\n\n /// The password to use for the login\n\n #[serde(rename = \"pw\", default)]\n\n password: StringValue,\n\n /// Data under the <options> tag\n\n options: Options,\n\n /// Data under the <svcs> tag\n\n #[serde(rename = \"svcs\")]\n\n services: Services,\n\n}\n\n\n\nimpl EppLogin {\n\n /// Creates a new EPP Login request\n\n pub fn new(\n\n username: &str,\n\n password: &str,\n\n ext_uris: &Option<Vec<String>>,\n", "file_path": "epp-client/src/epp/request.rs", "rank": 71, "score": 26.61429492911945 }, { "content": "}\n\n\n\n/// Type corresponding to the <trID> tag in an EPP response XML\n\n#[derive(Serialize, Deserialize, Debug, PartialEq)]\n\npub struct ResponseTRID {\n\n /// The client TRID\n\n #[serde(rename = \"clTRID\")]\n\n pub client_tr_id: 
Option<StringValue>,\n\n /// The server TRID\n\n #[serde(rename = \"svTRID\")]\n\n pub server_tr_id: StringValue,\n\n}\n\n\n\n/// Type corresponding to the <msgQ> tag in an EPP response XML\n\n#[derive(Serialize, Deserialize, Debug, PartialEq)]\n\npub struct MessageQueue {\n\n /// The message count\n\n pub count: u32,\n\n /// The message ID\n\n pub id: String,\n", "file_path": "epp-client/src/epp/response.rs", "rank": 72, "score": 26.140564100107287 }, { "content": " #[serde(rename = \"upID\")]\n\n pub updater_id: Option<StringValue>,\n\n /// The host last update date\n\n #[serde(rename = \"upDate\")]\n\n pub updated_at: Option<StringValue>,\n\n /// The host transfer date\n\n #[serde(rename = \"trDate\")]\n\n pub transferred_at: Option<StringValue>,\n\n}\n\n\n\n/// Type that represents the &lt;resData&gt; tag for host info response\n\n#[derive(Serialize, Deserialize, Debug)]\n\npub struct HostInfoResult {\n\n /// Data under the &lt;infData&gt; tag\n\n #[serde(rename = \"infData\")]\n\n pub info_data: HostInfoData,\n\n}\n", "file_path": "epp-client/src/epp/response/host/info.rs", "rank": 73, "score": 25.98133884094266 }, { "content": " length: u16,\n\n}\n\n\n\nimpl Period {\n\n /// Creates a new period in years\n\n pub fn new(length: u16) -> Period {\n\n Period {\n\n unit: \"y\".to_string(),\n\n length: length,\n\n }\n\n }\n\n\n\n /// Sets the period unit ('y' for years, most commonly)\n\n pub fn set_unit(&mut self, unit: &str) {\n\n self.unit = unit.to_string();\n\n }\n\n}\n\n\n\n/// The &lt;status&gt; type on contact transactions\n\n#[derive(Serialize, Deserialize, Debug)]\n", "file_path": "epp-client/src/epp/object/data.rs", "rank": 74, "score": 25.95097315174529 }, { "content": " pub info_type: String,\n\n /// The &lt;name&gt; tag under &lt;postalInfo&gt;\n\n pub name: StringValue,\n\n /// The &lt;org&gt; tag under &lt;postalInfo&gt;\n\n #[serde(rename = \"org\")]\n\n pub organization: StringValue,\n\n /// The &lt;addr&gt; tag under &lt;postalInfo&gt;\n\n 
#[serde(rename = \"addr\")]\n\n pub address: Address,\n\n}\n\n\n\n/// The &lt;authInfo&gt; tag for domain and contact transactions\n\n#[derive(Serialize, Deserialize, Debug, Clone)]\n\npub struct AuthInfo {\n\n /// The &lt;pw&gt; tag under &lt;authInfo&gt;\n\n #[serde(rename = \"pw\")]\n\n pub password: StringValue,\n\n}\n\n\n\nimpl Phone {\n", "file_path": "epp-client/src/epp/object/data.rs", "rank": 75, "score": 25.93960399351919 }, { "content": " /// Sets the <svcs> tag data\n\n pub fn services(&mut self, services: Services) {\n\n self.data.command.services = services;\n\n }\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Debug, PartialEq, ElementName)]\n\n#[element_name(name = \"logout\")]\n\n/// Type corresponding to the &lt;logout&gt; tag in an EPP XML logout request\n\npub struct Logout;\n\n\n\nimpl EppLogout {\n\n /// Creates a new EPP Logout request\n\n pub fn new(client_tr_id: &str) -> EppLogout {\n\n EppObject::build(Command::<Logout> {\n\n command: Logout,\n\n extension: None,\n\n client_tr_id: client_tr_id.to_string_value(),\n\n })\n\n }\n\n}\n", "file_path": "epp-client/src/epp/request.rs", "rank": 76, "score": 25.73138942635516 }, { "content": "//! 
Types for EPP responses\n\n\n\npub mod contact;\n\npub mod domain;\n\npub mod host;\n\npub mod message;\n\n\n\nuse epp_client_macros::*;\n\nuse serde::{Deserialize, Deserializer, Serialize};\n\nuse std::fmt::Debug;\n\n\n\nuse crate::epp::object::{\n\n ElementName, EmptyTag, EppObject, Extension, Options, ServiceExtension, Services, StringValue,\n\n};\n\n\n\n/// Type corresponding to the &lt;response&gt; tag in an EPP response without an &lt;extension&gt; section\n\npub type CommandResponse<T> = CommandResponseWithExtension<T, EmptyTag>;\n\n\n\n/// The EPP Greeting that is received on a successful connection and in response to an EPP hello\n\npub type EppGreeting = EppObject<Greeting>;\n", "file_path": "epp-client/src/epp/response.rs", "rank": 77, "score": 24.893153063648096 }, { "content": "pub struct HostDeleteData {\n\n /// XML namespace for host commands\n\n xmlns: String,\n\n /// The host to be deleted\n\n name: StringValue,\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Debug, ElementName)]\n\n#[element_name(name = \"delete\")]\n\n/// Type for EPP XML &lt;delete&gt; command for hosts\n\npub struct HostDelete {\n\n /// The instance holding the data for the host to be deleted\n\n #[serde(rename = \"delete\")]\n\n host: HostDeleteData,\n\n}\n\n\n\nimpl EppHostDelete {\n\n /// Creates a new EppObject for host delete corresponding to the &lt;epp&gt; tag in EPP XML\n\n pub fn new(name: &str, client_tr_id: &str) -> EppHostDelete {\n\n EppObject::build(Command::<HostDelete>::new(\n", "file_path": "epp-client/src/epp/request/host/delete.rs", "rank": 78, "score": 24.741327018033687 }, { "content": "pub struct HostInfoData {\n\n /// XML namespace for host commands\n\n xmlns: String,\n\n /// The name of the host to be queried\n\n name: StringValue,\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Debug, ElementName)]\n\n#[element_name(name = \"info\")]\n\n/// Type for EPP XML &lt;info&gt; command for hosts\n\npub struct HostInfo {\n\n /// The instance holding the data for the 
host query\n\n #[serde(rename = \"info\")]\n\n info: HostInfoData,\n\n}\n\n\n\nimpl EppHostInfo {\n\n /// Creates a new EppObject for host info corresponding to the &lt;epp&gt; tag in EPP XML\n\n pub fn new(name: &str, client_tr_id: &str) -> EppHostInfo {\n\n EppObject::build(Command::<HostInfo>::new(\n", "file_path": "epp-client/src/epp/request/host/info.rs", "rank": 79, "score": 24.639899128743497 }, { "content": "pub struct DomainDeleteData {\n\n /// XML namespace for domain commands\n\n xmlns: String,\n\n /// The domain to be deleted\n\n name: StringValue,\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Debug, ElementName)]\n\n#[element_name(name = \"delete\")]\n\n/// Type for EPP XML &lt;delete&gt; command for domains\n\npub struct DomainDelete {\n\n /// The data under the &lt;delete&gt; tag for domain deletion\n\n #[serde(rename = \"delete\")]\n\n domain: DomainDeleteData,\n\n}\n\n\n\nimpl EppDomainDelete {\n\n /// Creates a new EppObject for domain delete corresponding to the &lt;epp&gt; tag in EPP XML\n\n pub fn new(name: &str, client_tr_id: &str) -> EppDomainDelete {\n\n EppObject::build(Command::<DomainDelete>::new(\n", "file_path": "epp-client/src/epp/request/domain/delete.rs", "rank": 80, "score": 24.539340555654352 }, { "content": " /// The epp user who last updated the domain\n\n #[serde(rename = \"upID\")]\n\n pub updater_id: StringValue,\n\n /// The domain last updated date\n\n #[serde(rename = \"upDate\")]\n\n pub updated_at: StringValue,\n\n /// The domain expiry date\n\n #[serde(rename = \"exDate\")]\n\n pub expiring_at: StringValue,\n\n /// The domain transfer date\n\n #[serde(rename = \"trDate\")]\n\n pub transferred_at: Option<StringValue>,\n\n /// The domain auth info\n\n #[serde(rename = \"authInfo\")]\n\n pub auth_info: Option<AuthInfo>,\n\n}\n\n\n\n/// Type that represents the &lt;resData&gt; tag for domain info response\n\n#[derive(Serialize, Deserialize, Debug)]\n\npub struct DomainInfoResult {\n\n /// Data under the &lt;resData&gt; tag\n\n 
#[serde(rename = \"infData\")]\n\n pub info_data: DomainInfoData,\n\n}\n", "file_path": "epp-client/src/epp/response/domain/info.rs", "rank": 81, "score": 24.505858874910082 }, { "content": "#[derive(Serialize, Deserialize, Debug, PartialEq)]\n\npub struct Dcp {\n\n /// Data for the <access> tag\n\n pub access: Access,\n\n /// Data for the <statement> tag\n\n pub statement: Statement,\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Debug, PartialEq, ElementName)]\n\n#[serde(rename_all = \"lowercase\")]\n\n#[element_name(name = \"greeting\")]\n\n/// Type corresponding to the <greeting> tag in the EPP greeting XML\n\npub struct Greeting {\n\n /// The service ID\n\n #[serde(rename = \"svID\")]\n\n pub service_id: String,\n\n /// The date from the EPP server\n\n #[serde(rename = \"svDate\")]\n\n pub service_date: String,\n\n /// Data under the <svcMenu> element\n", "file_path": "epp-client/src/epp/response.rs", "rank": 82, "score": 24.420439457178045 }, { "content": "//! Types to use in serialization to and deserialization from EPP XML\n\n\n\npub mod quick_xml;\n\n\n\nuse std::{error::Error, fmt::Debug};\n\n\n\nuse crate::error;\n\n\n\npub const EPP_XML_HEADER: &str = r#\"<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"no\"?>\"#;\n\npub const EPP_XMLNS: &str = \"urn:ietf:params:xml:ns:epp-1.0\";\n\npub const EPP_XMLNS_XSI: &str = \"http://www.w3.org/2001/XMLSchema-instance\";\n\npub const EPP_XSI_SCHEMA_LOCATION: &str = \"urn:ietf:params:xml:ns:epp-1.0 epp-1.0.xsd\";\n\n\n\npub const EPP_DOMAIN_XMLNS: &str = \"urn:ietf:params:xml:ns:domain-1.0\";\n\npub const EPP_CONTACT_XMLNS: &str = \"urn:ietf:params:xml:ns:contact-1.0\";\n\npub const EPP_HOST_XMLNS: &str = \"urn:ietf:params:xml:ns:host-1.0\";\n\n\n\npub const EPP_CONTACT_SCHEMA_LOCATION: &str = \"urn:ietf:params:xml:ns:contact-1.0 contact-1.0.xsd\";\n\npub const EPP_DOMAIN_SCHEMA_LOCATION: &str = \"urn:ietf:params:xml:ns:domain-1.0 domain-1.0.xsd\";\n\n\n\npub const EPP_DOMAIN_RGP_EXT_XMLNS: &str = 
\"urn:ietf:params:xml:ns:rgp-1.0\";\n\npub const EPP_DOMAIN_RGP_EXT_SCHEMA_LOCATION: &str = \"urn:ietf:params:xml:ns:rgp-1.0 rgp-1.0.xsd\";\n\n\n\npub const EPP_VERSION: &str = \"1.0\";\n\npub const EPP_LANG: &str = \"en\";\n\n\n\n/// Trait to be implemented by serializers. Currently the only included serializer is `quick-xml`\n", "file_path": "epp-client/src/epp/xml.rs", "rank": 83, "score": 24.304079817762627 }, { "content": "//! Types for EPP host check response\n\n\n\nuse serde::{Deserialize, Serialize};\n\n\n\nuse crate::epp::object::{EppObject, StringValue};\n\nuse crate::epp::response::CommandResponse;\n\n\n\n/// Type that represents the &lt;epp&gt; tag for the EPP XML host check response\n\npub type EppHostCheckResponse = EppObject<CommandResponse<HostCheckResult>>;\n\n\n\n/// Type that represents the &lt;name&gt; tag for host check response\n\n#[derive(Serialize, Deserialize, Debug)]\n\npub struct HostCheck {\n\n /// The host name\n\n #[serde(rename = \"$value\")]\n\n pub name: StringValue,\n\n /// The host (un)availability\n\n #[serde(rename = \"avail\")]\n\n pub available: u16,\n\n}\n", "file_path": "epp-client/src/epp/response/host/check.rs", "rank": 84, "score": 24.05343660774648 }, { "content": "//! 
Types for EPP domain check response\n\n\n\nuse serde::{Deserialize, Serialize};\n\n\n\nuse crate::epp::object::{EppObject, StringValue};\n\nuse crate::epp::response::CommandResponse;\n\n\n\n/// Type that represents the &lt;epp&gt; tag for the EPP XML domain check response\n\npub type EppDomainCheckResponse = EppObject<CommandResponse<DomainCheckResult>>;\n\n\n\n/// Type that represents the &lt;name&gt; tag for domain check response\n\n#[derive(Serialize, Deserialize, Debug)]\n\npub struct DomainCheck {\n\n /// The domain name\n\n #[serde(rename = \"$value\")]\n\n pub name: StringValue,\n\n /// The domain (un)availability\n\n #[serde(rename = \"avail\")]\n\n pub available: u16,\n\n}\n", "file_path": "epp-client/src/epp/response/domain/check.rs", "rank": 85, "score": 24.053436607746477 }, { "content": "//! Types for EPP domain info response\n\n\n\nuse serde::{Deserialize, Serialize};\n\n\n\nuse crate::epp::object::data::{AuthInfo, DomainContact, DomainStatus, HostAttr};\n\nuse crate::epp::object::{EppObject, StringValue};\n\nuse crate::epp::response::domain::rgp::request::RgpRequestResult;\n\nuse crate::epp::response::CommandResponseWithExtension;\n\n\n\n/// Type that represents the &lt;epp&gt; tag for the EPP XML domain info response\n\npub type EppDomainInfoResponse =\n\n EppObject<CommandResponseWithExtension<DomainInfoResult, RgpRequestResult>>;\n\n\n\n/// The two types of ns lists, hostObj and hostAttr, that may be returned in the\n\n/// domain info response\n\n#[derive(Serialize, Deserialize, Debug)]\n\npub struct DomainNsList {\n\n /// List of &lt;hostObj&gt; ns elements\n\n #[serde(rename = \"hostObj\")]\n\n pub host_obj: Option<Vec<StringValue>>,\n", "file_path": "epp-client/src/epp/response/domain/info.rs", "rank": 86, "score": 23.970352922276188 }, { "content": "use epp_client_macros::*;\n\n\n\nuse crate::epp::object::{ElementName, EmptyTag, EppObject};\n\nuse crate::epp::response::CommandResponseWithExtension;\n\nuse serde::{Deserialize, 
Serialize};\n\n\n\n/// Type that represents the &lt;epp&gt; tag for the EPP XML rgp restore request response\n\npub type EppDomainRgpRestoreRequestResponse =\n\n EppObject<CommandResponseWithExtension<EmptyTag, RgpRequestResult>>;\n\n\n\n/// Type that represents the &lt;rgpStatus&gt; tag for domain rgp restore request response\n\n#[derive(Serialize, Deserialize, Debug)]\n\npub struct RgpStatus {\n\n /// The domain RGP status\n\n #[serde(rename = \"s\")]\n\n pub status: String,\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Debug, ElementName)]\n\n#[serde(rename = \"upData\")]\n", "file_path": "epp-client/src/epp/response/domain/rgp/request.rs", "rank": 87, "score": 23.951398220744466 }, { "content": "/// println!(\"{:?}\", response);\n\n/// }\n\n/// ```\n\npub type EppDomainRgpRestoreRequest =\n\n EppObject<CommandWithExtension<DomainUpdate<HostObjList>, RgpRestoreRequest>>;\n\n\n\n/// Type corresponding to the &lt;restore&gt; tag for an rgp restore request\n\n#[derive(Serialize, Deserialize, Debug)]\n\npub struct RgpRestoreRequestData {\n\n /// The value of the op attribute in the &lt;restore&gt; tag\n\n pub op: String,\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Debug, ElementName)]\n\n#[element_name(name = \"update\")]\n\n/// Type for EPP XML &lt;check&gt; command for domains\n\npub struct RgpRestoreRequest {\n\n /// XML namespace for the RGP restore extension\n\n xmlns: String,\n\n /// XML schema location for the RGP restore extension\n", "file_path": "epp-client/src/epp/request/domain/rgp/request.rs", "rank": 88, "score": 23.809094352837228 }, { "content": "//! 
Types for EPP message poll response\n\n\n\nuse crate::epp::object::{EppObject, StringValue};\n\nuse crate::epp::response::CommandResponse;\n\nuse serde::{Deserialize, Serialize};\n\n\n\n/// Type that represents the &lt;epp&gt; tag for the EPP XML message poll response\n\npub type EppMessagePollResponse = EppObject<CommandResponse<MessagePollResult>>;\n\n\n\n/// Type that represents the &lt;trnData&gt; tag for message poll response\n\n#[derive(Serialize, Deserialize, Debug)]\n\npub struct MessageDomainTransferData {\n\n /// XML namespace for message response data\n\n #[serde(rename = \"xmlns:obj\")]\n\n xmlns: String,\n\n /// The name of the domain under transfer\n\n pub name: StringValue,\n\n /// The domain transfer status\n\n #[serde(rename = \"trStatus\")]\n\n pub transfer_status: StringValue,\n", "file_path": "epp-client/src/epp/response/message/poll.rs", "rank": 89, "score": 23.713972726121295 }, { "content": " report: RgpRestoreReportData,\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Debug, ElementName)]\n\n#[element_name(name = \"update\")]\n\n/// Type for EPP XML &lt;check&gt; command for domains\n\npub struct RgpRestoreReport {\n\n /// XML namespace for the RGP restore extension\n\n xmlns: String,\n\n /// XML schema location for the RGP restore extension\n\n #[serde(rename = \"xsi:schemaLocation\")]\n\n schema_location: String,\n\n /// The object holding the list of domains to be checked\n\n restore: RgpRestoreReportSection,\n\n}\n\n\n\nimpl EppDomainRgpRestoreReport {\n\n /// Creates a new EppObject for domain rgp restore report corresponding to the &lt;epp&gt; tag in EPP XML\n\n pub fn new(\n\n name: &str,\n", "file_path": "epp-client/src/epp/request/domain/rgp/report.rs", "rank": 90, "score": 23.706370816343263 }, { "content": "//! 
XML serialization using the `quick-xml` library\n\n\n\nuse quick_xml::de::from_str;\n\nuse quick_xml::se;\n\nuse serde::{de::DeserializeOwned, Serialize};\n\nuse std::{error::Error, fmt::Debug};\n\n\n\nuse crate::epp::object::{ElementName, EppObject};\n\nuse crate::epp::xml::{EppXml, EPP_XML_HEADER};\n\nuse crate::error;\n\n\n\nimpl<T: Serialize + DeserializeOwned + ElementName + Debug> EppXml for EppObject<T> {\n\n type Output = EppObject<T>;\n\n\n\n /// Serializes the EppObject instance to an EPP XML document\n\n fn serialize(&self) -> Result<String, Box<dyn Error>> {\n\n let epp_xml = format!(\"{}\\r\\n{}\", EPP_XML_HEADER, se::to_string(self)?);\n\n\n\n Ok(epp_xml)\n\n }\n", "file_path": "epp-client/src/epp/xml/quick_xml.rs", "rank": 91, "score": 23.62264465274803 }, { "content": "//! Types for EPP domain renew response\n\nuse serde::{Deserialize, Serialize};\n\n\n\nuse crate::epp::object::{EppObject, StringValue};\n\nuse crate::epp::response::CommandResponse;\n\n\n\n/// Type that represents the &lt;epp&gt; tag for the EPP XML domain renew response\n\npub type EppDomainRenewResponse = EppObject<CommandResponse<DomainRenewResult>>;\n\n\n\n/// Type that represents the &lt;renData&gt; tag for domain renew response\n\n#[derive(Serialize, Deserialize, Debug)]\n\npub struct DomainRenewData {\n\n /// XML namespace for domain response data\n\n #[serde(rename = \"xmlns:domain\")]\n\n xmlns: String,\n\n /// XML schema location for domain response data\n\n #[serde(rename = \"xsi:schemaLocation\")]\n\n schema_location: String,\n\n /// The name of the domain\n\n pub name: StringValue,\n", "file_path": "epp-client/src/epp/response/domain/renew.rs", "rank": 92, "score": 23.42732861197212 }, { "content": "//! 
Types for EPP domain create response\n\n\n\nuse serde::{Deserialize, Serialize};\n\n\n\nuse crate::epp::object::{EppObject, StringValue};\n\nuse crate::epp::response::CommandResponse;\n\n\n\n/// Type that represents the &lt;epp&gt; tag for the EPP XML domain create response\n\npub type EppDomainCreateResponse = EppObject<CommandResponse<DomainCreateResult>>;\n\n\n\n/// Type that represents the &lt;chkData&gt; tag for domain create response\n\n#[derive(Serialize, Deserialize, Debug)]\n\npub struct DomainCreateData {\n\n /// XML namespace for domain response data\n\n #[serde(rename = \"xmlns:domain\")]\n\n xmlns: String,\n\n /// XML schema location for domain response data\n\n #[serde(rename = \"xsi:schemaLocation\")]\n\n schema_location: String,\n\n /// The domain name\n", "file_path": "epp-client/src/epp/response/domain/create.rs", "rank": 93, "score": 22.96869374330729 }, { "content": "//! Types for EPP host create response\n\n\n\nuse serde::{Deserialize, Serialize};\n\n\n\nuse crate::epp::object::{EppObject, StringValue};\n\nuse crate::epp::response::CommandResponse;\n\n\n\n/// Type that represents the &lt;epp&gt; tag for the EPP XML host create response\n\npub type EppHostCreateResponse = EppObject<CommandResponse<HostCreateResult>>;\n\n\n\n/// Type that represents the &lt;creData&gt; tag for host create response\n\n#[derive(Serialize, Deserialize, Debug)]\n\npub struct HostCreateData {\n\n /// XML namespace for host response data\n\n #[serde(rename = \"xmlns:host\")]\n\n xmlns: String,\n\n /// XML schema location for host response data\n\n #[serde(rename = \"xsi:schemaLocation\")]\n\n schema_location: String,\n\n /// The host name\n", "file_path": "epp-client/src/epp/response/host/create.rs", "rank": 94, "score": 22.96869374330729 }, { "content": "//! 
Types for EPP host info response\n\n\n\nuse serde::{Deserialize, Serialize};\n\n\n\nuse crate::epp::object::data::{HostAddr, HostStatus};\n\nuse crate::epp::object::{EppObject, StringValue};\n\nuse crate::epp::response::CommandResponse;\n\n\n\n/// Type that represents the &lt;epp&gt; tag for the EPP XML host info response\n\npub type EppHostInfoResponse = EppObject<CommandResponse<HostInfoResult>>;\n\n\n\n/// Type that represents the &lt;infData&gt; tag for host info response\n\n#[derive(Serialize, Deserialize, Debug)]\n\npub struct HostInfoData {\n\n /// XML namespace for host response data\n\n #[serde(rename = \"xmlns:host\")]\n\n xmlns: String,\n\n /// XML schema location for host response data\n\n #[serde(rename = \"xsi:schemaLocation\")]\n\n schema_location: String,\n", "file_path": "epp-client/src/epp/response/host/info.rs", "rank": 95, "score": 22.874119526378532 }, { "content": "\n\n/// Instances of the EppClient type are used to transact with the registry.\n\n/// Once initialized, the EppClient instance can serialize EPP requests to XML and send them\n\n/// to the registry and deserialize the XML responses from the registry to local types\n\npub struct EppClient {\n\n credentials: (String, String),\n\n ext_uris: Option<Vec<String>>,\n\n connection: EppConnection,\n\n // pub client_tr_id_fn: Arc<dyn Fn(&EppClient) -> String + Send + Sync>,\n\n}\n\n\n\n/// A function to generate a simple client TRID. 
Should only be used for testing, library users\n\n/// should generate a client TRID according to their own requirements\n", "file_path": "epp-client/src/connection/client.rs", "rank": 96, "score": 22.861491374144848 }, { "content": " assert_eq!(\n\n result.info_data.postal_info.address.country_code,\n\n \"FR\".to_string_value()\n\n );\n\n assert_eq!(result.info_data.voice.number, \"+33.47237942\".to_string());\n\n assert_eq!(*voice_ext, \"123\".to_string());\n\n assert_eq!(fax.number, \"+33.86698799\".to_string());\n\n assert_eq!(*fax_ext, \"243\".to_string());\n\n assert_eq!(\n\n result.info_data.email,\n\n \"[email protected]\".to_string_value()\n\n );\n\n assert_eq!(result.info_data.client_id, \"eppdev\".to_string_value());\n\n assert_eq!(result.info_data.creator_id, \"SYSTEM\".to_string_value());\n\n assert_eq!(\n\n result.info_data.created_at,\n\n \"2021-07-23T13:09:09.0Z\".to_string_value()\n\n );\n\n assert_eq!(\n\n *(result.info_data.updater_id.as_ref().unwrap()),\n", "file_path": "epp-client/src/tests/de.rs", "rank": 97, "score": 22.770859379334436 }, { "content": " version: version.to_string_value(),\n\n lang: lang.to_string_value(),\n\n }\n\n }\n\n}\n\n\n\n/// Type representing the &lt;extension&gt; tag for an EPP document\n\n#[derive(Deserialize, Debug, PartialEq)]\n\n#[serde(rename = \"extension\")]\n\npub struct Extension<E: ElementName> {\n\n /// Data under the &lt;extension&gt; tag\n\n #[serde(alias = \"upData\")]\n\n pub data: E,\n\n}\n\n\n\nimpl<E: ElementName + Serialize> Serialize for Extension<E> {\n\n fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>\n\n where\n\n S: Serializer,\n\n {\n", "file_path": "epp-client/src/epp/object.rs", "rank": 98, "score": 22.404972657528628 }, { "content": " \"SYSTEM\".to_string_value()\n\n );\n\n assert_eq!(\n\n *(result.info_data.updated_at.as_ref().unwrap()),\n\n \"2021-07-23T13:09:09.0Z\".to_string_value()\n\n );\n\n assert_eq!((*auth_info).password, \"eppdev-387323\".to_string_value());\n\n 
assert_eq!(\n\n object.data.tr_ids.client_tr_id.unwrap(),\n\n CLTRID.to_string_value()\n\n );\n\n assert_eq!(object.data.tr_ids.server_tr_id, SVTRID.to_string_value());\n\n }\n\n\n\n #[test]\n\n fn contact_update() {\n\n let xml = get_xml(\"response/contact/update.xml\").unwrap();\n\n let object = EppContactUpdateResponse::deserialize(xml.as_str()).unwrap();\n\n\n\n assert_eq!(object.data.result.code, 1000);\n", "file_path": "epp-client/src/tests/de.rs", "rank": 99, "score": 21.820467555238384 } ]
Rust
hartex_core/src/error.rs
mod-tc/HarTex-rust-discord-bot
1cbd3e8933ff71853eff0114fd2675f3e8b7b210
use std::string::FromUtf8Error; use base64::DecodeError; use ctrlc::Error as CtrlcError; use toml::de::Error as TomlDeserializationError; use crate::discord::{ embed_builder::{ image_source::ImageSourceUrlError, EmbedError }, gateway::{ cluster::{ ClusterCommandError, ClusterStartError, }, shard::SessionInactiveError }, http::{ error::Error as HttpError, request::{ application::InteractionError, channel::message::{ create_message::CreateMessageError, update_message::UpdateMessageError }, guild::member::update_guild_member::UpdateGuildMemberError }, response::DeserializeBodyError }, model::gateway::payload::update_presence::UpdatePresenceError }; #[derive(Debug)] pub enum HarTexError { Base64DecodeError { error: DecodeError }, ClusterCommandError { error: ClusterCommandError }, ClusterStartError { error: ClusterStartError }, CreateMessageError { error: CreateMessageError }, CtrlcError { error: CtrlcError }, DeserializeBodyError { error: DeserializeBodyError }, EmbedError { error: EmbedError }, EmbedImageSourceUrlError { error: ImageSourceUrlError }, InteractionError { error: InteractionError }, SessionInactiveError { error: SessionInactiveError }, TomlDeserializationError { error: TomlDeserializationError }, TwilightHttpError { error: HttpError }, UpdateGuildMemberError { error: UpdateGuildMemberError }, UpdateMessageError { error: UpdateMessageError }, UpdatePresenceError { error: UpdatePresenceError }, Utf8ValidationError { error: FromUtf8Error }, Custom { message: String } } impl From<ClusterCommandError> for HarTexError { fn from(error: ClusterCommandError) -> Self { Self::ClusterCommandError { error } } } impl From<ClusterStartError> for HarTexError { fn from(error: ClusterStartError) -> Self { Self::ClusterStartError { error } } } impl From<CreateMessageError> for HarTexError { fn from(error: CreateMessageError) -> Self { Self::CreateMessageError { error } } } impl From<CtrlcError> for HarTexError { fn from(error: CtrlcError) -> Self { Self::CtrlcError { error 
} } } impl From<DecodeError> for HarTexError { fn from(error: DecodeError) -> Self { Self::Base64DecodeError { error } } } impl From<DeserializeBodyError> for HarTexError { fn from(error: DeserializeBodyError) -> Self { Self::DeserializeBodyError { error } } } impl From<EmbedError> for HarTexError { fn from(error: EmbedError) -> Self { Self::EmbedError { error } } } impl From<FromUtf8Error> for HarTexError { fn from(error: FromUtf8Error) -> Self { Self::Utf8ValidationError { error } } } impl From<HttpError> for HarTexError { fn from(error: HttpError) -> Self { Self::TwilightHttpError { error } } } impl From<ImageSourceUrlError> for HarTexError { fn from(error: ImageSourceUrlError) -> Self { Self::EmbedImageSourceUrlError { error } } } impl From<InteractionError> for HarTexError { fn from(error: InteractionError) -> Self { Self::InteractionError { error } } } impl From<SessionInactiveError> for HarTexError { fn from(error: SessionInactiveError) -> Self { Self::SessionInactiveError { error } } } impl From<TomlDeserializationError> for HarTexError { fn from(error: TomlDeserializationError) -> Self { Self::TomlDeserializationError { error } } } impl From<UpdateGuildMemberError> for HarTexError { fn from(error: UpdateGuildMemberError) -> Self { Self::UpdateGuildMemberError { error } } } impl From<UpdateMessageError> for HarTexError { fn from(error: UpdateMessageError) -> Self { Self::UpdateMessageError { error } } } impl From<UpdatePresenceError> for HarTexError { fn from(error: UpdatePresenceError) -> Self { Self::UpdatePresenceError { error } } } pub type HarTexResult<T> = Result<T, HarTexError>;
use std::string::FromUtf8Error; use base64::DecodeError; use ctrlc::Error as CtrlcError; use toml::de::Error as TomlDeserializationError; use crate::discord::{ embed_builder::{ image_source::ImageSourceUrlError, EmbedError }, gateway::{ cluster::{ ClusterCommandError, ClusterStartError, }, shard::SessionInactiveError }, http::{ error::Error as HttpError, request::{ application::InteractionError, channel::message::{ create_message::CreateMessageError, update_message::UpdateMessageError }, guild::member::update_guild_member::UpdateGuildMemberError }, response::DeserializeBodyError }, model::gateway::payload::update_presence::UpdatePresenceError }; #[derive(Debug)] pub enum HarTexError { Base64DecodeError { error:
} } impl From<TomlDeserializationError> for HarTexError { fn from(error: TomlDeserializationError) -> Self { Self::TomlDeserializationError { error } } } impl From<UpdateGuildMemberError> for HarTexError { fn from(error: UpdateGuildMemberError) -> Self { Self::UpdateGuildMemberError { error } } } impl From<UpdateMessageError> for HarTexError { fn from(error: UpdateMessageError) -> Self { Self::UpdateMessageError { error } } } impl From<UpdatePresenceError> for HarTexError { fn from(error: UpdatePresenceError) -> Self { Self::UpdatePresenceError { error } } } pub type HarTexResult<T> = Result<T, HarTexError>;
DecodeError }, ClusterCommandError { error: ClusterCommandError }, ClusterStartError { error: ClusterStartError }, CreateMessageError { error: CreateMessageError }, CtrlcError { error: CtrlcError }, DeserializeBodyError { error: DeserializeBodyError }, EmbedError { error: EmbedError }, EmbedImageSourceUrlError { error: ImageSourceUrlError }, InteractionError { error: InteractionError }, SessionInactiveError { error: SessionInactiveError }, TomlDeserializationError { error: TomlDeserializationError }, TwilightHttpError { error: HttpError }, UpdateGuildMemberError { error: UpdateGuildMemberError }, UpdateMessageError { error: UpdateMessageError }, UpdatePresenceError { error: UpdatePresenceError }, Utf8ValidationError { error: FromUtf8Error }, Custom { message: String } } impl From<ClusterCommandError> for HarTexError { fn from(error: ClusterCommandError) -> Self { Self::ClusterCommandError { error } } } impl From<ClusterStartError> for HarTexError { fn from(error: ClusterStartError) -> Self { Self::ClusterStartError { error } } } impl From<CreateMessageError> for HarTexError { fn from(error: CreateMessageError) -> Self { Self::CreateMessageError { error } } } impl From<CtrlcError> for HarTexError { fn from(error: CtrlcError) -> Self { Self::CtrlcError { error } } } impl From<DecodeError> for HarTexError { fn from(error: DecodeError) -> Self { Self::Base64DecodeError { error } } } impl From<DeserializeBodyError> for HarTexError { fn from(error: DeserializeBodyError) -> Self { Self::DeserializeBodyError { error } } } impl From<EmbedError> for HarTexError { fn from(error: EmbedError) -> Self { Self::EmbedError { error } } } impl From<FromUtf8Error> for HarTexError { fn from(error: FromUtf8Error) -> Self { Self::Utf8ValidationError { error } } } impl From<HttpError> for HarTexError { fn from(error: HttpError) -> Self { Self::TwilightHttpError { error } } } impl From<ImageSourceUrlError> for HarTexError { fn from(error: ImageSourceUrlError) -> Self { 
Self::EmbedImageSourceUrlError { error } } } impl From<InteractionError> for HarTexError { fn from(error: InteractionError) -> Self { Self::InteractionError { error } } } impl From<SessionInactiveError> for HarTexError { fn from(error: SessionInactiveError) -> Self { Self::SessionInactiveError { error }
random
[ { "content": "/// # Trait `Command`\n\n///\n\n/// An application command.\n\n///\n\n/// ## Trait Methods\n\n/// - `name`; return type `String`: the name of the command\n\n/// - `description`; return type `String`: the description of the command\n\n/// - `execute`; parameters `CommandContext`, `InMemoryCache`; return type `FutureRetType<()>`: the execution procedure\n\n/// - `required_cmdopts`; return type `Vec<CommandOption>`: a vector of required command options\n\n/// - `optional_cmdopts`; return type `Vec<CommandOption>`: a vector of optional command options\n\n/// - `enabled_by_default`; return type `bool`: whether the slash command is enabled by default when added to a guild\n\npub trait Command {\n\n fn name(&self) -> String;\n\n\n\n fn description(&self) -> String;\n\n\n\n fn command_type(&self) -> CommandType;\n\n\n\n fn execute<'asynchronous_trait>(&self, ctx: CommandContext, cache: InMemoryCache) -> FutureRetType<'asynchronous_trait, ()>;\n\n\n\n fn required_cmdopts(&self) -> Vec<CommandOption> {\n\n vec![]\n\n }\n\n\n\n fn optional_cmdopts(&self) -> Vec<CommandOption> {\n\n vec![]\n\n }\n\n\n\n fn enabled_by_default(&self) -> bool {\n\n true\n\n }\n", "file_path": "hartex_cmdsys/src/command.rs", "rank": 0, "score": 39265.31903020991 }, { "content": "/// # Trait `Check`\n\n///\n\n/// A pre-command check.\n\n///\n\n/// ## Trait Types\n\n/// - `CheckRetType`: the return type that the check returns\n\npub trait Check {\n\n type CheckRetType;\n\n\n\n fn execute<'asynchronous_trait>(ctx: CommandContext, params: CheckParams) -> FutureRetType<'asynchronous_trait, Self::CheckRetType>;\n\n}\n\n\n\n/// # Struct `CheckParams`\n\n///\n\n/// The parameters to pass to a check.\n\npub struct CheckParams {\n\n // the user id of the message author\n\n user_id: Option<UserId>\n\n}\n\n\n\nimpl CheckParams {\n\n /// # Static Method `CheckParams::builder`\n\n ///\n\n /// Constructs a new `CheckParamsBuilder`.\n\n pub fn builder() -> CheckParamsBuilder {\n\n 
CheckParamsBuilder::new()\n", "file_path": "hartex_cmdsys/src/checks/mod.rs", "rank": 1, "score": 38222.70982668469 }, { "content": "pub fn main() -> HarTexResult<()> {\n\n let tokio_runtime = Builder::new_multi_thread()\n\n .enable_io()\n\n .enable_time()\n\n .thread_name(\"hartex\")\n\n .build()\n\n .unwrap();\n\n\n\n tokio_runtime.block_on(async move {\n\n hartex_driver::hartex_main().await\n\n })?;\n\n tokio_runtime.shutdown_timeout(Duration::from_millis(100));\n\n\n\n Ok(())\n\n}\n", "file_path": "hartex/src/main.rs", "rank": 2, "score": 36221.12271118487 }, { "content": "pub fn default_feature_enabled() -> bool {\n\n false\n\n}\n", "file_path": "hartex_conftoml/src/nightly.rs", "rank": 3, "score": 35319.8641544967 }, { "content": "/// # Function `ansi_display`\n\n///\n\n/// Converts the provided parameters to a string that is `Display`able.\n\n///\n\n/// ## Parameters\n\n/// - `params`, type `Vec<i32>`: the parameters to convert\n\npub fn ansi_display(params: Vec<i32>) -> impl Display {\n\n let strings = params.iter().map(|i| i.to_string()).collect::<Vec<_>>();\n\n\n\n format!(\"{ANSI_ESC_CHAR}[{string}m\", string = strings.join(\";\"))\n\n}\n", "file_path": "hartex_core/src/ansi.rs", "rank": 4, "score": 30218.99600089667 }, { "content": "pub fn from_string(input: String) -> HarTexResult<TomlConfig> {\n\n Ok(match toml::from_str(input.as_str()) {\n\n Ok(config) => config,\n\n Err(error) => {\n\n Logger::error(\n\n &format!(\"failed to deserialize config: {}\", error),\n\n Some(module_path!()),\n\n file!(),\n\n line!(),\n\n column!()\n\n );\n\n\n\n return Err(HarTexError::from(error))\n\n }\n\n })\n\n}\n", "file_path": "hartex_conftoml/src/lib.rs", "rank": 23, "score": 30061.050582633205 }, { "content": "fn deserialize_timezone<'deserialize, Deserializer>(deserializer: Deserializer) -> Result<tz::Timezone, Deserializer::Error>\n\n where\n\n Deserializer: de::Deserializer<'deserialize> {\n\n 
deserializer.deserialize_str(tz::TimezoneDeserializeStringVisitor)\n\n}", "file_path": "hartex_conftoml/src/guildconf/mod.rs", "rank": 24, "score": 27641.557659174567 }, { "content": "fn default_dm_cant_use_cmd() -> bool {\n\n true\n\n}\n\n\n", "file_path": "hartex_conftoml/src/guildconf/mod.rs", "rank": 25, "score": 23703.974158342124 }, { "content": "use std::sync::Arc;\n\n\n\nuse hartex_core::{\n\n discord::{\n\n cache_inmemory::InMemoryCache,\n\n gateway::Cluster,\n\n http::Client,\n\n model::application::interaction::Interaction\n\n },\n\n error::HarTexResult\n\n};\n\n\n\nuse hartex_cmdsys::{\n\n command::Command,\n\n context::{\n\n CommandContext,\n\n CommandContextInner\n\n }\n\n};\n\n\n", "file_path": "hartex_driver/src/interactions.rs", "rank": 26, "score": 13.530059323703489 }, { "content": "//! # The `context` Module\n\n//!\n\n//! This module provides a command context used in commands.\n\n\n\nuse std::{\n\n ops::Deref,\n\n sync::Arc\n\n};\n\n\n\nuse hartex_core::discord::{\n\n gateway::Cluster,\n\n http::Client,\n\n model::application::interaction::Interaction\n\n};\n\n\n\n/// # Struct `CommandContext`\n\n///\n\n/// The command context used for command invocation.\n\n#[derive(Clone)]\n\npub struct CommandContext {\n", "file_path": "hartex_cmdsys/src/context.rs", "rank": 27, "score": 13.426555638877504 }, { "content": "//! # The `events` Module\n\n//!\n\n//! The `events` module provides utility functions for handling events as they come into the event\n\n//! 
loop.\n\n\n\nuse hartex_core::{\n\n discord::{\n\n gateway::{\n\n Cluster,\n\n Event\n\n },\n\n http::Client,\n\n cache_inmemory::InMemoryCache\n\n },\n\n error::{\n\n HarTexError,\n\n HarTexResult\n\n },\n\n events::EventType\n\n};\n", "file_path": "hartex_driver/src/events.rs", "rank": 28, "score": 13.250340568789797 }, { "content": "/// `EventType::Custom`\n\n/// - `http`, type `Client`: the Twilight HTTP Client to use for some specific events that need it\n\n/// - `cluster`, type `Cluster`: the gateway cluster to use for some specific events that need it\n\n#[allow(clippy::needless_lifetimes)]\n\npub async fn handle_event(\n\n (event_type, twilight, custom): (EventType, Option<Event>, Option<HarTexEvent>),\n\n http: Client,\n\n emitter: EventEmitter,\n\n cache: InMemoryCache,\n\n cluster: Cluster\n\n) -> HarTexResult<()> {\n\n match event_type {\n\n EventType::Twilight if twilight.is_some() => {\n\n match twilight.unwrap() {\n\n Event::GuildCreate(payload) => {\n\n EventHandler::guild_create(payload, http).await?\n\n }\n\n Event::InteractionCreate(payload) => {\n\n EventHandler::interaction_create(payload, http, cluster, cache).await?\n\n }\n", "file_path": "hartex_driver/src/events.rs", "rank": 29, "score": 12.903372748309708 }, { "content": "/// An implementation of Discord’s sharding gateway sessions for the `twilight-rs` ecoststem.\n\n/// This is responsible for receiving stateful events in real-time from Discord and sending some\n\n/// stateful information.\n\npub use twilight_gateway as gateway;\n\n\n\n/// Re-export `twilight_http`\n\n///\n\n/// HTTP (HyperText Transfer Protocol) support for the `twilight-rs` ecosystem.\n\npub use twilight_http as http;\n\n\n\n/// Re-export `twilight_mention`\n\n///\n\n/// A utility crate for the `twilight-rs` ecosystem to mention its model types and parse such\n\n/// mentions.\n\npub use twilight_mention as mention;\n\n\n\n/// Re-export `twilight_model`\n\n///\n\n/// A crate of `serde` models defining the Discord APIs 
with a few convenience methods implemented\n\n/// on top of them for the `twilight-rs` ecosystem.\n", "file_path": "hartex_core/src/discord.rs", "rank": 30, "score": 12.667411281216616 }, { "content": "/// - `cluster`: the gateway cluster\n\npub async fn handle_interaction(\n\n interaction: Interaction,\n\n cache: InMemoryCache,\n\n http: Client,\n\n cluster: Cluster\n\n) -> HarTexResult<()> {\n\n match {\n\n match interaction.clone() {\n\n Interaction::ApplicationCommand(command) => {\n\n match &*command.data.name {\n\n // Global Plugin\n\n \"about\" => {\n\n About.execute(\n\n CommandContext {\n\n inner: Arc::new(CommandContextInner {\n\n http,\n\n cluster,\n\n interaction\n\n })\n", "file_path": "hartex_driver/src/interactions.rs", "rank": 31, "score": 11.704392725123267 }, { "content": " discord::{\n\n cache_inmemory::{\n\n InMemoryCache,\n\n ResourceType\n\n },\n\n gateway::{\n\n cluster::{\n\n Cluster,\n\n ShardScheme\n\n },\n\n EventTypeFlags,\n\n Intents\n\n },\n\n http::Client,\n\n model::id::ApplicationId\n\n },\n\n error::HarTexResult,\n\n events::EventType\n\n};\n\n\n", "file_path": "hartex_driver/src/lib.rs", "rank": 32, "score": 11.441566070264859 }, { "content": "//! # The `tz` Module\n\n//!\n\n//! 
This module contains the `Timezone` configuration model.\n\n\n\nuse std::fmt::Formatter;\n\n\n\nuse serde::de::{\n\n Error,\n\n Visitor\n\n};\n\n\n\n/// # Enum `Timezone`\n\n///\n\n/// Represents a timezone.\n\npub enum Timezone {\n\n /// # Enum Variant `Timezone::AsiaHongKong`\n\n ///\n\n /// The \"Asia/Hong_Kong\" timezone.\n\n AsiaHongKong,\n\n\n", "file_path": "hartex_conftoml/src/guildconf/tz.rs", "rank": 33, "score": 11.416867331367254 }, { "content": " /// - `cache`, type `InMemoryCache`: the cache to pass to the command if the message is indeed a command\n\n /// - `http`, type `Client`: the Twilight HTTP client to pass to the command if the message is indeed a command\n\n pub async fn message_create(\n\n _: Box<MessageCreate>,\n\n _: EventEmitter,\n\n _: InMemoryCache,\n\n _: Client,\n\n _: Cluster\n\n ) -> HarTexResult<()> {\n\n Ok(())\n\n }\n\n\n\n /// # Static Asynchronous Method `EventHandler::ready`\n\n ///\n\n /// Handles the `Ready` event.\n\n ///\n\n /// ## Parameters\n\n /// - `payload`, type `Box<Ready>`: the `Ready` event payload\n\n /// - `cluster`, type `Cluster`: the gateway cluster\n\n /// - `http`, type `Client`: the http client\n", "file_path": "hartex_driver/src/handler.rs", "rank": 34, "score": 11.260077913175166 }, { "content": "//! # The `handler` Module\n\n//!\n\n//! This module defines the `EventHandler` struct, which defines various function handlers for\n\n//! 
individual events.\n\n\n\nuse tokio::time;\n\n\n\nuse hartex_core::{\n\n discord::{\n\n cache_inmemory::InMemoryCache,\n\n gateway::Cluster,\n\n http::Client,\n\n model::gateway::{\n\n event::shard::Identifying,\n\n payload::{\n\n update_presence::UpdatePresence,\n\n GuildCreate,\n\n InteractionCreate,\n\n MessageCreate,\n\n Ready,\n", "file_path": "hartex_driver/src/handler.rs", "rank": 35, "score": 10.897028014210953 }, { "content": " pub async fn ready(payload: Box<Ready>, cluster: Cluster, http: Client) -> HarTexResult<()> {\n\n let user = payload.user;\n\n\n\n Logger::info(\n\n format!(\n\n \"{}#{} [id: {}] has successfully startup; using discord api v{}\",\n\n user.name,\n\n user.discriminator,\n\n user.id,\n\n payload.version\n\n ),\n\n Some(module_path!()),\n\n file!(),\n\n line!(),\n\n column!()\n\n );\n\n\n\n for shard in cluster.shards() {\n\n let shard_id = shard.info()?.id();\n\n\n", "file_path": "hartex_driver/src/handler.rs", "rank": 36, "score": 10.198592699472425 }, { "content": " /// - `payload`, type `Box<InteractionCreate>`: the `InteractionCreate` event payload\n\n /// - `http`, type `Client`: the Twilight HTTP client to pass to the command if the message is indeed a command\n\n pub async fn interaction_create(\n\n payload: Box<InteractionCreate>,\n\n http: Client,\n\n cluster: Cluster,\n\n cache: InMemoryCache\n\n ) -> HarTexResult<()> {\n\n crate::interactions::handle_interaction(payload.0, cache, http, cluster).await?;\n\n\n\n Ok(())\n\n }\n\n\n\n /// # Static Asynchronous Method `EventHandler::message_create`\n\n ///\n\n /// Handles the `MessageCreate` event.\n\n ///\n\n /// ## Parameters\n\n /// - `payload`, type `Box<MessageCreate>`: the `MessageCreate` event payload\n\n /// - `emitter`, type `EventEmitter`: the event emitter to use when the message contains an actual command to execute\n", "file_path": "hartex_driver/src/handler.rs", "rank": 37, "score": 9.877845412638543 }, { "content": "\n\n /// # Enum Variant `LogLevel::Warn`\n\n 
///\n\n /// Represents the \"warning\" level. Generally used for things that have gone wrong, but *not*\n\n /// so severe.\n\n Warn,\n\n\n\n /// # Enum Variant `LogLevel::Warn`\n\n ///\n\n /// Represents the \"error\" level. Specifically used when something goes really wrong, or when\n\n /// a fatal error occurs that the bot could not continue its current work and needs to be\n\n /// reported to the current user in the form of a Discord message.\n\n Error,\n\n\n\n /// # Enum Variant `LogLevel::Verbose`\n\n ///\n\n /// Represents the \"verbose\" level. This level is the most verbose, printing a *lot* of\n\n /// information. Can also be used for very useful debugging as if a bug occurs and its cause\n\n /// is difficult to track down.\n\n Verbose\n", "file_path": "hartex_logging/src/level.rs", "rank": 38, "score": 9.709072463011562 }, { "content": "\n\n/// # Asynchronous Function `register_global_commands`\n\n///\n\n/// Registers a global slash command if it has not been previously added.\n\n///\n\n/// ## Parameters\n\n/// `commands`, type `Vec<Box<dyn SlashCommand + Send + Sync>>`: the commands to register.\n\n/// `http`, type `Client`: the Twilight HTTP client to use for registration.\n\npub async fn register_global_commands(commands: Vec<Box<dyn Command + Send + Sync>>, http: Client) -> HarTexResult<()> {\n\n let mut i = 1;\n\n let len = commands.len();\n\n\n\n let existing = match http.get_global_commands()?\n\n .exec()\n\n .await?\n\n .models()\n\n .await {\n\n Ok(commands) => commands,\n\n Err(error) => {\n\n Logger::error(\n", "file_path": "hartex_driver/src/commands.rs", "rank": 39, "score": 9.70812486733165 }, { "content": "//! # `hartex_conftoml` - The HarTex Configuration TOML Library.\n\n//!\n\n//! The `hartex_conftoml` provides an interface for serializing and deserializing TOML\n\n//! 
configuration for HarTex Discord bot.\n\n\n\n#![allow(non_snake_case)]\n\n\n\nuse serde::Deserialize;\n\n\n\nuse hartex_core::error::{\n\n HarTexError,\n\n HarTexResult\n\n};\n\n\n\nuse hartex_logging::Logger;\n\n\n\npub mod dashacc;\n\npub mod guildconf;\n\npub mod nightly;\n\n\n", "file_path": "hartex_conftoml/src/lib.rs", "rank": 40, "score": 9.621875479958232 }, { "content": "//! # The `commands` Module\n\n//!\n\n//! This module defines the command handler, which is used when a command is detected in a message.\n\n\n\nuse tokio::time;\n\n\n\nuse hartex_cmdsys::command::{\n\n Command,\n\n CommandType\n\n};\n\n\n\nuse hartex_core::{\n\n discord::http::Client,\n\n error::{\n\n HarTexError,\n\n HarTexResult\n\n }\n\n};\n\n\n\nuse hartex_logging::Logger;\n", "file_path": "hartex_driver/src/commands.rs", "rank": 41, "score": 9.582491006094745 }, { "content": " pub inner: Arc<CommandContextInner>\n\n}\n\n\n\n/// # Struct `CommandContextInner`\n\n///\n\n/// The inner structure for `CommandContext`.\n\n#[derive(Clone)]\n\npub struct CommandContextInner {\n\n pub http: Client,\n\n pub cluster: Cluster,\n\n pub interaction: Interaction\n\n}\n\n\n\nimpl Deref for CommandContext {\n\n type Target = CommandContextInner;\n\n\n\n fn deref(&self) -> &Self::Target {\n\n &self.inner\n\n }\n\n}\n", "file_path": "hartex_cmdsys/src/context.rs", "rank": 42, "score": 9.504859268008477 }, { "content": "//! # The `level` Module\n\n//!\n\n//! This module contains definitions for various log levels used for the logger.\n\n\n\nuse std::fmt::Display;\n\n\n\n/// # Enum `LogLevel`\n\n///\n\n/// An enumeration represents various log levels used within the logger for the HarTex Discord bot.\n\npub enum LogLevel {\n\n /// # Enum Variant `LogLevel::Info`\n\n ///\n\n /// Represents the \"general information\" level. 
Generally used for displaying information that\n\n /// is not really related to debugging as such.\n\n Info,\n\n\n\n /// # Enum Variant `LogLevel::Debug`\n\n ///\n\n /// Represents the \"debugging\" level. Usually (almost all the time) used for debugging.\n\n Debug,\n", "file_path": "hartex_logging/src/level.rs", "rank": 43, "score": 8.875009173630795 }, { "content": " interaction::Interaction\n\n }\n\n\n\n },\n\n error::{\n\n HarTexError,\n\n HarTexResult\n\n }\n\n};\n\n\n\nuse hartex_utils::{\n\n FutureRetType,\n\n shard_id\n\n};\n\n\n\n/// # Struct `Ping`\n\n///\n\n/// The `ping` command.\n\npub struct Ping;\n\n\n", "file_path": "hartex_plugins/src/global/ping.rs", "rank": 44, "score": 8.863607837824276 }, { "content": "use hartex_conftoml::TomlConfig;\n\n\n\nuse hartex_core::{\n\n discord::model::id::GuildId,\n\n error::{\n\n HarTexError,\n\n HarTexResult,\n\n }\n\n};\n\n\n\nuse hartex_logging::Logger;\n\n\n\nuse crate::PendingFuture;\n\n\n\n/// # Struct `GetGuildConfig`\n\n///\n\n/// Gets the guild configuration from the database.\n\npub struct GetGuildConfig {\n\n pending: Option<PendingFuture<TomlConfig>>,\n\n\n", "file_path": "hartex_dbmani/src/guildconf.rs", "rank": 45, "score": 8.739532907898676 }, { "content": "//! # The `ansi` Module\n\n//!\n\n//! 
This module contains utilities for using ANSI Escape Sequences for use when printing to stdout.\n\n\n\nuse std::fmt::Display;\n\n\n\n/// # Constant `ANSI_ESC_CHAR`\n\n///\n\n/// The ANSI escape character used when creating an ANSI escape sequence.\n\npub const ANSI_ESC_CHAR: char = '\\x1B';\n\n\n\n/// # Enum `AnsiColour`\n\n///\n\n/// An enumerate representing the various colours or custom colours that ANSI supports.\n\npub enum AnsiColour {\n\n /// # Enum Variant `AnsiColour::Black`\n\n ///\n\n /// Black, converted to `30` when the `AnsiColour::into_i32s` instance method is invoked on this\n\n /// variant with the parameter `foreground` set to `true`, `40` otherwise.\n\n Black,\n", "file_path": "hartex_core/src/ansi.rs", "rank": 46, "score": 8.730273013717756 }, { "content": "use hartex_core::error::{\n\n HarTexError,\n\n HarTexResult\n\n};\n\n\n\nuse hartex_logging::Logger;\n\n\n\nuse crate::{\n\n whitelist::model::WhitelistedGuild,\n\n PendingFuture\n\n};\n\n\n\nmod model;\n\n\n\n/// # Struct `GetWhitelistedGuilds`\n\n///\n\n/// Gets the whitelisted guilds of the bot.\n\npub struct GetWhitelistedGuilds {\n\n pending: Option<PendingFuture<Vec<WhitelistedGuild>>>\n\n}\n", "file_path": "hartex_dbmani/src/whitelist/mod.rs", "rank": 47, "score": 8.638258733148582 }, { "content": " callback::{\n\n CallbackData,\n\n InteractionResponse\n\n },\n\n interaction::Interaction\n\n }\n\n\n\n },\n\n error::{\n\n HarTexError,\n\n HarTexResult\n\n }\n\n};\n\n\n\nuse hartex_utils::FutureRetType;\n\n\n\n/// # Struct `Team`\n\n///\n\n/// The `team` command.\n\npub struct Team;\n", "file_path": "hartex_plugins/src/global/team.rs", "rank": 48, "score": 8.605125411565073 }, { "content": " interaction::Interaction\n\n }\n\n\n\n },\n\n error::{\n\n HarTexError,\n\n HarTexResult\n\n }\n\n};\n\n\n\nuse hartex_utils::FutureRetType;\n\n\n\n/// # Struct `Source`\n\n///\n\n/// The `source` command.\n\npub struct Source;\n\n\n\nimpl Command for Source {\n\n fn name(&self) -> String {\n\n 
String::from(\"source\")\n", "file_path": "hartex_plugins/src/global/source.rs", "rank": 49, "score": 8.441333902185388 }, { "content": "//! # `hartex_dbmani` - The HarTex PostgreSQL Database Manipulation Library\n\n//!\n\n//! The `hartex_dbmani` provides an interface for manipulating the PostgreSQL databases used by\n\n//! HarTex Discord bot.\n\n\n\n#![feature(format_args_capture)]\n\n\n\n#![allow(non_snake_case)]\n\n\n\nuse std::{\n\n future::Future,\n\n pin::Pin\n\n};\n\n\n\nuse hartex_core::error::HarTexResult;\n\n\n\npub mod guildconf;\n\npub mod whitelist;\n\n\n\n/// # Typealias `PendingFuture`\n\n///\n\n/// Represents a pending future that is yet to return.\n\n///\n\n/// ## Generic Parameters\n\n/// - `T`: represents the type that the pending future is to return.\n", "file_path": "hartex_dbmani/src/lib.rs", "rank": 50, "score": 7.722241205102008 }, { "content": "//! # `hartex_utils` - The HarTex Utilities Library\n\n//!\n\n//! The `hartex_utils` library provides several utilities for the HarTex Discord bot.\n\n\n\n#![feature(format_args_capture)]\n\n\n\nuse std::{\n\n future::Future,\n\n pin::Pin\n\n};\n\n\n\nuse hartex_core::error::HarTexResult;\n\n\n\npub mod cdn;\n\npub mod stopwatch;\n\n\n\n/// # Constant Function `shard_id`\n\n///\n\n/// Computes the shard id for a specific guild by the guild id and the number of shards.\n\n///\n", "file_path": "hartex_utils/src/lib.rs", "rank": 51, "score": 7.704755854975068 }, { "content": " let intents = Intents::all();\n\n\n\n let http = Client::builder()\n\n .application_id(ApplicationId::from(application_id.parse::<u64>().unwrap()))\n\n .token(token.clone())\n\n .build();\n\n\n\n let (cluster, events) = Cluster::builder(token, intents)\n\n .event_types(EventTypeFlags::all())\n\n .http_client(http.clone())\n\n .shard_scheme(shard_scheme)\n\n .build()\n\n .await?;\n\n\n\n let cluster_spawn = cluster.clone();\n\n\n\n tokio::spawn(async move {\n\n cluster_spawn.up().await;\n\n });\n\n\n", "file_path": 
"hartex_driver/src/lib.rs", "rank": 52, "score": 7.5378112943604565 }, { "content": " + \" - guild member count of at least 250;\"\n\n + \" - be always abide by the Discord Terms of Service (<https://discord.com/terms>) and Community Guidelines (<https://discord.com/guidelines);\"\n\n + \" - how old is the guild and/or how active is it; and\"\n\n + \" - your experience level with TOML to configure the bot before using it.\\n\\n\"\n\n + \"You may join our Support Guild at <discord.gg/s8qjxZK> for more information, including the application link in which you may use\"\n\n + \"to apply for a whitelist application. Good luck!\";\n\n\n\n http.create_message(dm_channel.id).content(&message)?.exec().await?;\n\n\n\n Logger::error(\n\n \"leaving guild\",\n\n Some(module_path!()),\n\n file!(),\n\n line!(),\n\n column!()\n\n );\n\n\n\n http.leave_guild(guild_id).exec().await?;\n\n\n\n return Err(HarTexError::Custom {\n", "file_path": "hartex_driver/src/handler.rs", "rank": 53, "score": 7.511673466848356 }, { "content": " Event::MessageCreate(payload) => {\n\n EventHandler::message_create(payload, emitter, cache, http, cluster).await?\n\n }\n\n Event::Ready(payload) => {\n\n EventHandler::ready(payload, cluster, http).await?\n\n }\n\n Event::ShardIdentifying(payload) => {\n\n EventHandler::shard_identifying(payload).await?\n\n }\n\n _ => ()\n\n }\n\n },\n\n EventType::Custom if custom.is_some() => {\n\n match custom.unwrap() {\n\n HarTexEvent::CommandExecuted(payload) => {\n\n EventHandler::command_executed(payload).await?\n\n }\n\n }\n\n }\n\n _ => return Err(HarTexError::Custom {\n\n message: String::from(\"event type mismatch\")\n\n })\n\n }\n\n\n\n Ok(())\n\n}\n", "file_path": "hartex_driver/src/events.rs", "rank": 54, "score": 7.485662419041721 }, { "content": " http,\n\n cluster,\n\n interaction\n\n })\n\n },\n\n cache\n\n ).await\n\n },\n\n \"team\" => {\n\n Team.execute(\n\n CommandContext {\n\n inner: Arc::new(CommandContextInner {\n\n http,\n\n cluster,\n\n 
interaction\n\n })\n\n },\n\n cache\n\n ).await\n\n }\n", "file_path": "hartex_driver/src/interactions.rs", "rank": 55, "score": 7.437458421329803 }, { "content": "//! # The `discord` Module\n\n//!\n\n//! This module contains re-exports of *most* of the `twilight` ecosystem of crates to reduce the\n\n//! need to add the `twilight_*` dependencies to the `Cargo.toml`s of the individual separated\n\n//! HarTex crates.\n\n\n\n/// Re-export `twilight_cache_inmemory`\n\n///\n\n/// An in-process memory cache for the `twilight-rs` ecosystem. It’s responsible for processing\n\n/// events and caching things like guilds, channels, users, and voice states.\n\npub use twilight_cache_inmemory as cache_inmemory;\n\n\n\n/// Re-export `twilight_embed_builder`\n\n///\n\n/// A set of builders for the `twilight-rs` ecosystem for creating message embeds and are useful\n\n/// when creating or updating messages.\n\npub use twilight_embed_builder as embed_builder;\n\n\n\n/// Re-export `twilight_gateway`\n\n///\n", "file_path": "hartex_core/src/discord.rs", "rank": 56, "score": 7.3502276384183896 }, { "content": "//! # `hartex_core` - The HarTex Core Library\n\n//!\n\n//! The `hartex_core` library contains the core functionality for the HarTex Discord bot.\n\n//!\n\n//! ## Features\n\n//!\n\n//! - `twilight-bundled`: bundles most of the `twilight` ecosystem of crates with the library,\n\n//! removes the need to include the dependencies repeatedly across the\n\n//! HarTex crates.\n\n\n\n#![feature(format_args_capture)]\n\n\n\npub use ctrlc;\n\n\n\npub mod ansi;\n\n#[cfg(feature = \"twilight-bundled\")]\n\npub mod discord;\n\npub mod error;\n\npub mod events;\n\npub mod time;\n\n\n\n/// # Static `HARTEX_BUILD`\n\n///\n\n/// Represents the current version and build of HarTex Discord bot.\n\npub static HARTEX_BUILD: &'static str = \"Version 1.21.0, 22H1 (Build 262)\";\n", "file_path": "hartex_core/src/lib.rs", "rank": 57, "score": 7.0273819017100685 }, { "content": "//! 
# The `isglobadmin` Module\n\n//!\n\n//! This module implements a check for whether the message author is the global administrator himself.\n\n\n\nuse std::env;\n\n\n\nuse hartex_core::{\n\n discord::model::id::UserId,\n\n error::{\n\n HarTexError,\n\n HarTexResult\n\n }\n\n};\n\n\n\nuse hartex_logging::Logger;\n\n\n\nuse hartex_utils::FutureRetType;\n\n\n\nuse crate::{\n\n checks::{\n", "file_path": "hartex_cmdsys/src/checks/isglobadmin.rs", "rank": 58, "score": 6.989808150954915 }, { "content": "pub struct EventHandler;\n\n\n\n// Twilight Events\n\nimpl EventHandler {\n\n /// # Static Asynchronous Method `EventHandler::guild_create`\n\n ///\n\n /// Handles the `GuildCreate` event.\n\n ///\n\n /// ## Parameters\n\n /// - `payload`, type `Box<GuildCreate>`: the `GuildCreate` event payload\n\n /// - `http`, type `Client`: the Twilight HTTP Client to use for sending a message to the guild\n\n /// owner about his/her guild's whitelist status if the guild is not\n\n /// in the whitelist or that the whitelist has been removed, or that\n\n /// the guild has been previously been whitelisted but the whitelist\n\n /// is deactivated with a reason.\n\n pub async fn guild_create(payload: Box<GuildCreate>, http: Client) -> HarTexResult<()> {\n\n let guild_id = payload.id;\n\n\n\n Logger::verbose(\n\n format!(\"joined a new guild with name `{name}` with id {guild_id}; checking whether the guild is whitelisted\", name = payload.name),\n", "file_path": "hartex_driver/src/handler.rs", "rank": 59, "score": 6.873645574814365 }, { "content": " (EventType::Twilight, Some(twilight), None),\n\n http.clone(),\n\n emitter.clone(),\n\n cache.clone(),\n\n cluster.clone()\n\n ));\n\n }\n\n Either::Right(custom) => {\n\n tokio::spawn(events::handle_event(\n\n (EventType::Custom, None, Some(custom)),\n\n http.clone(),\n\n emitter.clone(),\n\n cache.clone(),\n\n cluster.clone()\n\n ));\n\n }\n\n }\n\n }\n\n\n\n Ok(())\n\n}\n", "file_path": "hartex_driver/src/lib.rs", "rank": 60, "score": 
6.72416334069764 }, { "content": "//! # The `guildconf` Module\n\n//!\n\n//! This module contains configuration models specifically for guild-specific configuration.\n\n\n\nuse serde::{\n\n de,\n\n Deserialize\n\n};\n\n\n\nmod tz;\n\n\n\n/// # Struct `GuildConfiguration`\n\n///\n\n/// Represents guild-specific configuration.\n\n#[derive(Deserialize)]\n\npub struct GuildConfiguration {\n\n #[serde(default = \"default_nickname\")]\n\n pub nickname: String,\n\n #[serde(default = \"default_timezone\", deserialize_with = \"deserialize_timezone\")]\n\n pub timezone: tz::Timezone,\n\n #[serde(default = \"default_dm_cant_use_cmd\")]\n\n pub dmCannotUseCommand: bool\n\n}\n\n\n", "file_path": "hartex_conftoml/src/guildconf/mod.rs", "rank": 61, "score": 6.715828431438484 }, { "content": "//! # The `payload` Module\n\n//!\n\n//! This module contains various event payloads used for the custom event system.\n\n\n\nuse hartex_core::discord::model::id::GuildId;\n\n\n\n/// # Struct `CommandExecuted`\n\n///\n\n/// The payload for which when the command is executed.\n\n#[derive(Clone)]\n\npub struct CommandExecuted {\n\n pub command: String,\n\n pub guild_id: GuildId\n\n}\n", "file_path": "hartex_model/src/payload.rs", "rank": 62, "score": 6.670706443900659 }, { "content": "//! # The `listener` Module\n\n//!\n\n//! 
This module implements listeners for sending events.\n\n\n\nuse std::sync::Arc;\n\n\n\nuse dashmap::DashMap;\n\n\n\nuse futures_channel::mpsc::{\n\n self,\n\n UnboundedReceiver,\n\n UnboundedSender\n\n};\n\n\n\n/// # Struct `Listener`\n\n///\n\n/// Represents an event listener.\n\n#[derive(Debug, Clone)]\n\npub struct Listener<T> {\n\n pub sender: UnboundedSender<T>\n", "file_path": "hartex_eventsys/src/listener.rs", "rank": 63, "score": 6.63244849202031 }, { "content": " },\n\n presence::{\n\n Activity,\n\n ActivityType,\n\n Status\n\n }\n\n }\n\n },\n\n error::{\n\n HarTexError,\n\n HarTexResult\n\n }\n\n};\n\n\n\nuse hartex_dbmani::{\n\n guildconf::GetGuildConfig,\n\n whitelist::GetWhitelistedGuilds\n\n};\n\n\n\nuse hartex_eventsys::emitter::EventEmitter;\n", "file_path": "hartex_driver/src/handler.rs", "rank": 64, "score": 6.623965094999882 }, { "content": "pub use twilight_model as model;\n\n\n\n/// Re-export `twilight_standby`\n\n///\n\n/// Standby is a utility crate for the `twilight-rs` ecossytem to wait for an event to happen based\n\n/// on a predicate check. For example, you may have a command that has a reaction menu of ✅ and ❌.\n\n/// If you want to handle a reaction to these, using something like an application-level state or\n\n/// event stream may not suit your use case. 
It may be cleaner to wait for a reaction inline to\n\n/// your function.\n\npub use twilight_standby as standby;\n\n\n\n/// Re-export `twilight_util`\n\n///\n\n/// A set of utility types and functions for the `twilight-rs` ecosystem to augment or enhance\n\n/// default functionality.\n\npub use twilight_util as util;\n", "file_path": "hartex_core/src/discord.rs", "rank": 65, "score": 6.610876418192218 }, { "content": " },\n\n model::application::{\n\n callback::{\n\n CallbackData,\n\n InteractionResponse\n\n },\n\n interaction::Interaction\n\n }\n\n\n\n },\n\n error::{\n\n HarTexError,\n\n HarTexResult\n\n },\n\n HARTEX_BUILD\n\n};\n\n\n\nuse hartex_dbmani::whitelist::GetWhitelistedGuilds;\n\n\n\nuse hartex_utils::FutureRetType;\n", "file_path": "hartex_plugins/src/global/about.rs", "rank": 66, "score": 6.530654460517063 }, { "content": "use hartex_eventsys::emitter::EventEmitter;\n\n\n\nuse hartex_logging::Logger;\n\n\n\npub mod commands;\n\npub mod events;\n\npub mod handler;\n\npub mod interactions;\n\n\n\n/// # Asynchronous Function `hartex_main`\n\n///\n\n/// This is the main entry point of HarTex Discord Bot.\n\npub async fn hartex_main() -> HarTexResult<()> {\n\n // loads the .env file to obtain environment variables\n\n dotenv::dotenv().ok();\n\n\n\n Logger::verbose(\n\n \"loaded environment variables\",\n\n Some(module_path!()),\n\n file!(),\n", "file_path": "hartex_driver/src/lib.rs", "rank": 67, "score": 6.488044553920449 }, { "content": "//! # The `time` Module\n\n//! \n\n//! 
This module provides time functionality for HarTex.\n\n\n\npub use chrono::*;\n", "file_path": "hartex_core/src/time.rs", "rank": 68, "score": 6.369159771372894 }, { "content": " }\n\n}\n\n\n\nimpl Stream for Events {\n\n type Item = HarTexEvent;\n\n\n\n fn poll_next(mut self: Pin<&mut Self>, context: &mut Context<'_>) -> Poll<Option<Self::Item>> {\n\n self.receiver.poll_next_unpin(context)\n\n }\n\n}\n\n\n\n/// # Enum `HarTexEvent`\n\n///\n\n/// An enumeration represents the various custom-defined events that is used within HarTex.\n\n#[derive(Clone)]\n\npub enum HarTexEvent {\n\n /// # Enum Variant `HarTexEvent::CommandExecuted`\n\n ///\n\n /// A command is executed.\n\n ///\n\n /// ## Tuple Struct Parameters\n\n /// - `0`, type `Box<CommandExecuted<'a>>`: the payload of the event.\n\n CommandExecuted(Box<CommandExecuted>)\n\n}\n", "file_path": "hartex_eventsys/src/events.rs", "rank": 69, "score": 6.364881659101627 }, { "content": "use hartex_logging::Logger;\n\n\n\nuse hartex_plugins::{\n\n global::{\n\n about::About,\n\n ping::Ping,\n\n source::Source,\n\n team::Team,\n\n },\n\n information::userinfo::Userinfo\n\n};\n\n\n\n/// # Asynchronous Function `handle_interaction`\n\n///\n\n/// Handles the incoming interaction asynchronously.\n\n///\n\n/// ## Parameters\n\n/// - `interaction`, type `Interaction`: the interaction\n\n/// - `cache`, type `InMemoryCache`: the in-memory cache\n\n/// - `http`, type `Client`: the Twilight HTTP client\n", "file_path": "hartex_driver/src/interactions.rs", "rank": 70, "score": 6.32377785107469 }, { "content": " HarTexError::Custom {\n\n message: String::from(\"invalid interaction type: expected ApplicationCommand\")\n\n }\n\n )\n\n };\n\n\n\n if interaction.guild_id.is_none() || interaction.user.is_some() {\n\n ctx.http\n\n .interaction_callback(\n\n interaction.id,\n\n &interaction.token,\n\n &InteractionResponse::ChannelMessageWithSource(\n\n CallbackData {\n\n allowed_mentions: None,\n\n components: None,\n\n content: 
Some(String::from(\":x: This command can only be used in a guild.\")),\n\n embeds: vec![],\n\n flags: None,\n\n tts: None\n\n }\n", "file_path": "hartex_plugins/src/information/userinfo.rs", "rank": 71, "score": 6.319120265402735 }, { "content": " /// # Enum Variant `Timezone::UTC`\n\n ///\n\n /// The \"UTC\" timezone.\n\n UTC\n\n}\n\n\n\n/// # Struct `TimezoneDeserializeStringVisitor`\n\n///\n\n/// A `String` visitor for deserializing a `Timezone`.\n\npub struct TimezoneDeserializeStringVisitor;\n\n\n\nimpl<'visitor> Visitor<'visitor> for TimezoneDeserializeStringVisitor {\n\n type Value = Timezone;\n\n\n\n fn expecting(&self, formatter: &mut Formatter) -> std::fmt::Result {\n\n write!(formatter, \"a string representing a timezone\")\n\n }\n\n\n\n fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>\n\n where\n\n E: Error {\n\n Ok(match v {\n\n \"Asia/Hong_Kong\" => Timezone::AsiaHongKong,\n\n \"UTC\" => Timezone::UTC,\n\n _ => return Err(Error::custom(\"invalid timezone\"))\n\n })\n\n }\n\n}", "file_path": "hartex_conftoml/src/guildconf/tz.rs", "rank": 72, "score": 6.178576240806345 }, { "content": "//! The `emitter` Module\n\n//!\n\n//! This module contains an emitter used for emitting the custom events.\n\n\n\nuse crate::{\n\n events,\n\n listener\n\n};\n\n\n\n#[derive(Clone)]\n\npub struct EventEmitter {\n\n listeners: listener::Listeners<events::HarTexEvent>\n\n}\n\n\n\nimpl EventEmitter {\n\n pub fn new(listeners: listener::Listeners<events::HarTexEvent>) -> Self {\n\n Self {\n\n listeners\n\n }\n\n }\n", "file_path": "hartex_eventsys/src/emitter.rs", "rank": 73, "score": 6.177002596867189 }, { "content": "//! # The `events` Module\n\n//! \n\n//! The `events` module contains definitions for events emitted from the Discord API as well as\n\n//! 
our custom events.\n\n\n\n/// # Enum `EventType`\n\n/// \n\n/// Represents the type of an event.\n\npub enum EventType {\n\n /// # Enum Variant `EventType::Twilight`\n\n /// \n\n /// A twilight event.\n\n Twilight,\n\n \n\n /// # Enum Variant `EventType::Custom`\n\n /// \n\n /// A custom event.\n\n Custom\n\n}\n", "file_path": "hartex_core/src/events.rs", "rank": 74, "score": 6.164913251761579 }, { "content": "}\n\n\n\nimpl LogLevel {\n\n pub fn display(self) -> impl Display {\n\n match self {\n\n Self::Info => \"INFO \",\n\n Self::Debug => \"DEBUG \",\n\n Self::Warn => \"WARN \",\n\n Self::Error => \"ERROR \",\n\n Self::Verbose => \"VERBOSE\"\n\n }\n\n }\n\n}\n", "file_path": "hartex_logging/src/level.rs", "rank": 75, "score": 6.159640644169656 }, { "content": "//! # The `dashacc` Module\n\n//!\n\n//! This module contains configuration models specifically for dashboard access configuration.\n\n\n\nuse serde::Deserialize;\n\n\n\n/// # Struct `DashboardAccess`\n\n///\n\n/// Represents the dashboard access of a user.\n\n#[derive(Deserialize)]\n\npub struct DashboardAccess {\n\n pub userId: u64,\n\n pub accessLevel: u8\n\n}\n", "file_path": "hartex_conftoml/src/dashacc.rs", "rank": 76, "score": 6.077340841067997 }, { "content": " id::UserId\n\n }\n\n },\n\n error::{\n\n HarTexError,\n\n HarTexResult\n\n }\n\n};\n\n\n\nuse hartex_utils::{\n\n cdn::{\n\n Cdn,\n\n CdnResourceFormat\n\n },\n\n FutureRetType\n\n};\n\n\n\n/// # Struct `Userinfo`\n\n///\n\n/// The `userinfo` command.\n", "file_path": "hartex_plugins/src/information/userinfo.rs", "rank": 77, "score": 6.051731934214356 }, { "content": "//! # `hartex` - The HarTex Binary Crate\n\n//!\n\n//! This crate contains the main function which calls the `hartex_main` \"main function\" in the\n\n//! 
`hartex_driver` crate which glues everything together.\n\n\n\nuse std::time::Duration;\n\n\n\nuse tokio::runtime::Builder;\n\n\n\nuse hartex_core::error::HarTexResult;\n\n\n", "file_path": "hartex/src/main.rs", "rank": 78, "score": 5.9522822587032325 }, { "content": "/// Executes the `ping` command.\n\n///\n\n/// ## Parameters\n\n/// - `ctx`, type `CommandContext`: the command context to use.\n\nasync fn execute_ping_command(ctx: CommandContext) -> HarTexResult<()> {\n\n let interaction = match ctx.interaction.clone() {\n\n Interaction::ApplicationCommand(command) => command,\n\n _ => return Err(\n\n HarTexError::Custom {\n\n message: String::from(\"invalid interaction type: expected ApplicationCommand\")\n\n }\n\n )\n\n };\n\n\n\n let content = String::from(\"Hello! Did you need anything? :eyes:\");\n\n\n\n ctx.http\n\n .interaction_callback(\n\n interaction.id,\n\n &interaction.token,\n", "file_path": "hartex_plugins/src/global/ping.rs", "rank": 79, "score": 5.946549224049754 }, { "content": "\n\n // Information Plugin\n\n \"userinfo\" => {\n\n Userinfo.execute(\n\n CommandContext {\n\n inner: Arc::new(CommandContextInner {\n\n http,\n\n cluster,\n\n interaction\n\n })\n\n },\n\n cache\n\n ).await\n\n }\n\n _ => Ok(())\n\n }\n\n }\n\n _ => Ok(())\n\n }\n\n } {\n", "file_path": "hartex_driver/src/interactions.rs", "rank": 80, "score": 5.932667105500564 }, { "content": "//! # The `cdn` Module\n\n//!\n\n//! 
This module implements a wrapper of the Discord CDN.\n\n\n\nuse std::fmt::{\n\n self,\n\n Display,\n\n Formatter\n\n};\n\n\n\nuse hartex_core::discord::model::id::UserId;\n\n\n\n/// # Struct `Cdn`\n\n///\n\n/// The \"namespace\" for various CDN endpoints\n\npub struct Cdn;\n\n\n\nimpl Cdn {\n\n pub fn default_user_avatar(discriminator: u16) -> String {\n\n format!(\"https://cdn.discordapp.com/embed/avatars/{discriminator}.png\", discriminator = discriminator % 5)\n", "file_path": "hartex_utils/src/cdn.rs", "rank": 81, "score": 5.8641967203445855 }, { "content": " .unwrap();\n\n let new_content = format!(\"{content} - `{latency}ms`\", latency = latency.as_millis());\n\n\n\n match ctx.http\n\n .update_interaction_original(&interaction.token)?\n\n .content(Some(&new_content)) {\n\n Ok(update) => update,\n\n Err(error) => {\n\n return Err(HarTexError::Custom {\n\n message: format!(\"failed to update original response: {error}\")\n\n });\n\n }\n\n }\n\n .exec()\n\n .await?;\n\n\n\n Ok(())\n\n}\n", "file_path": "hartex_plugins/src/global/ping.rs", "rank": 82, "score": 5.8100716011192866 }, { "content": "/// - `ctx`, type `CommandContext`: the command context to use.\n\nasync fn execute_source_command(ctx: CommandContext) -> HarTexResult<()> {\n\n let interaction = match ctx.interaction.clone() {\n\n Interaction::ApplicationCommand(command) => command,\n\n _ => return Err(\n\n HarTexError::Custom {\n\n message: String::from(\"invalid interaction type: expected ApplicationCommand\")\n\n }\n\n )\n\n };\n\n\n\n ctx.http\n\n .interaction_callback(\n\n interaction.id,\n\n &interaction.token,\n\n &InteractionResponse::ChannelMessageWithSource(\n\n CallbackData {\n\n allowed_mentions: None,\n\n components: None,\n\n content: Some(\n", "file_path": "hartex_plugins/src/global/source.rs", "rank": 83, "score": 5.799993364758075 }, { "content": " column!()\n\n );\n\n\n\n let shard_scheme = ShardScheme::Auto;\n\n\n\n Logger::verbose(\n\n \"building bot cluster\",\n\n 
Some(module_path!()),\n\n file!(),\n\n line!(),\n\n column!()\n\n );\n\n Logger::verbose(\n\n \"registering gateway intents [all]\",\n\n Some(module_path!()),\n\n file!(),\n\n line!(),\n\n column!()\n\n );\n\n\n", "file_path": "hartex_driver/src/lib.rs", "rank": 84, "score": 5.74379454388527 }, { "content": "//! # The `checks` Module\n\n//!\n\n//! This module contains implementations of several pre-command checks.\n\n\n\nuse hartex_core::discord::model::id::UserId;\n\n\n\nuse hartex_utils::FutureRetType;\n\n\n\nuse crate::context::CommandContext;\n\n\n\npub mod isglobadmin;\n\n\n\n/// # Trait `Check`\n\n///\n\n/// A pre-command check.\n\n///\n\n/// ## Trait Types\n\n/// - `CheckRetType`: the return type that the check returns\n", "file_path": "hartex_cmdsys/src/checks/mod.rs", "rank": 85, "score": 5.7039609108696645 }, { "content": "//! # The `model` Module\n\n//!\n\n//! This module contains some models for use in the `GetWhitelistedGuilds` future.\n\n\n\nuse sqlx::{\n\n postgres::PgRow,\n\n Result as SqlxResult,\n\n Row\n\n};\n\n\n\npub struct WhitelistedGuild {\n\n pub GuildName: String,\n\n pub GuildId: u64\n\n}\n\n\n\nimpl<'r> sqlx::FromRow<'r, PgRow> for WhitelistedGuild {\n\n fn from_row(row: &'r PgRow) -> SqlxResult<Self> {\n\n let name = row.try_get::<String, &str>(\"GuildName\")?;\n\n let id = row.try_get::<i64, &str>(\"GuildId\")?;\n\n\n\n Ok(Self {\n\n GuildName: name,\n\n GuildId: id as u64\n\n })\n\n }\n\n}\n", "file_path": "hartex_dbmani/src/whitelist/model.rs", "rank": 86, "score": 5.701814036273074 }, { "content": "//! # The `stopwatch` Module\n\n//!\n\n//! 
This module implements a stopwatch for timing things.\n\n\n\nuse std::convert::TryInto;\n\n\n\nuse hartex_core::time::{\n\n DateTime,\n\n Local\n\n};\n\n\n\n/// # Struct `Stopwatch`\n\n///\n\n/// A stopwatch for timing things.\n\npub struct Stopwatch {\n\n start: DateTime<Local>\n\n}\n\n\n\nimpl Stopwatch {\n\n /// # Constructor `Stopwatch::new`\n", "file_path": "hartex_utils/src/stopwatch.rs", "rank": 87, "score": 5.6373168858274205 }, { "content": " Self::White => vec![ if foreground { 37 } else { 47 } ],\n\n Self::CustomU8 { n } => vec![ if foreground { 38 } else { 48 }, 5, n.into() ],\n\n Self::CustomRgb { r, g, b} => vec![ if foreground { 38 } else { 48 }, 5, r.into(), g.into(), b.into() ]\n\n }\n\n }\n\n}\n\n\n\n/// # Enum `SgrParam`\n\n///\n\n/// An enumerate representing the SGR Parameters, also known as the Select Graphics Rendition\n\n/// Parameters; which sets display attributes.\n\npub enum SgrParam {\n\n /// # Enum Variant `SgrParam::Reset`\n\n ///\n\n /// Reset or normal; converted to `0` when the `SgrParam::into_i32s`s` instance method is invoked\n\n /// on this variant.\n\n Reset,\n\n\n\n /// # Enum Variant `SgrParam::BoldOrIncreasedIntensity`\n\n ///\n", "file_path": "hartex_core/src/ansi.rs", "rank": 88, "score": 5.415004043204792 }, { "content": "//! # `hartex_model` - Models for HarTex Discord bot\n\n//!\n\n//! The `hartex_model` crate contains various models for use in HarTex Discord bot for its various\n\n//! 
functionalities.\n\n\n\npub mod payload;\n", "file_path": "hartex_model/src/lib.rs", "rank": 89, "score": 5.403301705291547 }, { "content": " },\n\n cache\n\n ).await\n\n }\n\n \"ping\" => {\n\n Ping.execute(\n\n CommandContext {\n\n inner: Arc::new(CommandContextInner {\n\n http,\n\n cluster,\n\n interaction\n\n })\n\n },\n\n cache\n\n ).await\n\n }\n\n \"source\" => {\n\n Source.execute(\n\n CommandContext {\n\n inner: Arc::new(CommandContextInner {\n", "file_path": "hartex_driver/src/interactions.rs", "rank": 90, "score": 5.350563371899403 }, { "content": "//! # `hartex_logging` - The HarTex Logging Library\n\n//!\n\n//! The `hartex_logging` library contains an implementation of a logger for the HarTex Discord bot.\n\n\n\n#![feature(format_args_capture)]\n\n\n\nuse hartex_core::{\n\n ansi::{\n\n ansi_display,\n\n AnsiColour,\n\n SgrParam\n\n },\n\n time::Local\n\n};\n\n\n\npub mod level;\n\n\n\n/// # Struct `Logger`\n\n///\n\n/// The main logger that HarTex Discord bot uses.\n", "file_path": "hartex_logging/src/lib.rs", "rank": 91, "score": 5.159617897178435 }, { "content": " /// Handles the `Identifying` event.\n\n ///\n\n /// ## Parameters\n\n ///\n\n /// - `payload`, type `Identifying`: the `Identifying` event payload\n\n pub async fn shard_identifying(payload: Identifying) -> HarTexResult<()> {\n\n Logger::verbose(\n\n format!(\n\n \"shard {} is identifying with the discord gateway\",\n\n payload.shard_id\n\n ),\n\n Some(module_path!()),\n\n file!(),\n\n line!(),\n\n column!()\n\n );\n\n\n\n Ok(())\n\n }\n\n}\n", "file_path": "hartex_driver/src/handler.rs", "rank": 92, "score": 5.026463273813528 }, { "content": "//! # The `nightly` Module\n\n//!\n\n//! This module contains configuration for opt-in nightly unstable features that are in the testing\n\n//! phase.\n\n//!\n\n//! 
This API is very unstable and may change rapidly as features are added into the bot.\n\n\n\nuse serde::Deserialize;\n\n\n\n/// # Struct `NightlyFeatures`\n\n///\n\n/// The opt-in nightly features that the bot provides.\n\n#[derive(Default, Deserialize)]\n\npub struct NightlyFeatures {\n\n // Experimental Support for the Discord Threads API\n\n #[serde(default = \"default_feature_enabled\")]\n\n pub threads: bool,\n\n // Experimental Support for localization Facilities, i.e. timezones, languages\n\n #[serde(default = \"default_feature_enabled\")]\n\n pub localization: bool\n\n}\n\n\n", "file_path": "hartex_conftoml/src/nightly.rs", "rank": 93, "score": 4.996463973486492 }, { "content": " Underline,\n\n\n\n /// # Enum Variant `SgrParam::SetColour`\n\n ///\n\n /// Sets the colour, either foreground or background; converted to `5` and the following\n\n /// parameters when the `SgrParam::into_i32s` instance method is invoked on this variant.\n\n ///\n\n /// ## Fields\n\n /// - `colour`, type `AnsiColour`: the colour to set.\n\n /// - `foreground`, type `bool`: whether the colour set is used for foreground; `false` when\n\n /// the colour is used for background.\n\n SetColour {\n\n colour: AnsiColour,\n\n foreground: bool\n\n }\n\n}\n\n\n\nimpl SgrParam {\n\n /// # Instance Method `SgrParam::into_i32s`\n\n ///\n", "file_path": "hartex_core/src/ansi.rs", "rank": 94, "score": 4.940350404089388 }, { "content": " /// Logs a message to the console with the \"error\" log level.\n\n ///\n\n /// ## Parameters\n\n /// - `message`, type `impl Into<String>`: the message to be logged\n\n /// - `module`, type `Option<&'static str>`: the module the where the static method is invoked; can be\n\n /// `None`, and defaults to the `hartex_logging` module.\n\n pub fn error(message: impl Into<String>, module: Option<&'static str>, file: &'static str, line: u32, column: u32) {\n\n Self::log(message, level::LogLevel::Error, module, file, line, column)\n\n }\n\n\n\n /// # Static Method 
`Logger::verbose`\n\n ///\n\n /// Logs a message to the console with the \"verbose\" log level.\n\n ///\n\n /// ## Parameters\n\n /// - `message`, type `impl Into<String>`: the message to be logged\n\n /// - `module`, type `Option<&'static str>`: the module the where the static method is invoked; can be\n\n /// `None`, and defaults to the `hartex_logging` module.\n\n pub fn verbose(message: impl Into<String>, module: Option<&'static str>, file: &'static str, line: u32, column: u32) {\n\n Self::log(message, level::LogLevel::Verbose, module, file, line, column)\n\n }\n\n}\n", "file_path": "hartex_logging/src/lib.rs", "rank": 95, "score": 4.876224137698504 }, { "content": " Some(module_path!()),\n\n file!(),\n\n line!(),\n\n column!()\n\n );\n\n\n\n let res = GetWhitelistedGuilds::default().await?;\n\n\n\n if !res.iter().any(|guild| {\n\n guild_id.0 == guild.GuildId\n\n }) {\n\n Logger::error(\n\n \"guild is not whitelisted\",\n\n Some(module_path!()),\n\n file!(),\n\n line!(),\n\n column!()\n\n );\n\n\n\n let guild = http.guild(guild_id).exec().await?.model().await?;\n", "file_path": "hartex_driver/src/handler.rs", "rank": 96, "score": 4.852442858402527 }, { "content": "//! # The `framework` Module\n\n//!\n\n//! 
This module contains the command framework, which glues the entire command system together.\n\n\n\nuse hartex_eventsys::{\n\n events::{\n\n Events,\n\n HarTexEvent\n\n },\n\n listener::Listeners\n\n};\n\n\n\n/// # Struct `CommandFramework`\n\n///\n\n/// The command framework.\n\n#[derive(Clone, Default)]\n\npub struct CommandFramework {\n\n listeners: Listeners<HarTexEvent>\n\n}\n\n\n", "file_path": "hartex_cmdsys/src/framework.rs", "rank": 97, "score": 4.691807520658695 }, { "content": " let message = format!(\"failed to decode base64; error: `{error:?}`\");\n\n\n\n Logger::error(\n\n &message,\n\n Some(module_path!()),\n\n file!(),\n\n line!(),\n\n column!()\n\n );\n\n\n\n return Err(HarTexError::Base64DecodeError {\n\n error\n\n });\n\n }\n\n };\n\n\n\n Logger::verbose(\n\n \"deserializing toml config...\",\n\n Some(module_path!()),\n\n file!(),\n", "file_path": "hartex_dbmani/src/guildconf.rs", "rank": 98, "score": 4.1768355158362365 }, { "content": " line!(),\n\n column!()\n\n );\n\n\n\n hartex_conftoml::from_string(match String::from_utf8(decoded) {\n\n Ok(string) => string,\n\n Err(error) => {\n\n let message = format!(\"failed to construct utf-8 string; error: `{error:?}`\");\n\n\n\n Logger::error(\n\n &message,\n\n Some(module_path!()),\n\n file!(),\n\n line!(),\n\n column!()\n\n );\n\n\n\n return Err(HarTexError::Utf8ValidationError {\n\n error\n\n });\n", "file_path": "hartex_dbmani/src/guildconf.rs", "rank": 99, "score": 4.168742923393253 } ]
Rust
bench/utils.rs
tauri-apps/benchmark_electron
09afd37775848c44c2f3712008bdb3760197e73e
use serde::Serialize; use std::{collections::HashMap, io::BufRead, path::PathBuf, process::{Command, Output, Stdio}}; pub fn root_path() -> PathBuf { PathBuf::from(env!("CARGO_MANIFEST_DIR")) } pub fn run_collect(cmd: &[&str]) -> (String, String) { let mut process_builder = Command::new(cmd[0]); process_builder .args(&cmd[1..]) .stdin(Stdio::piped()) .stdout(Stdio::piped()) .stderr(Stdio::piped()); let prog = process_builder.spawn().expect("failed to spawn script"); let Output { stdout, stderr, status, } = prog.wait_with_output().expect("failed to wait on child"); let stdout = String::from_utf8(stdout).unwrap(); let stderr = String::from_utf8(stderr).unwrap(); if !status.success() { eprintln!("stdout: <<<{}>>>", stdout); eprintln!("stderr: <<<{}>>>", stderr); panic!("Unexpected exit code: {:?}", status.code()); } (stdout, stderr) } pub fn parse_max_mem(file_path: &str) -> Option<u64> { let file = std::fs::File::open(file_path).unwrap(); let output = std::io::BufReader::new(file); let mut highest: u64 = 0; for line in output.lines() { if let Ok(line) = line { let split = line.split(" ").collect::<Vec<_>>(); if split.len() == 3 { let current_bytes = str::parse::<f64>(split[1]).unwrap() as u64 * 1024 * 1024; if current_bytes > highest { highest = current_bytes; } } } } std::fs::remove_file(file_path).unwrap(); if highest > 0 { return Some(highest); } None } #[derive(Debug, Clone, Serialize)] pub struct StraceOutput { pub percent_time: f64, pub seconds: f64, pub usecs_per_call: Option<u64>, pub calls: u64, pub errors: u64, } pub fn parse_strace_output(output: &str) -> HashMap<String, StraceOutput> { let mut summary = HashMap::new(); let mut lines = output .lines() .filter(|line| !line.is_empty() && !line.contains("detached ...")); let count = lines.clone().count(); if count < 4 { return summary; } let total_line = lines.next_back().unwrap(); lines.next_back(); let data_lines = lines.skip(2); for line in data_lines { let syscall_fields = 
line.split_whitespace().collect::<Vec<_>>(); let len = syscall_fields.len(); let syscall_name = syscall_fields.last().unwrap(); if (5..=6).contains(&len) { summary.insert( syscall_name.to_string(), StraceOutput { percent_time: str::parse::<f64>(syscall_fields[0]).unwrap(), seconds: str::parse::<f64>(syscall_fields[1]).unwrap(), usecs_per_call: Some(str::parse::<u64>(syscall_fields[2]).unwrap()), calls: str::parse::<u64>(syscall_fields[3]).unwrap(), errors: if syscall_fields.len() < 6 { 0 } else { str::parse::<u64>(syscall_fields[4]).unwrap() }, }, ); } } let total_fields = total_line.split_whitespace().collect::<Vec<_>>(); summary.insert( "total".to_string(), StraceOutput { percent_time: str::parse::<f64>(total_fields[0]).unwrap(), seconds: str::parse::<f64>(total_fields[1]).unwrap(), usecs_per_call: None, calls: str::parse::<u64>(total_fields[2]).unwrap(), errors: str::parse::<u64>(total_fields[3]).unwrap(), }, ); summary } pub fn run(cmd: &[&str]) { let mut process_builder = Command::new(cmd[0]); process_builder.args(&cmd[1..]).stdin(Stdio::piped()); let mut prog = process_builder.spawn().expect("failed to spawn script"); let status = prog.wait().expect("failed to wait on child"); if !status.success() { panic!("Unexpected exit code: {:?}", status.code()); } }
use serde::Serialize; use std::{collections::HashMap, io::BufRead, path::PathBuf, process::{Command, Output, Stdio}}; pub fn root_path() -> PathBuf { PathBuf::from(env!("CARGO_MANIFEST_DIR")) } pub fn run_collect(cmd: &[&str]) -> (String, String) { let mut process_builder = Command::new(cmd[0]); process_builder .args(&cmd[1..]) .stdin(Stdio::piped()) .stdout(Stdio::piped()) .stderr(Stdio::piped()); let prog = process_builder.spawn().expect("failed to spawn script"); let Output { stdout, stderr, status, } = prog.wait_with_output().expect("failed to wait on child"); let stdout = String::from_utf8(stdout).unwrap(); let stderr = String::from_utf8(stderr).unwrap(); if !status.success() { eprintln!("stdout: <<<{}>>>", stdout); eprintln!("stderr: <<<{}>>>", stderr); panic!("Unexpected exit code: {:?}", status.code()); } (stdout, stderr) }
#[derive(Debug, Clone, Serialize)] pub struct StraceOutput { pub percent_time: f64, pub seconds: f64, pub usecs_per_call: Option<u64>, pub calls: u64, pub errors: u64, } pub fn parse_strace_output(output: &str) -> HashMap<String, StraceOutput> { let mut summary = HashMap::new(); let mut lines = output .lines() .filter(|line| !line.is_empty() && !line.contains("detached ...")); let count = lines.clone().count(); if count < 4 { return summary; } let total_line = lines.next_back().unwrap(); lines.next_back(); let data_lines = lines.skip(2); for line in data_lines { let syscall_fields = line.split_whitespace().collect::<Vec<_>>(); let len = syscall_fields.len(); let syscall_name = syscall_fields.last().unwrap(); if (5..=6).contains(&len) { summary.insert( syscall_name.to_string(), StraceOutput { percent_time: str::parse::<f64>(syscall_fields[0]).unwrap(), seconds: str::parse::<f64>(syscall_fields[1]).unwrap(), usecs_per_call: Some(str::parse::<u64>(syscall_fields[2]).unwrap()), calls: str::parse::<u64>(syscall_fields[3]).unwrap(), errors: if syscall_fields.len() < 6 { 0 } else { str::parse::<u64>(syscall_fields[4]).unwrap() }, }, ); } } let total_fields = total_line.split_whitespace().collect::<Vec<_>>(); summary.insert( "total".to_string(), StraceOutput { percent_time: str::parse::<f64>(total_fields[0]).unwrap(), seconds: str::parse::<f64>(total_fields[1]).unwrap(), usecs_per_call: None, calls: str::parse::<u64>(total_fields[2]).unwrap(), errors: str::parse::<u64>(total_fields[3]).unwrap(), }, ); summary } pub fn run(cmd: &[&str]) { let mut process_builder = Command::new(cmd[0]); process_builder.args(&cmd[1..]).stdin(Stdio::piped()); let mut prog = process_builder.spawn().expect("failed to spawn script"); let status = prog.wait().expect("failed to wait on child"); if !status.success() { panic!("Unexpected exit code: {:?}", status.code()); } }
pub fn parse_max_mem(file_path: &str) -> Option<u64> { let file = std::fs::File::open(file_path).unwrap(); let output = std::io::BufReader::new(file); let mut highest: u64 = 0; for line in output.lines() { if let Ok(line) = line { let split = line.split(" ").collect::<Vec<_>>(); if split.len() == 3 { let current_bytes = str::parse::<f64>(split[1]).unwrap() as u64 * 1024 * 1024; if current_bytes > highest { highest = current_bytes; } } } } std::fs::remove_file(file_path).unwrap(); if highest > 0 { return Some(highest); } None }
function_block-full_function
[ { "content": "fn run_exec_time() -> Result<HashMap<String, HashMap<String, f64>>> {\n\n let benchmark_file = root_path().join(\"hyperfine_results.json\");\n\n let benchmark_file = benchmark_file.to_str().unwrap();\n\n\n\n let mut command = [\n\n \"hyperfine\",\n\n \"--export-json\",\n\n benchmark_file,\n\n \"--warmup\",\n\n \"3\",\n\n ]\n\n .iter()\n\n .map(|s| s.to_string())\n\n .collect::<Vec<_>>();\n\n\n\n for (_, example_exe, _return_code) in EXEC_TIME_BENCHMARKS {\n\n command.push(\n\n utils::root_path()\n\n .join(example_exe)\n\n .to_str()\n", "file_path": "bench/main.rs", "rank": 5, "score": 70336.69915645444 }, { "content": "fn read_json(filename: &str) -> Result<Value> {\n\n let f = fs::File::open(filename)?;\n\n Ok(serde_json::from_reader(f)?)\n\n}\n\n\n", "file_path": "bench/main.rs", "rank": 6, "score": 66540.84650999992 }, { "content": "fn get_binary_sizes() -> Result<HashMap<String, u64>> {\n\n let mut sizes = HashMap::<String, u64>::new();\n\n // add size for all EXEC_TIME_BENCHMARKS\n\n for (name, example_exe, _) in EXEC_TIME_BENCHMARKS {\n\n let meta = std::fs::metadata(example_exe).unwrap();\n\n sizes.insert(name.to_string(), meta.len());\n\n }\n\n\n\n Ok(sizes)\n\n}\n\n\n\nconst RESULT_KEYS: &[&str] = &[\"mean\", \"stddev\", \"user\", \"system\", \"min\", \"max\"];\n\n\n", "file_path": "bench/main.rs", "rank": 7, "score": 64603.84069957845 }, { "content": "fn run_strace_benchmarks(new_data: &mut BenchResult) -> Result<()> {\n\n use std::io::Read;\n\n\n\n let mut thread_count = HashMap::<String, u64>::new();\n\n let mut syscall_count = HashMap::<String, u64>::new();\n\n\n\n for (name, example_exe, _) in EXEC_TIME_BENCHMARKS {\n\n let mut file = tempfile::NamedTempFile::new()?;\n\n\n\n Command::new(\"strace\")\n\n .args(&[\n\n \"-c\",\n\n \"-f\",\n\n \"-o\",\n\n file.path().to_str().unwrap(),\n\n utils::root_path().join(example_exe).to_str().unwrap(),\n\n ])\n\n .stdout(Stdio::inherit())\n\n .spawn()?\n\n .wait()?;\n", "file_path": 
"bench/main.rs", "rank": 8, "score": 64430.308763834444 }, { "content": "fn run_max_mem_benchmark() -> Result<HashMap<String, u64>> {\n\n let mut results = HashMap::<String, u64>::new();\n\n\n\n for (name, example_exe, _) in EXEC_TIME_BENCHMARKS {\n\n let benchmark_file = utils::root_path().join(format!(\"mprof{}_.dat\", name));\n\n let benchmark_file = benchmark_file.to_str().unwrap();\n\n\n\n let proc = Command::new(\"mprof\")\n\n .args(&[\n\n \"run\",\n\n \"-C\",\n\n \"-o\",\n\n benchmark_file,\n\n utils::root_path().join(example_exe).to_str().unwrap(),\n\n ])\n\n .stdout(Stdio::null())\n\n .stderr(Stdio::piped())\n\n .spawn()?;\n\n\n\n let proc_result = proc.wait_with_output()?;\n\n println!(\"{:?}\", proc_result);\n\n results.insert(\n\n name.to_string(),\n\n utils::parse_max_mem(&benchmark_file).unwrap(),\n\n );\n\n }\n\n\n\n Ok(results)\n\n}\n\n\n", "file_path": "bench/main.rs", "rank": 9, "score": 63605.893079979614 }, { "content": "fn write_json(filename: &str, value: &Value) -> Result<()> {\n\n let f = fs::File::create(filename)?;\n\n serde_json::to_writer(f, value)?;\n\n Ok(())\n\n}\n\n\n\n/// The list of the examples of the benchmark name, arguments and return code\n\nconst EXEC_TIME_BENCHMARKS: &[(&str, &str, Option<i32>)] = &[\n\n (\n\n \"electron_hello_world\",\n\n \"apps/hello_world/out/startup-electron-linux-x64/startup-electron\",\n\n None,\n\n ),\n\n (\n\n \"electron_cpu_intensive\",\n\n \"apps/cpu_intensive/out/cpu-intensive-linux-x64/cpu-intensive\",\n\n None,\n\n ),\n\n (\n\n \"electron_3mb_transfer\",\n\n \"apps/file_transfer/out/file-transfer-linux-x64/file-transfer\",\n\n None,\n\n ),\n\n];\n\n\n", "file_path": "bench/main.rs", "rank": 10, "score": 63449.39549595604 }, { "content": "fn root_dir() -> PathBuf {\n\n PathBuf::from(env!(\"CARGO_MANIFEST_DIR\"))\n\n .parent()\n\n .unwrap()\n\n .parent()\n\n .unwrap()\n\n .to_path_buf()\n\n}\n\n\n", "file_path": "bench/tools/src/build_benchmark_jsons.rs", "rank": 11, "score": 52961.2266909652 }, { 
"content": "fn write_json(filename: &PathBuf, value: &Value) {\n\n let f = File::create(filename).expect(\"Unable to create file\");\n\n serde_json::to_writer(f, value).expect(\"Unable to write json\");\n\n}\n\n\n", "file_path": "bench/tools/src/build_benchmark_jsons.rs", "rank": 12, "score": 42537.97768470565 }, { "content": "fn main() {\n\n let electron_data = root_dir().join(\"gh-pages\").join(\"electron-data.json\");\n\n let electron_recent = root_dir().join(\"gh-pages\").join(\"electron-recent.json\");\n\n\n\n // current data\n\n let current_data_buffer = BufReader::new(\n\n File::open(root_dir().join(\"bench.json\")).expect(\"Unable to read current data file\"),\n\n );\n\n let current_data: BenchResult =\n\n serde_json::from_reader(current_data_buffer).expect(\"Unable to read current data buffer\");\n\n\n\n // all data's\n\n let all_data_buffer =\n\n BufReader::new(File::open(&electron_data).expect(\"Unable to read all data file\"));\n\n let mut all_data: Vec<BenchResult> =\n\n serde_json::from_reader(all_data_buffer).expect(\"Unable to read all data buffer\");\n\n\n\n // add current data to alls data\n\n all_data.push(current_data);\n\n\n", "file_path": "bench/tools/src/build_benchmark_jsons.rs", "rank": 13, "score": 39222.58049426545 }, { "content": "fn main() -> Result<()> {\n\n if !env::args().any(|s| s == \"--bench\") {\n\n return Ok(());\n\n }\n\n\n\n println!(\"Starting electron benchmark\");\n\n\n\n env::set_current_dir(&utils::root_path())?;\n\n\n\n println!(\"{:?}\", &utils::root_path());\n\n\n\n let mut new_data = BenchResult {\n\n created_at: chrono::Utc::now().to_rfc3339_opts(chrono::SecondsFormat::Secs, true),\n\n sha1: utils::run_collect(&[\"git\", \"rev-parse\", \"HEAD\"])\n\n .0\n\n .trim()\n\n .to_string(),\n\n exec_time: run_exec_time()?,\n\n binary_size: get_binary_sizes()?,\n\n ..Default::default()\n", "file_path": "bench/main.rs", "rank": 14, "score": 38890.02364385151 }, { "content": "\n\n let mut output = String::new();\n\n 
file.as_file_mut().read_to_string(&mut output)?;\n\n\n\n let strace_result = utils::parse_strace_output(&output);\n\n let clone = strace_result.get(\"clone\").map(|d| d.calls).unwrap_or(0) + 1;\n\n let total = strace_result.get(\"total\").unwrap().calls;\n\n thread_count.insert(name.to_string(), clone);\n\n syscall_count.insert(name.to_string(), total);\n\n }\n\n\n\n new_data.thread_count = thread_count;\n\n new_data.syscall_count = syscall_count;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "bench/main.rs", "rank": 15, "score": 5.77917497193861 }, { "content": "// Copyright 2019-2021 Tauri Programme within The Commons Conservancy\n\n// SPDX-License-Identifier: Apache-2.0\n\n// SPDX-License-Identifier: MIT\n\n\n\nuse anyhow::Result;\n\nuse serde::Serialize;\n\nuse serde_json::Value;\n\nuse std::{\n\n collections::HashMap,\n\n env, fs,\n\n process::{Command, Stdio},\n\n};\n\nuse utils::root_path;\n\n\n\nmod utils;\n\n\n", "file_path": "bench/main.rs", "rank": 20, "score": 3.5813619939588963 }, { "content": " name.to_string(),\n\n data.into_iter()\n\n .filter(|(key, _)| RESULT_KEYS.contains(&key.as_str()))\n\n .map(|(key, val)| (key, val.as_f64().unwrap()))\n\n .collect(),\n\n );\n\n }\n\n\n\n Ok(results)\n\n}\n\n\n", "file_path": "bench/main.rs", "rank": 21, "score": 3.1098325514285348 }, { "content": " };\n\n\n\n if cfg!(target_os = \"linux\") {\n\n run_strace_benchmarks(&mut new_data)?;\n\n new_data.max_memory = run_max_mem_benchmark()?;\n\n }\n\n\n\n println!(\"===== <BENCHMARK RESULTS>\");\n\n serde_json::to_writer_pretty(std::io::stdout(), &new_data)?;\n\n println!(\"\\n===== </BENCHMARK RESULTS>\");\n\n\n\n if let Some(filename) = root_path().join(\"bench.json\").to_str() {\n\n write_json(filename, &serde_json::to_value(&new_data)?)?;\n\n } else {\n\n eprintln!(\"Cannot write bench.json, path is invalid\");\n\n }\n\n\n\n Ok(())\n\n}\n", "file_path": "bench/main.rs", "rank": 22, "score": 3.084265842951874 }, { "content": " .unwrap()\n\n .to_string(),\n\n );\n\n 
}\n\n\n\n utils::run(&command.iter().map(|s| s.as_ref()).collect::<Vec<_>>());\n\n\n\n let mut results = HashMap::<String, HashMap<String, f64>>::new();\n\n let hyperfine_results = read_json(benchmark_file)?;\n\n for ((name, _, _), data) in EXEC_TIME_BENCHMARKS.iter().zip(\n\n hyperfine_results\n\n .as_object()\n\n .unwrap()\n\n .get(\"results\")\n\n .unwrap()\n\n .as_array()\n\n .unwrap(),\n\n ) {\n\n let data = data.as_object().unwrap().clone();\n\n results.insert(\n", "file_path": "bench/main.rs", "rank": 23, "score": 2.9672483968512515 }, { "content": "// Copyright 2019-2021 Tauri Programme within The Commons Conservancy\n\n// SPDX-License-Identifier: Apache-2.0\n\n// SPDX-License-Identifier: MIT\n\n\n\nuse serde::{Deserialize, Serialize};\n\nuse serde_json::Value;\n\nuse std::{collections::HashMap, fs::File, io::BufReader, path::PathBuf};\n\n\n", "file_path": "bench/tools/src/build_benchmark_jsons.rs", "rank": 24, "score": 2.0982742805161947 }, { "content": "# CPU\n\n\n\nThe CPU challenge measures how much time it takes to calculate all the prime numbers under XXXX wihtout blocking the UI and reporting how many have been found so far.\n\n\n\n### Acknowledgement\n\n\n", "file_path": "apps/cpu_intensive/README.md", "rank": 25, "score": 1.2015215464102529 }, { "content": " // use only latest 20 elements from alls data\n\n let recent: Vec<BenchResult>;\n\n if all_data.len() > 20 {\n\n recent = all_data[all_data.len() - 20..].to_vec();\n\n } else {\n\n recent = all_data.clone();\n\n }\n\n\n\n write_json(\n\n &electron_data,\n\n &serde_json::to_value(&all_data).expect(\"Unable to build final json (alls)\"),\n\n );\n\n write_json(\n\n &electron_recent,\n\n &serde_json::to_value(&recent).expect(\"Unable to build final json (recent)\"),\n\n );\n\n}\n", "file_path": "bench/tools/src/build_benchmark_jsons.rs", "rank": 26, "score": 1.1006815649091666 }, { "content": "# Benchmarks Results\n\n\n\n[![Chat 
Server](https://img.shields.io/badge/chat-on%20discord-7289da.svg)](https://discord.gg/SpmNs4S)\n\n[![devto](https://img.shields.io/badge/blog-dev.to-black.svg)](https://dev.to/tauri)\n\n[![devto](https://img.shields.io/badge/documentation-tauri.studio-purple.svg)](https://tauri.studio/docs/getting-started/intro)\n\n[![https://good-labs.github.io/greater-good-affirmation/assets/images/badge.svg](https://good-labs.github.io/greater-good-affirmation/assets/images/badge.svg)](https://good-labs.github.io/greater-good-affirmation)\n\n[![support](https://img.shields.io/badge/sponsor-open%20collective-blue.svg)](https://opencollective.com/tauri)\n\n\n\nAll benchmarks run on Github Actions on `ubuntu-latest` matrix. We measure various metrics of the following applications:\n\n\n\n- [electron_cpu_intensive](https://github.com/tauri-apps/benchmark_electron/tree/dev/apps/cpu_intensive)\n\n- [electron_file_transfer](https://github.com/tauri-apps/benchmark_electron/tree/dev/apps/file_transfer)\n\n- [electron_hello_world](https://github.com/tauri-apps/benchmark_electron/tree/dev/apps/hello_world)\n\n\n\n### Acknowledgement\n\n\n", "file_path": "README.md", "rank": 27, "score": 0.4595890575256254 } ]
Rust
tests/src/tests.rs
duanyytop/ckb-passport-lock
e1fb7df2eea0bf35abfe3cb349948754345debf8
use super::*; use ckb_testtool::context::Context; use ckb_tool::ckb_hash::{new_blake2b, blake2b_256}; use ckb_tool::ckb_types::{ bytes::Bytes, core::{TransactionBuilder, TransactionView}, packed::{self, *}, prelude::*, }; use ckb_tool::ckb_error::assert_error_eq; use ckb_tool::ckb_script::ScriptError; use openssl::hash::MessageDigest; use openssl::pkey::{PKey, Private, Public}; use openssl::rsa::Rsa; use openssl::sign::{Signer, Verifier}; use std::fs; const MAX_CYCLES: u64 = 70_000_000; const ERROR_ISO97962_INVALID_ARG9: i8 = 17; const WRONG_PUB_KEY: i8 = 6; const MESSAGE_SINGLE_SIZE: usize = 8; const SUB_SIGNATURE_SIZE: usize = 128; const TX_SIGNATURE_SIZE: usize = 512; const SIGN_INFO_SIZE: usize = 648; const ISO9796_2_ALGORITHM_ID: u8 = 2; const ISO9796_2_KEY_SIZE: u8 = 1; const ISO9796_2_PADDING: u8 = 0; const ISO9796_2_MD_SHA1: u8 = 4; fn blake160(data: &[u8]) -> [u8; 20] { let mut buf = [0u8; 20]; let hash = blake2b_256(data); buf.clone_from_slice(&hash[..20]); buf } fn sign_tx( tx: TransactionView, private_key: &PKey<Private>, public_key: &PKey<Public>, is_pub_key_hash_error: bool ) -> TransactionView { let witnesses_len = tx.witnesses().len(); let tx_hash = tx.hash(); let mut signed_witnesses: Vec<packed::Bytes> = Vec::new(); let mut blake2b = new_blake2b(); let mut message = [0u8; 32]; blake2b.update(&tx_hash.raw_data()); let witness = WitnessArgs::default(); let zero_lock: Bytes = { let mut buf = Vec::new(); buf.resize(SIGN_INFO_SIZE, 0); buf.into() }; let witness_for_digest = witness .clone() .as_builder() .lock(Some(zero_lock).pack()) .build(); let witness_len = witness_for_digest.as_bytes().len() as u64; blake2b.update(&witness_len.to_le_bytes()); blake2b.update(&witness_for_digest.as_bytes()); (1..witnesses_len).for_each(|n| { let witness = tx.witnesses().get(n).unwrap(); let witness_len = witness.raw_data().len() as u64; blake2b.update(&witness_len.to_le_bytes()); blake2b.update(&witness.raw_data()); }); blake2b.finalize(&mut message); let mut 
rsa_signature = [0u8; TX_SIGNATURE_SIZE]; for index in 0..4 { let mut signer = Signer::new(MessageDigest::sha1(), &private_key).unwrap(); signer.update(&message[MESSAGE_SINGLE_SIZE * index..MESSAGE_SINGLE_SIZE * (index + 1)]).unwrap(); rsa_signature[SUB_SIGNATURE_SIZE * index..SUB_SIGNATURE_SIZE * (index + 1)].copy_from_slice(&signer.sign_to_vec().unwrap()); } let mut signed_signature = rsa_signature.clone().to_vec(); let (mut rsa_info, _) = compute_pub_key_hash(public_key, is_pub_key_hash_error); signed_signature.append(&mut rsa_info); for index in 0..4 { let mut verifier = Verifier::new(MessageDigest::sha1(), &public_key).unwrap(); verifier.update(&message[MESSAGE_SINGLE_SIZE * index..MESSAGE_SINGLE_SIZE * (index + 1)]).unwrap(); assert!(verifier.verify(&rsa_signature[SUB_SIGNATURE_SIZE * index..SUB_SIGNATURE_SIZE * (index + 1)]).unwrap()); } signed_witnesses.push( witness .as_builder() .lock(Some(Bytes::from(signed_signature)).pack()) .build() .as_bytes() .pack(), ); for i in 1..witnesses_len { signed_witnesses.push(tx.witnesses().get(i).unwrap()); } tx.as_advanced_builder() .set_witnesses(signed_witnesses) .build() } fn compute_pub_key_hash(public_key: &PKey<Public>, is_pub_key_hash_error: bool) -> (Vec<u8>, Vec<u8>) { let mut result: Vec<u8> = vec![]; result.extend_from_slice(&[ISO9796_2_ALGORITHM_ID, ISO9796_2_KEY_SIZE, ISO9796_2_PADDING, ISO9796_2_MD_SHA1]); let rsa_public_key = public_key.rsa().unwrap(); let mut e = if is_pub_key_hash_error { let mut vec = rsa_public_key.e().to_vec(); vec.insert(0, 1); vec } else { rsa_public_key.e().to_vec() }; let mut n = rsa_public_key.n().to_vec(); e.reverse(); n.reverse(); while e.len() < 4 { e.push(0); } while n.len() < 128 { n.push(0); } result.append(&mut e); result.append(&mut n); let h = blake160(&result).into(); (result, h) } fn generate_random_key() -> (PKey<Private>, PKey<Public>) { let rsa = Rsa::generate(1024).unwrap(); let private_key = PKey::from_rsa(rsa).unwrap(); let public_key_pem: Vec<u8> = 
private_key.public_key_to_pem().unwrap(); let public_key = PKey::public_key_from_pem(&public_key_pem).unwrap(); (private_key, public_key) } #[test] fn test_wrong_signature() { let (private_key, public_key) = generate_random_key(); let mut context = Context::default(); let contract_bin: Bytes = Loader::default().load_binary("ckb-passport-lock"); let out_point = context.deploy_cell(contract_bin); let rsa_bin: Bytes = fs::read("../ckb-production-scripts/build/validate_signature_rsa") .expect("load rsa") .into(); let rsa_out_point = context.deploy_cell(rsa_bin); let rsa_dep = CellDep::new_builder().out_point(rsa_out_point).build(); let (_, public_key_hash) = compute_pub_key_hash(&public_key, false); let lock_script = context .build_script(&out_point, public_key_hash.into()) .expect("script"); let lock_script_dep = CellDep::new_builder().out_point(out_point).build(); let input_out_point1 = context.create_cell( CellOutput::new_builder() .capacity(1000u64.pack()) .lock(lock_script.clone()) .build(), Bytes::new(), ); let input_out_point2 = context.create_cell( CellOutput::new_builder() .capacity(300u64.pack()) .lock(lock_script.clone()) .build(), Bytes::new(), ); let inputs = vec![ CellInput::new_builder() .previous_output(input_out_point1) .build(), CellInput::new_builder() .previous_output(input_out_point2) .build(), ]; let outputs = vec![ CellOutput::new_builder() .capacity(500u64.pack()) .lock(lock_script.clone()) .build(), CellOutput::new_builder() .capacity(800u64.pack()) .lock(lock_script) .build(), ]; let outputs_data = vec![Bytes::new(); 2]; let mut witnesses = vec![]; for _ in 0..inputs.len() { witnesses.push(Bytes::new()) } let tx = TransactionBuilder::default() .inputs(inputs) .outputs(outputs) .outputs_data(outputs_data.pack()) .cell_dep(lock_script_dep) .cell_dep(rsa_dep) .witnesses(witnesses.pack()) .build(); let tx = context.complete_tx(tx); let tx = sign_tx(tx, &private_key, &public_key, false); let err = context.verify_tx(&tx, MAX_CYCLES).unwrap_err(); 
let script_cell_index = 0; assert_error_eq!( err, ScriptError::ValidationFailure(ERROR_ISO97962_INVALID_ARG9).input_lock_script(script_cell_index) ); } #[test] fn test_wrong_pub_key() { let (private_key, public_key) = generate_random_key(); let mut context = Context::default(); let contract_bin: Bytes = Loader::default().load_binary("ckb-passport-lock"); let out_point = context.deploy_cell(contract_bin); let rsa_bin: Bytes = fs::read("../ckb-production-scripts/build/validate_signature_rsa") .expect("load rsa") .into(); let rsa_out_point = context.deploy_cell(rsa_bin); let rsa_dep = CellDep::new_builder().out_point(rsa_out_point).build(); let (_, public_key_hash) = compute_pub_key_hash(&public_key, false); let lock_script = context .build_script(&out_point, public_key_hash.into()) .expect("script"); let lock_script_dep = CellDep::new_builder().out_point(out_point).build(); let input_out_point1 = context.create_cell( CellOutput::new_builder() .capacity(1000u64.pack()) .lock(lock_script.clone()) .build(), Bytes::new(), ); let input_out_point2 = context.create_cell( CellOutput::new_builder() .capacity(300u64.pack()) .lock(lock_script.clone()) .build(), Bytes::new(), ); let inputs = vec![ CellInput::new_builder() .previous_output(input_out_point1) .build(), CellInput::new_builder() .previous_output(input_out_point2) .build(), ]; let outputs = vec![ CellOutput::new_builder() .capacity(500u64.pack()) .lock(lock_script.clone()) .build(), CellOutput::new_builder() .capacity(800u64.pack()) .lock(lock_script) .build(), ]; let outputs_data = vec![Bytes::new(); 2]; let mut witnesses = vec![]; for _ in 0..inputs.len() { witnesses.push(Bytes::new()) } let tx = TransactionBuilder::default() .inputs(inputs) .outputs(outputs) .outputs_data(outputs_data.pack()) .cell_dep(lock_script_dep) .cell_dep(rsa_dep) .witnesses(witnesses.pack()) .build(); let tx = context.complete_tx(tx); let tx = sign_tx(tx, &private_key, &public_key, true); let err = context.verify_tx(&tx, 
MAX_CYCLES).unwrap_err(); let script_cell_index = 0; assert_error_eq!( err, ScriptError::ValidationFailure(WRONG_PUB_KEY).input_lock_script(script_cell_index) ); }
use super::*; use ckb_testtool::context::Context; use ckb_tool::ckb_hash::{new_blake2b, blake2b_256}; use ckb_tool::ckb_types::{ bytes::Bytes, core::{TransactionBuilder, TransactionView}, packed::{self, *}, prelude::*, }; use ckb_tool::ckb_error::assert_error_eq; use ckb_tool::ckb_script::ScriptError; use openssl::hash::MessageDigest; use openssl::pkey::{PKey, Private, Public}; use openssl::rsa::Rsa; use openssl::sign::{Signer, Verifier}; use std::fs; const MAX_CYCLES: u64 = 70_000_000; const ERROR_ISO97962_INVALID_ARG9: i8 = 17; const WRONG_PUB_KEY: i8 = 6; const MESSAGE_SINGLE_SIZE: usize = 8; const SUB_SIGNATURE_SIZE: usize = 128; const TX_SIGNATURE_SIZE: usize = 512; const SIGN_INFO_SIZE: usize = 648; const ISO9796_2_ALGORITHM_ID: u8 = 2; const ISO9796_2_KEY_SIZE: u8 = 1; const ISO9796_2_PADDING: u8 = 0; const ISO9796_2_MD_SHA1: u8 = 4; fn blake160(data: &[u8]) -> [u8; 20] { let mut buf = [0u8; 20]; let hash = blake2b_256(data); buf.clone_from_slice(&hash[..20]); buf } fn sign_tx( tx: TransactionView, private_key: &PKey<Private>, public_key: &PKey<Public>, is_pub_key_hash_error: bool ) -> TransactionView { let witnesses_len = tx.witnesses().len(); let tx_hash = tx.hash(); let mut signed_witnesses: Vec<packed::Bytes> = Vec::new(); let mut blake2b = new_blake2b(); let mut message = [0u8; 32]; blake2b.update(&tx_hash.raw_data()); let witness =
dep) .cell_dep(rsa_dep) .witnesses(witnesses.pack()) .build(); let tx = context.complete_tx(tx); let tx = sign_tx(tx, &private_key, &public_key, true); let err = context.verify_tx(&tx, MAX_CYCLES).unwrap_err(); let script_cell_index = 0; assert_error_eq!( err, ScriptError::ValidationFailure(WRONG_PUB_KEY).input_lock_script(script_cell_index) ); }
WitnessArgs::default(); let zero_lock: Bytes = { let mut buf = Vec::new(); buf.resize(SIGN_INFO_SIZE, 0); buf.into() }; let witness_for_digest = witness .clone() .as_builder() .lock(Some(zero_lock).pack()) .build(); let witness_len = witness_for_digest.as_bytes().len() as u64; blake2b.update(&witness_len.to_le_bytes()); blake2b.update(&witness_for_digest.as_bytes()); (1..witnesses_len).for_each(|n| { let witness = tx.witnesses().get(n).unwrap(); let witness_len = witness.raw_data().len() as u64; blake2b.update(&witness_len.to_le_bytes()); blake2b.update(&witness.raw_data()); }); blake2b.finalize(&mut message); let mut rsa_signature = [0u8; TX_SIGNATURE_SIZE]; for index in 0..4 { let mut signer = Signer::new(MessageDigest::sha1(), &private_key).unwrap(); signer.update(&message[MESSAGE_SINGLE_SIZE * index..MESSAGE_SINGLE_SIZE * (index + 1)]).unwrap(); rsa_signature[SUB_SIGNATURE_SIZE * index..SUB_SIGNATURE_SIZE * (index + 1)].copy_from_slice(&signer.sign_to_vec().unwrap()); } let mut signed_signature = rsa_signature.clone().to_vec(); let (mut rsa_info, _) = compute_pub_key_hash(public_key, is_pub_key_hash_error); signed_signature.append(&mut rsa_info); for index in 0..4 { let mut verifier = Verifier::new(MessageDigest::sha1(), &public_key).unwrap(); verifier.update(&message[MESSAGE_SINGLE_SIZE * index..MESSAGE_SINGLE_SIZE * (index + 1)]).unwrap(); assert!(verifier.verify(&rsa_signature[SUB_SIGNATURE_SIZE * index..SUB_SIGNATURE_SIZE * (index + 1)]).unwrap()); } signed_witnesses.push( witness .as_builder() .lock(Some(Bytes::from(signed_signature)).pack()) .build() .as_bytes() .pack(), ); for i in 1..witnesses_len { signed_witnesses.push(tx.witnesses().get(i).unwrap()); } tx.as_advanced_builder() .set_witnesses(signed_witnesses) .build() } fn compute_pub_key_hash(public_key: &PKey<Public>, is_pub_key_hash_error: bool) -> (Vec<u8>, Vec<u8>) { let mut result: Vec<u8> = vec![]; result.extend_from_slice(&[ISO9796_2_ALGORITHM_ID, ISO9796_2_KEY_SIZE, ISO9796_2_PADDING, 
ISO9796_2_MD_SHA1]); let rsa_public_key = public_key.rsa().unwrap(); let mut e = if is_pub_key_hash_error { let mut vec = rsa_public_key.e().to_vec(); vec.insert(0, 1); vec } else { rsa_public_key.e().to_vec() }; let mut n = rsa_public_key.n().to_vec(); e.reverse(); n.reverse(); while e.len() < 4 { e.push(0); } while n.len() < 128 { n.push(0); } result.append(&mut e); result.append(&mut n); let h = blake160(&result).into(); (result, h) } fn generate_random_key() -> (PKey<Private>, PKey<Public>) { let rsa = Rsa::generate(1024).unwrap(); let private_key = PKey::from_rsa(rsa).unwrap(); let public_key_pem: Vec<u8> = private_key.public_key_to_pem().unwrap(); let public_key = PKey::public_key_from_pem(&public_key_pem).unwrap(); (private_key, public_key) } #[test] fn test_wrong_signature() { let (private_key, public_key) = generate_random_key(); let mut context = Context::default(); let contract_bin: Bytes = Loader::default().load_binary("ckb-passport-lock"); let out_point = context.deploy_cell(contract_bin); let rsa_bin: Bytes = fs::read("../ckb-production-scripts/build/validate_signature_rsa") .expect("load rsa") .into(); let rsa_out_point = context.deploy_cell(rsa_bin); let rsa_dep = CellDep::new_builder().out_point(rsa_out_point).build(); let (_, public_key_hash) = compute_pub_key_hash(&public_key, false); let lock_script = context .build_script(&out_point, public_key_hash.into()) .expect("script"); let lock_script_dep = CellDep::new_builder().out_point(out_point).build(); let input_out_point1 = context.create_cell( CellOutput::new_builder() .capacity(1000u64.pack()) .lock(lock_script.clone()) .build(), Bytes::new(), ); let input_out_point2 = context.create_cell( CellOutput::new_builder() .capacity(300u64.pack()) .lock(lock_script.clone()) .build(), Bytes::new(), ); let inputs = vec![ CellInput::new_builder() .previous_output(input_out_point1) .build(), CellInput::new_builder() .previous_output(input_out_point2) .build(), ]; let outputs = vec![ 
CellOutput::new_builder() .capacity(500u64.pack()) .lock(lock_script.clone()) .build(), CellOutput::new_builder() .capacity(800u64.pack()) .lock(lock_script) .build(), ]; let outputs_data = vec![Bytes::new(); 2]; let mut witnesses = vec![]; for _ in 0..inputs.len() { witnesses.push(Bytes::new()) } let tx = TransactionBuilder::default() .inputs(inputs) .outputs(outputs) .outputs_data(outputs_data.pack()) .cell_dep(lock_script_dep) .cell_dep(rsa_dep) .witnesses(witnesses.pack()) .build(); let tx = context.complete_tx(tx); let tx = sign_tx(tx, &private_key, &public_key, false); let err = context.verify_tx(&tx, MAX_CYCLES).unwrap_err(); let script_cell_index = 0; assert_error_eq!( err, ScriptError::ValidationFailure(ERROR_ISO97962_INVALID_ARG9).input_lock_script(script_cell_index) ); } #[test] fn test_wrong_pub_key() { let (private_key, public_key) = generate_random_key(); let mut context = Context::default(); let contract_bin: Bytes = Loader::default().load_binary("ckb-passport-lock"); let out_point = context.deploy_cell(contract_bin); let rsa_bin: Bytes = fs::read("../ckb-production-scripts/build/validate_signature_rsa") .expect("load rsa") .into(); let rsa_out_point = context.deploy_cell(rsa_bin); let rsa_dep = CellDep::new_builder().out_point(rsa_out_point).build(); let (_, public_key_hash) = compute_pub_key_hash(&public_key, false); let lock_script = context .build_script(&out_point, public_key_hash.into()) .expect("script"); let lock_script_dep = CellDep::new_builder().out_point(out_point).build(); let input_out_point1 = context.create_cell( CellOutput::new_builder() .capacity(1000u64.pack()) .lock(lock_script.clone()) .build(), Bytes::new(), ); let input_out_point2 = context.create_cell( CellOutput::new_builder() .capacity(300u64.pack()) .lock(lock_script.clone()) .build(), Bytes::new(), ); let inputs = vec![ CellInput::new_builder() .previous_output(input_out_point1) .build(), CellInput::new_builder() .previous_output(input_out_point2) .build(), ]; let outputs 
= vec![ CellOutput::new_builder() .capacity(500u64.pack()) .lock(lock_script.clone()) .build(), CellOutput::new_builder() .capacity(800u64.pack()) .lock(lock_script) .build(), ]; let outputs_data = vec![Bytes::new(); 2]; let mut witnesses = vec![]; for _ in 0..inputs.len() { witnesses.push(Bytes::new()) } let tx = TransactionBuilder::default() .inputs(inputs) .outputs(outputs) .outputs_data(outputs_data.pack()) .cell_dep(lock_script_
random
[ { "content": "pub fn blake2b_256<T: AsRef<[u8]>>(s: T) -> [u8; 32] {\n\n if s.as_ref().is_empty() {\n\n return BLANK_HASH;\n\n }\n\n inner_blake2b_256(s)\n\n}\n\n\n", "file_path": "contracts/ckb-passport-lock/src/entry/hash.rs", "rank": 1, "score": 108113.07493456306 }, { "content": "fn inner_blake2b_256<T: AsRef<[u8]>>(s: T) -> [u8; 32] {\n\n let mut result = [0u8; 32];\n\n let mut blake2b = new_blake2b();\n\n blake2b.update(s.as_ref());\n\n blake2b.finalize(&mut result);\n\n result\n\n}\n\n\n", "file_path": "contracts/ckb-passport-lock/src/entry/hash.rs", "rank": 2, "score": 97962.85075657169 }, { "content": "pub fn blake2b_160<T: AsRef<[u8]>>(s: T) -> [u8; 20] {\n\n let mut result = [0u8; 20];\n\n let hash = blake2b_256(s);\n\n result.copy_from_slice(&hash[0..20]);\n\n result\n\n}\n\n\n", "file_path": "contracts/ckb-passport-lock/src/entry/hash.rs", "rank": 3, "score": 96585.01934223046 }, { "content": "pub fn new_blake2b() -> Blake2b {\n\n Blake2bBuilder::new(32)\n\n .personal(CKB_HASH_PERSONALIZATION)\n\n .build()\n\n}\n\n\n", "file_path": "contracts/ckb-passport-lock/src/entry/hash.rs", "rank": 4, "score": 95711.74432062534 }, { "content": "pub fn new_blake2b() -> Blake2b {\n\n Blake2bBuilder::new(32)\n\n .personal(CKB_HASH_PERSONALIZATION)\n\n .build()\n\n}", "file_path": "ckb-lib-rsa/build.rs", "rank": 5, "score": 82283.49185857932 }, { "content": "fn generate_message() -> Result<[u8; 32], Error> {\n\n let witness_args = load_witness_args(0, Source::GroupInput)?;\n\n let tx_hash = load_tx_hash()?;\n\n let mut blake2b = hash::new_blake2b();\n\n let mut message = [0u8; 32];\n\n blake2b.update(&tx_hash);\n\n let zero_lock: Bytes = {\n\n let mut buf = Vec::new();\n\n buf.resize(SIGNATURE_TOTAL_LEN, 0);\n\n buf.into()\n\n };\n\n let witness_for_digest = witness_args\n\n .as_builder()\n\n .lock(Some(zero_lock).pack())\n\n .build();\n\n let witness_len = witness_for_digest.as_bytes().len() as u64;\n\n blake2b.update(&witness_len.to_le_bytes());\n\n 
blake2b.update(&witness_for_digest.as_bytes());\n\n\n\n // Digest same group witnesses\n", "file_path": "contracts/ckb-passport-lock/src/entry.rs", "rank": 6, "score": 81484.07466922027 }, { "content": "fn compute_pub_key_hash(pub_key_n: &[u8], pub_key_e: u32) -> Result<[u8; 20], Error> {\n\n let pub_key_vec_len = COMMON_HEADER + PUBLIC_KEY_N_LEN + PUBLIC_KEY_E_LEN; // common_header + n.len + e.len\n\n let mut pub_key_vec = Vec::new();\n\n for _ in 0..pub_key_vec_len {\n\n pub_key_vec.push(0u8);\n\n }\n\n\n\n pub_key_vec[0..4].copy_from_slice(&rsa::get_common_header());\n\n pub_key_vec[4..8].copy_from_slice(&pub_key_e.to_le_bytes());\n\n pub_key_vec[8..].copy_from_slice(&pub_key_n);\n\n\n\n Ok(hash::blake2b_160(pub_key_vec))\n\n}", "file_path": "contracts/ckb-passport-lock/src/entry.rs", "rank": 8, "score": 74808.90562587694 }, { "content": "pub fn get_common_header() -> [u8; 4] {\n\n [ISO9796_2_ALGORITHM_ID, ISO9796_2_KEY_SIZE, ISO9796_2_PADDING, ISO9796_2_MD_SHA1]\n\n}\n", "file_path": "contracts/ckb-passport-lock/src/entry/rsa.rs", "rank": 11, "score": 65532.70504644836 }, { "content": "pub fn verify_iso9796_2_signature(lib: &LibRSA, n: &[u8], e: u32, msg: &[u8], sig: &[u8]) -> Result<(), Error> {\n\n let rsa_info = generate_rsa_info(&n, e, &sig)?;\n\n match lib.validate_signature(rsa_info.as_ref(), &msg) {\n\n Ok(_) => Ok(()),\n\n Err(err) => match err {\n\n 52 => Err(Error::ISO97962MismatchHash),\n\n 53 => Err(Error::ISO97962InvalidArg1),\n\n 54 => Err(Error::ISO97962InvalidArg2),\n\n 55 => Err(Error::ISO97962InvalidArg3),\n\n 56 => Err(Error::ISO97962InvalidArg4),\n\n 57 => Err(Error::ISO97962InvalidArg5),\n\n 58 => Err(Error::ISO97962InvalidArg6),\n\n 59 => Err(Error::ISO97962InvalidArg7),\n\n 60 => Err(Error::ISO97962InvalidArg8),\n\n 61 => Err(Error::ISO97962InvalidArg9),\n\n _ => Err(Error::ISO97962RSAVerifyError)\n\n }\n\n }\n\n}\n\n\n\n/** signature (in witness, or passed as arguments) memory layout\n\n * This structure contains the following 
information:\n\n * 1) Common header, 4 bytes\n\n * 2) RSA Public Key\n\n * 3) RSA Signature data\n", "file_path": "contracts/ckb-passport-lock/src/entry/rsa.rs", "rank": 12, "score": 64531.91615802255 }, { "content": "/// program entry\n\nfn program_entry() -> i8 {\n\n // Call main function and return error code\n\n match entry::main() {\n\n Ok(_) => 0,\n\n Err(err) => err as i8,\n\n }\n\n}\n\n\n", "file_path": "contracts/ckb-passport-lock/src/main.rs", "rank": 13, "score": 60316.05740957484 }, { "content": "fn generate_rsa_info(n: &[u8], e: u32, sig: &[u8]) -> Result<Vec<u8>, Error> {\n\n if n.len() != sig.len() {\n\n return Err(Error::RSAPubKeySigLengthError)\n\n }\n\n\n\n let pub_key_size: u32 = (n.len() as u32) * 8;\n\n let rsa_info_len = pub_key_size / 4 + 8;\n\n\n\n let mut rsa_info = Vec::new();\n\n for _ in 0..rsa_info_len {\n\n rsa_info.push(0u8);\n\n }\n\n\n\n rsa_info[0..4].copy_from_slice(&get_common_header());\n\n rsa_info[4..8].copy_from_slice(&e.to_le_bytes());\n\n rsa_info[8..(8 + n.len())].copy_from_slice(&n);\n\n rsa_info[(8 + n.len())..(8 + n.len() * 2)].copy_from_slice(&sig);\n\n\n\n Ok(rsa_info)\n\n}\n\n\n\nconst ISO9796_2_ALGORITHM_ID: u8 = 2;\n\nconst ISO9796_2_KEY_SIZE: u8 = 1;\n\nconst ISO9796_2_PADDING: u8 = 0;\n\nconst ISO9796_2_MD_SHA1: u8 = 4;\n", "file_path": "contracts/ckb-passport-lock/src/entry/rsa.rs", "rank": 14, "score": 56023.53431803431 }, { "content": "fn main() {\n\n let out_path = Path::new(\"src\").join(\"code_hashes.rs\");\n\n let mut out_file = BufWriter::new(File::create(&out_path).expect(\"create code_hashes.rs\"));\n\n\n\n let path = \"../ckb-production-scripts/build/validate_signature_rsa\";\n\n\n\n let mut buf = [0u8; BUF_SIZE];\n\n\n\n // build hash\n\n let mut blake2b = new_blake2b();\n\n let mut fd = File::open(&path).expect(\"open file\");\n\n loop {\n\n let read_bytes = fd.read(&mut buf).expect(\"read file\");\n\n if read_bytes > 0 {\n\n blake2b.update(&buf[..read_bytes]);\n\n } else {\n\n break;\n\n }\n\n 
}\n\n\n", "file_path": "ckb-lib-rsa/build.rs", "rank": 15, "score": 26791.183393167117 }, { "content": "pub fn main() -> Result<(), Error> {\n\n let script = load_script()?;\n\n let args: Bytes = script.args().unpack();\n\n if args.len() != 20 {\n\n return Err(Error::InvalidArgument);\n\n }\n\n\n\n let witness_args = load_witness_args(0, Source::GroupInput)?;\n\n let witness: Bytes = witness_args\n\n .lock()\n\n .to_opt()\n\n .ok_or(Error::Encoding)?\n\n .unpack();\n\n\n\n let mut signature = [0u8; SIGNATURE_LEN];\n\n let mut pub_key_e = [0u8; PUBLIC_KEY_E_LEN];\n\n let mut pub_key_n = [0u8; PUBLIC_KEY_N_LEN];\n\n let pub_key_index = SIGNATURE_LEN + COMMON_HEADER;\n\n signature.copy_from_slice(&witness[0..SIGNATURE_LEN]);\n\n pub_key_e.copy_from_slice(&witness[pub_key_index..(pub_key_index + PUBLIC_KEY_E_LEN)]);\n", "file_path": "contracts/ckb-passport-lock/src/entry.rs", "rank": 18, "score": 21897.90491181283 }, { "content": "/// function signature of validate_signature\n\ntype ValidateSignature = unsafe extern \"C\" fn(\n\n prefilled_data: *const u8,\n\n signature_buffer: *const u8,\n\n signature_size: u64,\n\n message_buffer: *const u8,\n\n message_size: u64,\n\n output: *mut u8,\n\n output_len: *mut u64,\n\n) -> i32;\n\n\n\n/// Symbol name\n\nconst VALIDATE_SIGNATURE: &[u8; 18] = b\"validate_signature\";\n\n\n\npub struct LibRSA {\n\n validate_signature: Symbol<ValidateSignature>,\n\n}\n\n\n\nimpl LibRSA {\n\n pub fn load<T>(context: &mut CKBDLContext<T>) -> Self {\n\n // load library\n", "file_path": "ckb-lib-rsa/src/librsa.rs", "rank": 19, "score": 20988.814662139113 }, { "content": "pub const CODE_HASH_RSA: [u8; 32] = [0, 163, 255, 146, 155, 228, 58, 221, 152, 77, 112, 21, 28, 220, 148, 206, 55, 96, 157, 156, 75, 240, 229, 111, 195, 91, 144, 128, 243, 41, 203, 10];\n", "file_path": "ckb-lib-rsa/src/code_hashes.rs", "rank": 20, "score": 20426.11182180907 }, { "content": "pub use blake2b_ref::{Blake2b, Blake2bBuilder};\n\n\n\npub const 
CKB_HASH_PERSONALIZATION: &[u8] = b\"ckb-default-hash\";\n\npub const BLANK_HASH: [u8; 32] = [\n\n 68, 244, 198, 151, 68, 213, 248, 197, 93, 100, 32, 98, 148, 157, 202, 228, 155, 196, 231, 239,\n\n 67, 211, 136, 197, 161, 47, 66, 181, 99, 61, 22, 62,\n\n];\n\n\n", "file_path": "contracts/ckb-passport-lock/src/entry/hash.rs", "rank": 21, "score": 19641.591682125774 }, { "content": "-----------------------------------------------------------------------------\n\n|common header| E | N (KeySize/8 bytes) | RSA Signature (KeySize/8 bytes)|\n\n-----------------------------------------------------------------------------\n\nThe common header includes algorithm_id, key_size, padding, md_type whose data type are uint8_t.\n\nThe common header, E both occupy 4 bytes. E is in little endian(uint32_t).\n\nThe N must be little endian with [u8; 128]\n\nSo the total length in byte is: 4 + 4 + KeySize/8 + KeySize/8.\n\n*/\n", "file_path": "contracts/ckb-passport-lock/src/entry/rsa.rs", "rank": 22, "score": 15635.00201923593 }, { "content": "pub use blake2b_rs::{Blake2b, Blake2bBuilder};\n\n\n\nuse std::{\n\n fs::File,\n\n io::{BufWriter, Read, Write},\n\n path::Path,\n\n};\n\n\n\nconst BUF_SIZE: usize = 8 * 1024;\n\nconst CKB_HASH_PERSONALIZATION: &[u8] = b\"ckb-default-hash\";\n\n\n", "file_path": "ckb-lib-rsa/build.rs", "rank": 24, "score": 11.964432488779007 }, { "content": " let witness_len = witness_buf.len() as u64;\n\n blake2b.update(&witness_len.to_le_bytes());\n\n blake2b.update(&witness_buf);\n\n i += 1;\n\n },\n\n Err(SysError::IndexOutOfBound) => break,\n\n Err(err) => return Err(err.into()),\n\n }\n\n }\n\n blake2b.finalize(&mut message);\n\n\n\n Ok(message)\n\n}\n\n\n", "file_path": "contracts/ckb-passport-lock/src/entry.rs", "rank": 25, "score": 11.954210864822956 }, { "content": "// Import from `core` instead of from `std` since we are in no-std mode\n\nuse core::result::Result;\n\nuse alloc::vec::Vec;\n\n\n\nuse ckb_std::{\n\n ckb_constants::Source,\n\n 
syscalls::load_witness,\n\n error::SysError,\n\n dynamic_loading::CKBDLContext,\n\n ckb_types::{bytes::Bytes, prelude::*},\n\n high_level::{load_script, load_witness_args, load_transaction, load_tx_hash},\n\n};\n\nuse crate::error::Error;\n\n\n\nmod rsa;\n\nmod hash;\n\n\n\nconst MESSAGE_SINGLE_SIZE: usize = 8;\n\nconst SIGNATURE_LEN: usize = 512; // in byte\n\nconst SUB_SIGNATURE_LEN: usize = 128;\n\nconst COMMON_HEADER: usize = 4; \n\nconst PUBLIC_KEY_E_LEN: usize = 4; \n\nconst PUBLIC_KEY_N_LEN: usize = 128;\n\nconst SIGNATURE_TOTAL_LEN: usize = 648; \n\n\n", "file_path": "contracts/ckb-passport-lock/src/entry.rs", "rank": 27, "score": 10.992665070247993 }, { "content": " let mut i = 1;\n\n let mut witness_buf = Vec::new();\n\n loop {\n\n match load_witness(&mut witness_buf, 0, i, Source::GroupInput) {\n\n Ok(_witness) => {\n\n let witness_len = witness_buf.len() as u64;\n\n blake2b.update(&witness_len.to_le_bytes());\n\n blake2b.update(&witness_buf);\n\n i += 1;\n\n },\n\n Err(SysError::IndexOutOfBound) => break,\n\n Err(err) => return Err(err.into()),\n\n }\n\n }\n\n\n\n // Digest witnesses that not covered by inputs\n\n let mut i = load_transaction()?.raw().inputs().len();\n\n loop {\n\n match load_witness(&mut witness_buf, 0, i, Source::Input) {\n\n Ok(_witness) => {\n", "file_path": "contracts/ckb-passport-lock/src/entry.rs", "rank": 28, "score": 10.002193722230494 }, { "content": " let mut hash = [0u8; 32];\n\n blake2b.finalize(&mut hash);\n\n\n\n write!(\n\n &mut out_file,\n\n \"pub const CODE_HASH_RSA: [u8; 32] = {:?};\\n\",\n\n hash\n\n )\n\n .expect(\"write to code_hashes.rs\");\n\n}\n\n\n", "file_path": "ckb-lib-rsa/build.rs", "rank": 30, "score": 9.348149295663184 }, { "content": " pub_key_n.copy_from_slice(&witness[(pub_key_index + PUBLIC_KEY_E_LEN)..]);\n\n\n\n let pub_key_e = u32::from_le_bytes(pub_key_e);\n\n\n\n let pub_key_hash = compute_pub_key_hash(&pub_key_n, pub_key_e)?;\n\n\n\n if args[..] 
!= pub_key_hash {\n\n return Err(Error::WrongPubKey);\n\n }\n\n \n\n let message = generate_message()?;\n\n\n\n let mut context = unsafe { CKBDLContext::<[u8; 1024 * 128]>::new() };\n\n let lib = ckb_lib_rsa::LibRSA::load(&mut context);\n\n\n\n for index in 0..4 {\n\n let sub_message = &message[MESSAGE_SINGLE_SIZE * index..MESSAGE_SINGLE_SIZE * (index + 1)];\n\n let sub_signature = &signature[SUB_SIGNATURE_LEN * index..SUB_SIGNATURE_LEN * (index + 1)];\n\n match rsa::verify_iso9796_2_signature(&lib, &pub_key_n, pub_key_e, sub_message, sub_signature) {\n\n Ok(_) => continue,\n\n Err(err) => return Err(err)\n\n }\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "contracts/ckb-passport-lock/src/entry.rs", "rank": 32, "score": 7.93581002136386 }, { "content": " let lib = context.load(&CODE_HASH_RSA).expect(\"load rsa\");\n\n\n\n // find symbols\n\n let validate_signature: Symbol<ValidateSignature> =\n\n unsafe { lib.get(VALIDATE_SIGNATURE).expect(\"load function\") };\n\n LibRSA {\n\n validate_signature,\n\n }\n\n }\n\n\n\n\n\n pub fn validate_signature(\n\n &self,\n\n signature: &[u8],\n\n message: &[u8],\n\n ) -> Result<(), i32> {\n\n let mut output = [0u8; 1024];\n\n let mut output_len: u64 = 1024;\n\n\n\n let f = &self.validate_signature;\n", "file_path": "ckb-lib-rsa/src/librsa.rs", "rank": 33, "score": 7.525611021569301 }, { "content": " let error_code = unsafe {\n\n f(\n\n Vec::new().as_ptr(),\n\n signature.as_ptr(),\n\n signature.len() as u64,\n\n message.as_ptr(),\n\n message.len() as u64,\n\n output.as_mut_ptr(),\n\n &mut output_len as *mut u64,\n\n )\n\n };\n\n\n\n if error_code != 0 {\n\n return Err(error_code);\n\n }\n\n Ok(())\n\n }\n\n}\n", "file_path": "ckb-lib-rsa/src/librsa.rs", "rank": 34, "score": 6.7974619459175845 }, { "content": "Deployment plan:\n\n---\n\nmigrated_capacity: 0.0 (CKB)\n\nnew_occupied_capacity: 121348.0 (CKB)\n\ntxs_fee_capacity: 0.003 (CKB)\n\ntotal_occupied_capacity: 121348.0 (CKB)\n\nrecipe:\n\n cells:\n\n - name: 
ckb-passport-lock\n\n index: 0\n\n tx_hash: \"0x01a294bb922a9e9b217e82d9f7cabfe6a72fb9920cdc3bd6d64e436ac234a5c7\"\n\n occupied_capacity: 58414.0 (CKB)\n\n data_hash: \"0x2cdedab61ac07247d10d6889fda575a64d58151777938e4aec55f2a8cf4587c6\"\n\n type_id: \"0xc00226dfdeee4c3a160f2dde4be5dd5317e0b65e07858f2a285e96c312510331\"\n\n - name: rsa_sighash_all\n\n index: 0\n\n tx_hash: \"0xc9df0a7dd2f8cd1ba940f84a91b4b6dca45647f4ab25b2b8fb2f53c86c21b848\"\n\n occupied_capacity: 62797.0 (CKB)\n\n data_hash: \"0x131126b55109a5852910b7ef14f0331170cf49209f5369814574244cb546a324\"\n\n type_id: ~\n\n dep_groups:\n\n - name: dep_group\n\n tx_hash: \"0x8944b5149074321e5cad1c18dbf575d7d46acdc17974e7c2c8afa320b3b772db\"\n\n index: 0\n\n occupied_capacity: 137.0 (CKB)\n\n```\n\n\n\n#### 4. Type yes or y and input the password to unlock the account.\n\n\n\n```\n\n(1/3) Sending tx 01a294bb922a9e9b217e82d9f7cabfe6a72fb9920cdc3bd6d64e436ac234a5c7\n\n(2/3) Sending tx c9df0a7dd2f8cd1ba940f84a91b4b6dca45647f4ab25b2b8fb2f53c86c21b848\n\n(3/3) Sending tx 8944b5149074321e5cad1c18dbf575d7d46acdc17974e7c2c8afa320b3b772db\n\nDeployment complete\n\n```\n\n\n\nNow the passport lock script has been deployed, you can refer to this script by using `tx_hash: 0x8944b5149074321e5cad1c18dbf575d7d46acdc17974e7c2c8afa320b3b772db index: 0` as `out_point`(your tx_hash should be another value).\n", "file_path": "README.md", "rank": 36, "score": 5.3563197238568625 }, { "content": "#![no_std]\n\n\n\nextern crate alloc;\n\n\n\nmod code_hashes;\n\nmod librsa;\n\n\n\npub use code_hashes::CODE_HASH_RSA;\n\npub use librsa::LibRSA;", "file_path": "ckb-lib-rsa/src/lib.rs", "rank": 39, "score": 4.665168016787808 }, { "content": "use alloc::vec::Vec;\n\nuse crate::code_hashes::CODE_HASH_RSA;\n\nuse ckb_std::dynamic_loading::{CKBDLContext, Symbol};\n\n\n\n/// function signature of validate_signature\n", "file_path": "ckb-lib-rsa/src/librsa.rs", "rank": 40, "score": 4.575215676230513 }, { "content": "use 
ckb_tool::ckb_types::bytes::Bytes;\n\nuse std::env;\n\nuse std::fs;\n\nuse std::path::PathBuf;\n\nuse std::str::FromStr;\n\n\n\n#[cfg(test)]\n\nmod tests;\n\n\n\nconst TEST_ENV_VAR: &str = \"CAPSULE_TEST_ENV\";\n\n\n\npub enum TestEnv {\n\n Debug,\n\n Release,\n\n}\n\n\n\nimpl FromStr for TestEnv {\n\n type Err = &'static str;\n\n\n\n fn from_str(s: &str) -> Result<Self, Self::Err> {\n", "file_path": "tests/src/lib.rs", "rank": 43, "score": 3.5297946885331593 }, { "content": "use ckb_std::error::SysError;\n\n\n\n/// Error\n\n#[repr(i8)]\n\npub enum Error {\n\n IndexOutOfBound = 1,\n\n ItemMissing,\n\n LengthNotEnough,\n\n Encoding,\n\n InvalidArgument = 5,\n\n // Add customized errors here...\n\n WrongPubKey,\n\n RSAPubKeySigLengthError,\n\n ISO97962RSAVerifyError,\n\n ISO97962InvalidArg1,\n\n ISO97962InvalidArg2 = 10,\n\n ISO97962InvalidArg3,\n\n ISO97962InvalidArg4,\n\n ISO97962InvalidArg5,\n\n ISO97962InvalidArg6,\n", "file_path": "contracts/ckb-passport-lock/src/error.rs", "rank": 44, "score": 3.4419527552391442 }, { "content": "//! Generated by capsule\n\n//!\n\n//! `main.rs` is used to define rust lang items and modules.\n\n//! See `entry.rs` for the `main` function. \n\n//! 
See `error.rs` for the `Error` type.\n\n\n\n#![no_std]\n\n#![no_main]\n\n#![feature(lang_items)]\n\n#![feature(alloc_error_handler)]\n\n#![feature(panic_info_message)]\n\n\n\n// define modules\n\nmod entry;\n\nmod error;\n\n\n\nuse ckb_std::{\n\n default_alloc,\n\n};\n\n\n\nckb_std::entry!(program_entry);\n\ndefault_alloc!();\n\n\n\n/// program entry\n", "file_path": "contracts/ckb-passport-lock/src/main.rs", "rank": 45, "score": 2.818006325530471 }, { "content": "use core::result::Result;\n\nuse alloc::vec::Vec;\n\nuse ckb_lib_rsa::LibRSA;\n\nuse crate::error::Error;\n\n\n", "file_path": "contracts/ckb-passport-lock/src/entry/rsa.rs", "rank": 46, "score": 2.6674370045161044 }, { "content": "impl Loader {\n\n fn with_test_env(env: TestEnv) -> Self {\n\n let load_prefix = match env {\n\n TestEnv::Debug => \"debug\",\n\n TestEnv::Release => \"release\",\n\n };\n\n let dir = env::current_dir().unwrap();\n\n let mut base_path = PathBuf::new();\n\n base_path.push(dir);\n\n base_path.push(\"..\");\n\n base_path.push(\"build\");\n\n base_path.push(load_prefix);\n\n Loader(base_path)\n\n }\n\n\n\n pub fn load_binary(&self, name: &str) -> Bytes {\n\n let mut path = self.0.clone();\n\n path.push(name);\n\n fs::read(path).expect(\"binary\").into()\n\n }\n\n}\n", "file_path": "tests/src/lib.rs", "rank": 48, "score": 2.4974526546390576 }, { "content": " ISO97962InvalidArg7 = 15,\n\n ISO97962InvalidArg8,\n\n ISO97962InvalidArg9,\n\n ISO97962MismatchHash = 18,\n\n}\n\n\n\nimpl From<SysError> for Error {\n\n fn from(err: SysError) -> Self {\n\n use SysError::*;\n\n match err {\n\n IndexOutOfBound => Self::IndexOutOfBound,\n\n ItemMissing => Self::ItemMissing,\n\n LengthNotEnough(_) => Self::LengthNotEnough,\n\n Encoding => Self::Encoding,\n\n Unknown(err_code) => panic!(\"unexpected sys error {}\", err_code),\n\n }\n\n }\n\n}\n\n\n", "file_path": "contracts/ckb-passport-lock/src/error.rs", "rank": 49, "score": 2.1871029901808132 }, { "content": " match s.to_lowercase().as_str() 
{\n\n \"debug\" => Ok(TestEnv::Debug),\n\n \"release\" => Ok(TestEnv::Release),\n\n _ => Err(\"no match\"),\n\n }\n\n }\n\n}\n\n\n\npub struct Loader(PathBuf);\n\n\n\nimpl Default for Loader {\n\n fn default() -> Self {\n\n let test_env = match env::var(TEST_ENV_VAR) {\n\n Ok(val) => val.parse().expect(\"test env\"),\n\n Err(_) => TestEnv::Debug,\n\n };\n\n Self::with_test_env(test_env)\n\n }\n\n}\n\n\n", "file_path": "tests/src/lib.rs", "rank": 52, "score": 1.244836880916044 }, { "content": "# ckb-passport-lock\n\n\n\n[![License](https://img.shields.io/badge/license-MIT-green)](https://github.com/duanyytop/ckb-passport-lock/blob/develop/COPYING)\n\n[![Github Actions CI](https://github.com/duanyytop/ckb-passport-lock/workflows/CI/badge.svg?branch=develop)](https://github.com/duanyytop/ckb-passport-lock/actions)\n\n\n\nThe lock script of e-passport on Nervos CKB using [Capsule](https://github.com/nervosnetwork/capsule)\n\n\n\n### Pre-requirement\n\n\n\n- [capsule](https://github.com/nervosnetwork/capsule) >= 0.4.3\n\n- [ckb-cli](https://github.com/nervosnetwork/ckb-cli) >= 0.35.0\n\n- [rsa_sighash_all](https://github.com/nervosnetwork/ckb-miscellaneous-scripts/blob/master/c/rsa_sighash_all.c) which supports loaded as a shared library.\n\n\n\n> Note: Capsule uses docker to build contracts and run tests. https://docs.docker.com/get-docker/\n\n> and docker and ckb-cli must be accessible in the PATH in order for them to be used by Capsule.\n\n\n\n### Getting Started\n\n\n\n- Init submodules:\n\n\n\n```\n\ngit submodule init && git submodule update -r --init\n\n```\n\n\n\n- Build the shared binary `validate_signature_rsa`:\n\n\n\n```\n\ncd ckb-production-scripts && git submodule init && git submodule update\n\n\n\nmake validate_signature_rsa-via-docker\n\n```\n\n\n\n- Build contracts:\n\n\n\n```sh\n\n# back to repo root directory\n\ncd .. 
&& capsule build\n\n```\n\n\n\n- Run tests\n\n\n\n```sh\n\ncapsule test\n\n```\n\n\n\n### Deployment\n\n\n", "file_path": "README.md", "rank": 53, "score": 1.1340880901110961 }, { "content": "#### 1. Update the deployment configurations\n\n\n\nOpen `deployment.toml` :\n\n\n\n- cells describes which cells to be deployed.\n\n\n\n - `name`: Define the reference name used in the deployment configuration.\n\n - `enable_type_id` : If it is set to true means create a type_id for the cell.\n\n - `location` : Define the script binary path.\n\n - `dep_groups` describes which dep_groups to be created. Dep Group is a cell which bundles several cells as its members. When a dep group cell is used in cell_deps, it has the same effect as adding all its members into cell_deps. In our case, we don’t need dep_groups.\n\n\n\n- `lock` describes the lock field of the new deployed cells.It is recommended to set lock to the address(an address that you can unlock) of deployer in the dev chain and in the testnet, which is easier to update the script.\n\n\n\n#### 2. Build release version of the script\n\n\n\nThe release version of script doesn’t include debug symbols which makes the size smaller.\n\n\n\n```sh\n\ncapsule build --release\n\n```\n\n\n\n#### 3. Deploy the script\n\n\n\n```sh\n\ncapsule deploy --address <ckt1....> --fee 0.001\n\n```\n\n\n\nIf the `ckb-cli` has been installed and `dev-chain` RPC is connectable, you will see the deployment plan:\n\n\n\nnew_occupied_capacity and total_occupied_capacity refer how much CKB to store cells and data.\n\ntxs_fee_capacity refers how much CKB to pay the transaction fee.\n\n\n\n```\n", "file_path": "README.md", "rank": 54, "score": 0.9487249883455608 } ]
Rust
bzip2/src/tokio.rs
GMAP/RustStreamBench
d319445c448db15221cf62aa51af002308807c39
use std::mem; use std::fs::File; use std::io::prelude::*; use std::time::{SystemTime}; use {bzip2_sys}; use crossbeam_channel::{unbounded}; use{ futures::future::lazy, futures::sync::*, futures::{stream, Future, Stream}, tokio::prelude::*, }; struct Tcontent { order: usize, buffer_input: Vec<u8>, buffer_output: Vec<u8>, output_size: u32, } macro_rules! spawn_return { ($block:expr) => {{ let (sender, receiver) = oneshot::channel::<_>(); tokio::spawn(lazy(move || { let result = $block; sender.send(result).ok(); Ok(()) })); receiver }}; } pub fn tokio(threads: usize, file_action: &str, file_name: &str,) { let mut file = File::open(file_name).expect("No file found."); if file_action == "compress" { let compressed_file_name = file_name.to_owned() + &".bz2"; let mut buf_write = File::create(compressed_file_name).unwrap(); let mut buffer_input = vec![]; let mut buffer_output = vec![]; file.read_to_end(&mut buffer_input).unwrap(); let block_size = 900000; let mut pos_init: usize = 0; let mut pos_end = 0; let mut bytes_left = buffer_input.len(); let mut order = 0; let start = SystemTime::now(); let processing_stream = stream::poll_fn(move || -> Poll<Option<Tcontent>,futures::sync::oneshot::Canceled> { if bytes_left <= 0 { return Ok(Async::Ready(None)); } pos_init = pos_end; pos_end += if bytes_left < block_size { buffer_input.len()-pos_end } else { block_size }; bytes_left -= pos_end-pos_init; let buffer_slice = &buffer_input[pos_init..pos_end]; let content = Tcontent { order, buffer_input: buffer_slice.to_vec().clone(), buffer_output: vec![0; (buffer_slice.len() as f64 *1.01) as usize+600], output_size: 0, }; order += 1; Ok(Async::Ready(Some(content))) }); let (collection_send, collection_recv) = unbounded(); let (send, recv) = (collection_send.clone(), collection_recv.clone()); let pipeline = processing_stream .map(move |mut content: Tcontent| { let send = collection_send.clone(); spawn_return!({ unsafe{ let mut bz_buffer: bzip2_sys::bz_stream = mem::zeroed(); 
bzip2_sys::BZ2_bzCompressInit(&mut bz_buffer as *mut _, 9, 0, 30); bz_buffer.next_in = content.buffer_input.as_ptr() as *mut _; bz_buffer.avail_in = content.buffer_input.len() as _; bz_buffer.next_out = content.buffer_output.as_mut_ptr() as *mut _; bz_buffer.avail_out = content.buffer_output.len() as _; bzip2_sys::BZ2_bzCompress(&mut bz_buffer as *mut _, bzip2_sys::BZ_FINISH as _); bzip2_sys::BZ2_bzCompressEnd(&mut bz_buffer as *mut _); content.output_size = bz_buffer.total_out_lo32; } send.send(content).unwrap(); }) }).buffer_unordered(threads) .for_each(|_content| { Ok(()) }) .map_err(|e| println!("Error = {:?}", e)); tokio::run(pipeline); drop(send); let mut collection: Vec<Tcontent> = recv.iter().collect(); collection.sort_by_key(|content| content.order); let system_duration = start.elapsed().expect("Failed to get render time?"); let in_sec = system_duration.as_secs() as f64 + system_duration.subsec_nanos() as f64 * 1e-9; println!("Execution time: {} sec", in_sec); for content in collection { buffer_output.extend(&content.buffer_output[0..content.output_size as usize]); } buf_write.write_all(&buffer_output).unwrap(); std::fs::remove_file(file_name).unwrap(); } else if file_action == "decompress" { let decompressed_file_name = &file_name.to_owned()[..file_name.len()-4]; let mut buf_write = File::create(decompressed_file_name).unwrap(); let mut buffer_input = vec![]; let mut buffer_output = vec![]; file.read_to_end(&mut buffer_input).unwrap(); let block_size = 900000; let mut pos_init: usize; let mut pos_end = 0; let mut bytes_left = buffer_input.len(); let mut queue_blocks: Vec<(usize, usize)> = Vec::new(); let mut counter = 0; while bytes_left > 0 { pos_init = pos_end; pos_end += { let buffer_slice; if buffer_input.len() > block_size+10000 { if (pos_init+block_size+10000) > buffer_input.len() { buffer_slice = &buffer_input[pos_init+10..]; }else{ buffer_slice = &buffer_input[pos_init+10..pos_init+block_size+10000]; } }else{ buffer_slice = 
&buffer_input[pos_init+10..]; } let ret = buffer_slice.windows(10).position(|window| window == b"BZh91AY&SY"); let pos = match ret { Some(i) => i+10, None => buffer_input.len()-pos_init, }; pos }; bytes_left -= pos_end-pos_init; queue_blocks.push((pos_init, pos_end)); } let start = SystemTime::now(); let processing_stream = stream::poll_fn(move || -> Poll<Option<Tcontent>,futures::sync::oneshot::Canceled> { if counter >= queue_blocks.len() { return Ok(Async::Ready(None)); } let buffer_slice = &buffer_input[queue_blocks[counter].0..queue_blocks[counter].1]; let content = Tcontent { order: counter, buffer_input: buffer_slice.to_vec().clone(), buffer_output: vec![0; block_size], output_size: 0, }; counter += 1; Ok(Async::Ready(Some(content))) }); let (collection_send, collection_recv) = unbounded(); let (send, recv) = (collection_send.clone(), collection_recv.clone()); let pipeline = processing_stream .map(move |mut content: Tcontent| { let send = collection_send.clone(); spawn_return!({ unsafe{ let mut bz_buffer: bzip2_sys::bz_stream = mem::zeroed(); bzip2_sys::BZ2_bzDecompressInit(&mut bz_buffer as *mut _, 0, 0); bz_buffer.next_in = content.buffer_input.as_ptr() as *mut _; bz_buffer.avail_in = content.buffer_input.len() as _; bz_buffer.next_out = content.buffer_output.as_mut_ptr() as *mut _; bz_buffer.avail_out = content.buffer_output.len() as _; bzip2_sys::BZ2_bzDecompress(&mut bz_buffer as *mut _); bzip2_sys::BZ2_bzDecompressEnd(&mut bz_buffer as *mut _); content.output_size = bz_buffer.total_out_lo32; } send.send(content).unwrap(); }) }).buffer_unordered(threads) .for_each(|_| { Ok(())}) .map_err(|e| println!("Error = {:?}", e)); tokio::run(pipeline); drop(send); let mut collection: Vec<Tcontent> = recv.iter().collect(); collection.sort_by_key(|content| content.order); let system_duration = start.elapsed().expect("Failed to get render time?"); let in_sec = system_duration.as_secs() as f64 + system_duration.subsec_nanos() as f64 * 1e-9; println!("Execution time: 
{} sec", in_sec); for content in collection { buffer_output.extend(&content.buffer_output[0..content.output_size as usize]); } buf_write.write_all(&buffer_output).unwrap(); std::fs::remove_file(file_name).unwrap(); } } pub fn tokio_io(threads: usize, file_action: &str, file_name: &str,) { let mut file = File::open(file_name).expect("No file found."); if file_action == "compress" { let compressed_file_name = file_name.to_owned() + &".bz2"; let mut buf_write = File::create(compressed_file_name).unwrap(); let block_size = 900000; let mut pos_init: usize = 0; let mut pos_end = 0; let mut bytes_left: usize = file.metadata().unwrap().len() as usize; let mut order = 0; let start = SystemTime::now(); let processing_stream = stream::poll_fn(move || -> Poll<Option<Tcontent>,futures::sync::oneshot::Canceled> { if bytes_left <= 0 { return Ok(Async::Ready(None)); } pos_init = pos_end; pos_end += if bytes_left < block_size { file.metadata().unwrap().len() as usize-pos_end } else { block_size }; bytes_left -= pos_end-pos_init; let mut buffer_slice: Vec<u8> = vec![0; pos_end-pos_init]; file.read(&mut buffer_slice).unwrap(); let content = Tcontent { order, buffer_input: buffer_slice.to_vec().clone(), buffer_output: vec![0; (buffer_slice.len() as f64 *1.01) as usize+600], output_size: 0, }; order += 1; Ok(Async::Ready(Some(content))) }); let pipeline = processing_stream .map(move |mut content: Tcontent| { spawn_return!({ unsafe{ let mut bz_buffer: bzip2_sys::bz_stream = mem::zeroed(); bzip2_sys::BZ2_bzCompressInit(&mut bz_buffer as *mut _, 9, 0, 30); bz_buffer.next_in = content.buffer_input.as_ptr() as *mut _; bz_buffer.avail_in = content.buffer_input.len() as _; bz_buffer.next_out = content.buffer_output.as_mut_ptr() as *mut _; bz_buffer.avail_out = content.buffer_output.len() as _; bzip2_sys::BZ2_bzCompress(&mut bz_buffer as *mut _, bzip2_sys::BZ_FINISH as _); bzip2_sys::BZ2_bzCompressEnd(&mut bz_buffer as *mut _); content.output_size = bz_buffer.total_out_lo32; } content }) }) 
.buffered(threads) .for_each(move |content: Tcontent| { buf_write.write(&content.buffer_output[0..content.output_size as usize]).unwrap(); Ok(()) }) .map_err(|e| println!("Error = {:?}", e)); tokio::run(pipeline); let system_duration = start.elapsed().expect("Failed to get render time?"); let in_sec = system_duration.as_secs() as f64 + system_duration.subsec_nanos() as f64 * 1e-9; println!("Execution time: {} sec", in_sec); std::fs::remove_file(file_name).unwrap(); } else if file_action == "decompress" { let decompressed_file_name = &file_name.to_owned()[..file_name.len()-4]; let mut buf_write = File::create(decompressed_file_name).unwrap(); let mut buffer_input = vec![]; file.read_to_end(&mut buffer_input).unwrap(); let block_size = 900000; let mut pos_init: usize; let mut pos_end = 0; let mut bytes_left = buffer_input.len(); let mut queue_blocks: Vec<(usize, usize)> = Vec::new(); let mut counter = 0; while bytes_left > 0 { pos_init = pos_end; pos_end += { let buffer_slice; if buffer_input.len() > block_size+10000 { if (pos_init+block_size+10000) > buffer_input.len() { buffer_slice = &buffer_input[pos_init+10..]; }else{ buffer_slice = &buffer_input[pos_init+10..pos_init+block_size+10000]; } }else{ buffer_slice = &buffer_input[pos_init+10..]; } let ret = buffer_slice.windows(10).position(|window| window == b"BZh91AY&SY"); let pos = match ret { Some(i) => i+10, None => buffer_input.len()-pos_init, }; pos }; bytes_left -= pos_end-pos_init; queue_blocks.push((pos_init, pos_end)); } let start = SystemTime::now(); let processing_stream = stream::poll_fn(move || -> Poll<Option<Tcontent>,futures::sync::oneshot::Canceled> { if counter >= queue_blocks.len() { return Ok(Async::Ready(None)); } let buffer_slice = &buffer_input[queue_blocks[counter].0..queue_blocks[counter].1]; let content = Tcontent { order: counter, buffer_input: buffer_slice.to_vec().clone(), buffer_output: vec![0; block_size], output_size: 0, }; counter += 1; Ok(Async::Ready(Some(content))) }); let pipeline 
= processing_stream .map(move |mut content: Tcontent| { spawn_return!({ unsafe{ let mut bz_buffer: bzip2_sys::bz_stream = mem::zeroed(); bzip2_sys::BZ2_bzDecompressInit(&mut bz_buffer as *mut _, 0, 0); bz_buffer.next_in = content.buffer_input.as_ptr() as *mut _; bz_buffer.avail_in = content.buffer_input.len() as _; bz_buffer.next_out = content.buffer_output.as_mut_ptr() as *mut _; bz_buffer.avail_out = content.buffer_output.len() as _; bzip2_sys::BZ2_bzDecompress(&mut bz_buffer as *mut _); bzip2_sys::BZ2_bzDecompressEnd(&mut bz_buffer as *mut _); content.output_size = bz_buffer.total_out_lo32; } content }) }) .buffered(threads) .for_each(move |content: Tcontent| { buf_write.write(&content.buffer_output[0..content.output_size as usize]).unwrap(); Ok(()) }) .map_err(|e| println!("Error = {:?}", e)); tokio::run(pipeline); let system_duration = start.elapsed().expect("Failed to get render time?"); let in_sec = system_duration.as_secs() as f64 + system_duration.subsec_nanos() as f64 * 1e-9; println!("Execution time: {} sec", in_sec); std::fs::remove_file(file_name).unwrap(); } }
use std::mem; use std::fs::File; use std::io::prelude::*; use std::time::{SystemTime}; use {bzip2_sys}; use crossbeam_channel::{unbounded}; use{ futures::future::lazy, futures::sync::*, futures::{stream, Future, Stream}, tokio::prelude::*, }; struct Tcontent { order: usize, buffer_input: Vec<u8>, buffer_output: Vec<u8>, output_size: u32, } macro_rules! spawn_return { ($block:expr) => {{ let (sender, receiver) = oneshot::channel::<_>(); tokio::spawn(lazy(move || { let result = $block; sender.send(result).ok(); Ok(()) })); receiver }}; }
pub fn tokio_io(threads: usize, file_action: &str, file_name: &str,) { let mut file = File::open(file_name).expect("No file found."); if file_action == "compress" { let compressed_file_name = file_name.to_owned() + &".bz2"; let mut buf_write = File::create(compressed_file_name).unwrap(); let block_size = 900000; let mut pos_init: usize = 0; let mut pos_end = 0; let mut bytes_left: usize = file.metadata().unwrap().len() as usize; let mut order = 0; let start = SystemTime::now(); let processing_stream = stream::poll_fn(move || -> Poll<Option<Tcontent>,futures::sync::oneshot::Canceled> { if bytes_left <= 0 { return Ok(Async::Ready(None)); } pos_init = pos_end; pos_end += if bytes_left < block_size { file.metadata().unwrap().len() as usize-pos_end } else { block_size }; bytes_left -= pos_end-pos_init; let mut buffer_slice: Vec<u8> = vec![0; pos_end-pos_init]; file.read(&mut buffer_slice).unwrap(); let content = Tcontent { order, buffer_input: buffer_slice.to_vec().clone(), buffer_output: vec![0; (buffer_slice.len() as f64 *1.01) as usize+600], output_size: 0, }; order += 1; Ok(Async::Ready(Some(content))) }); let pipeline = processing_stream .map(move |mut content: Tcontent| { spawn_return!({ unsafe{ let mut bz_buffer: bzip2_sys::bz_stream = mem::zeroed(); bzip2_sys::BZ2_bzCompressInit(&mut bz_buffer as *mut _, 9, 0, 30); bz_buffer.next_in = content.buffer_input.as_ptr() as *mut _; bz_buffer.avail_in = content.buffer_input.len() as _; bz_buffer.next_out = content.buffer_output.as_mut_ptr() as *mut _; bz_buffer.avail_out = content.buffer_output.len() as _; bzip2_sys::BZ2_bzCompress(&mut bz_buffer as *mut _, bzip2_sys::BZ_FINISH as _); bzip2_sys::BZ2_bzCompressEnd(&mut bz_buffer as *mut _); content.output_size = bz_buffer.total_out_lo32; } content }) }) .buffered(threads) .for_each(move |content: Tcontent| { buf_write.write(&content.buffer_output[0..content.output_size as usize]).unwrap(); Ok(()) }) .map_err(|e| println!("Error = {:?}", e)); tokio::run(pipeline); let 
system_duration = start.elapsed().expect("Failed to get render time?"); let in_sec = system_duration.as_secs() as f64 + system_duration.subsec_nanos() as f64 * 1e-9; println!("Execution time: {} sec", in_sec); std::fs::remove_file(file_name).unwrap(); } else if file_action == "decompress" { let decompressed_file_name = &file_name.to_owned()[..file_name.len()-4]; let mut buf_write = File::create(decompressed_file_name).unwrap(); let mut buffer_input = vec![]; file.read_to_end(&mut buffer_input).unwrap(); let block_size = 900000; let mut pos_init: usize; let mut pos_end = 0; let mut bytes_left = buffer_input.len(); let mut queue_blocks: Vec<(usize, usize)> = Vec::new(); let mut counter = 0; while bytes_left > 0 { pos_init = pos_end; pos_end += { let buffer_slice; if buffer_input.len() > block_size+10000 { if (pos_init+block_size+10000) > buffer_input.len() { buffer_slice = &buffer_input[pos_init+10..]; }else{ buffer_slice = &buffer_input[pos_init+10..pos_init+block_size+10000]; } }else{ buffer_slice = &buffer_input[pos_init+10..]; } let ret = buffer_slice.windows(10).position(|window| window == b"BZh91AY&SY"); let pos = match ret { Some(i) => i+10, None => buffer_input.len()-pos_init, }; pos }; bytes_left -= pos_end-pos_init; queue_blocks.push((pos_init, pos_end)); } let start = SystemTime::now(); let processing_stream = stream::poll_fn(move || -> Poll<Option<Tcontent>,futures::sync::oneshot::Canceled> { if counter >= queue_blocks.len() { return Ok(Async::Ready(None)); } let buffer_slice = &buffer_input[queue_blocks[counter].0..queue_blocks[counter].1]; let content = Tcontent { order: counter, buffer_input: buffer_slice.to_vec().clone(), buffer_output: vec![0; block_size], output_size: 0, }; counter += 1; Ok(Async::Ready(Some(content))) }); let pipeline = processing_stream .map(move |mut content: Tcontent| { spawn_return!({ unsafe{ let mut bz_buffer: bzip2_sys::bz_stream = mem::zeroed(); bzip2_sys::BZ2_bzDecompressInit(&mut bz_buffer as *mut _, 0, 0); 
bz_buffer.next_in = content.buffer_input.as_ptr() as *mut _; bz_buffer.avail_in = content.buffer_input.len() as _; bz_buffer.next_out = content.buffer_output.as_mut_ptr() as *mut _; bz_buffer.avail_out = content.buffer_output.len() as _; bzip2_sys::BZ2_bzDecompress(&mut bz_buffer as *mut _); bzip2_sys::BZ2_bzDecompressEnd(&mut bz_buffer as *mut _); content.output_size = bz_buffer.total_out_lo32; } content }) }) .buffered(threads) .for_each(move |content: Tcontent| { buf_write.write(&content.buffer_output[0..content.output_size as usize]).unwrap(); Ok(()) }) .map_err(|e| println!("Error = {:?}", e)); tokio::run(pipeline); let system_duration = start.elapsed().expect("Failed to get render time?"); let in_sec = system_duration.as_secs() as f64 + system_duration.subsec_nanos() as f64 * 1e-9; println!("Execution time: {} sec", in_sec); std::fs::remove_file(file_name).unwrap(); } }
pub fn tokio(threads: usize, file_action: &str, file_name: &str,) { let mut file = File::open(file_name).expect("No file found."); if file_action == "compress" { let compressed_file_name = file_name.to_owned() + &".bz2"; let mut buf_write = File::create(compressed_file_name).unwrap(); let mut buffer_input = vec![]; let mut buffer_output = vec![]; file.read_to_end(&mut buffer_input).unwrap(); let block_size = 900000; let mut pos_init: usize = 0; let mut pos_end = 0; let mut bytes_left = buffer_input.len(); let mut order = 0; let start = SystemTime::now(); let processing_stream = stream::poll_fn(move || -> Poll<Option<Tcontent>,futures::sync::oneshot::Canceled> { if bytes_left <= 0 { return Ok(Async::Ready(None)); } pos_init = pos_end; pos_end += if bytes_left < block_size { buffer_input.len()-pos_end } else { block_size }; bytes_left -= pos_end-pos_init; let buffer_slice = &buffer_input[pos_init..pos_end]; let content = Tcontent { order, buffer_input: buffer_slice.to_vec().clone(), buffer_output: vec![0; (buffer_slice.len() as f64 *1.01) as usize+600], output_size: 0, }; order += 1; Ok(Async::Ready(Some(content))) }); let (collection_send, collection_recv) = unbounded(); let (send, recv) = (collection_send.clone(), collection_recv.clone()); let pipeline = processing_stream .map(move |mut content: Tcontent| { let send = collection_send.clone(); spawn_return!({ unsafe{ let mut bz_buffer: bzip2_sys::bz_stream = mem::zeroed(); bzip2_sys::BZ2_bzCompressInit(&mut bz_buffer as *mut _, 9, 0, 30); bz_buffer.next_in = content.buffer_input.as_ptr() as *mut _; bz_buffer.avail_in = content.buffer_input.len() as _; bz_buffer.next_out = content.buffer_output.as_mut_ptr() as *mut _; bz_buffer.avail_out = content.buffer_output.len() as _; bzip2_sys::BZ2_bzCompress(&mut bz_buffer as *mut _, bzip2_sys::BZ_FINISH as _); bzip2_sys::BZ2_bzCompressEnd(&mut bz_buffer as *mut _); content.output_size = bz_buffer.total_out_lo32; } send.send(content).unwrap(); }) }).buffer_unordered(threads) 
.for_each(|_content| { Ok(()) }) .map_err(|e| println!("Error = {:?}", e)); tokio::run(pipeline); drop(send); let mut collection: Vec<Tcontent> = recv.iter().collect(); collection.sort_by_key(|content| content.order); let system_duration = start.elapsed().expect("Failed to get render time?"); let in_sec = system_duration.as_secs() as f64 + system_duration.subsec_nanos() as f64 * 1e-9; println!("Execution time: {} sec", in_sec); for content in collection { buffer_output.extend(&content.buffer_output[0..content.output_size as usize]); } buf_write.write_all(&buffer_output).unwrap(); std::fs::remove_file(file_name).unwrap(); } else if file_action == "decompress" { let decompressed_file_name = &file_name.to_owned()[..file_name.len()-4]; let mut buf_write = File::create(decompressed_file_name).unwrap(); let mut buffer_input = vec![]; let mut buffer_output = vec![]; file.read_to_end(&mut buffer_input).unwrap(); let block_size = 900000; let mut pos_init: usize; let mut pos_end = 0; let mut bytes_left = buffer_input.len(); let mut queue_blocks: Vec<(usize, usize)> = Vec::new(); let mut counter = 0; while bytes_left > 0 { pos_init = pos_end; pos_end += { let buffer_slice; if buffer_input.len() > block_size+10000 { if (pos_init+block_size+10000) > buffer_input.len() { buffer_slice = &buffer_input[pos_init+10..]; }else{ buffer_slice = &buffer_input[pos_init+10..pos_init+block_size+10000]; } }else{ buffer_slice = &buffer_input[pos_init+10..]; } let ret = buffer_slice.windows(10).position(|window| window == b"BZh91AY&SY"); let pos = match ret { Some(i) => i+10, None => buffer_input.len()-pos_init, }; pos }; bytes_left -= pos_end-pos_init; queue_blocks.push((pos_init, pos_end)); } let start = SystemTime::now(); let processing_stream = stream::poll_fn(move || -> Poll<Option<Tcontent>,futures::sync::oneshot::Canceled> { if counter >= queue_blocks.len() { return Ok(Async::Ready(None)); } let buffer_slice = &buffer_input[queue_blocks[counter].0..queue_blocks[counter].1]; let content 
= Tcontent { order: counter, buffer_input: buffer_slice.to_vec().clone(), buffer_output: vec![0; block_size], output_size: 0, }; counter += 1; Ok(Async::Ready(Some(content))) }); let (collection_send, collection_recv) = unbounded(); let (send, recv) = (collection_send.clone(), collection_recv.clone()); let pipeline = processing_stream .map(move |mut content: Tcontent| { let send = collection_send.clone(); spawn_return!({ unsafe{ let mut bz_buffer: bzip2_sys::bz_stream = mem::zeroed(); bzip2_sys::BZ2_bzDecompressInit(&mut bz_buffer as *mut _, 0, 0); bz_buffer.next_in = content.buffer_input.as_ptr() as *mut _; bz_buffer.avail_in = content.buffer_input.len() as _; bz_buffer.next_out = content.buffer_output.as_mut_ptr() as *mut _; bz_buffer.avail_out = content.buffer_output.len() as _; bzip2_sys::BZ2_bzDecompress(&mut bz_buffer as *mut _); bzip2_sys::BZ2_bzDecompressEnd(&mut bz_buffer as *mut _); content.output_size = bz_buffer.total_out_lo32; } send.send(content).unwrap(); }) }).buffer_unordered(threads) .for_each(|_| { Ok(())}) .map_err(|e| println!("Error = {:?}", e)); tokio::run(pipeline); drop(send); let mut collection: Vec<Tcontent> = recv.iter().collect(); collection.sort_by_key(|content| content.order); let system_duration = start.elapsed().expect("Failed to get render time?"); let in_sec = system_duration.as_secs() as f64 + system_duration.subsec_nanos() as f64 * 1e-9; println!("Execution time: {} sec", in_sec); for content in collection { buffer_output.extend(&content.buffer_output[0..content.output_size as usize]); } buf_write.write_all(&buffer_output).unwrap(); std::fs::remove_file(file_name).unwrap(); } }
function_block-full_function
[ { "content": "struct Tcontent {\n\n\torder: u64,\n\n\tbuffer_input: Vec<u8>,\n\n\tbuffer_output: Vec<u8>,\n\n\toutput_size: u32,\n\n}\n\n\n\npub struct Reorder {\n\n storage: BTreeMap<u64, Tcontent>,\n\n}\n\n\n\nimpl Reorder {\n\n fn new() -> Reorder {\n\n Reorder {\n\n storage: BTreeMap::<u64, Tcontent>::new(),\n\n }\n\n }\n\n\n\n fn enqueue(&mut self, item: Tcontent) {\n\n self.storage.insert(item.order, item);\n", "file_path": "bzip2/src/std_threads.rs", "rank": 1, "score": 135165.86684190633 }, { "content": "struct Tcontent {\n\n\tbuffer_input: Vec<u8>,\n\n\tbuffer_output: Vec<u8>,\n\n\toutput_size: u32,\n\n}\n\n\n", "file_path": "bzip2/src/rust_ssp.rs", "rank": 2, "score": 135165.86684190633 }, { "content": "struct Tcontent {\n\n size: usize,\n\n line: i64,\n\n line_buffer: Vec<u8>,\n\n a_buffer: Vec<f64>,\n\n b_buffer: Vec<f64>,\n\n k_buffer: Vec<i32>,\n\n}\n\n\n\nmacro_rules! spawn_return {\n\n ($block:expr) => {{\n\n let (sender, receiver) = oneshot::channel::<_>();\n\n tokio::spawn(lazy(move || {\n\n let result = $block;\n\n sender.send(result).ok();\n\n Ok(())\n\n }));\n\n receiver\n\n }};\n\n}\n\n\n", "file_path": "micro-bench/src/tokio.rs", "rank": 3, "score": 135165.86684190633 }, { "content": "struct Tcontent {\n\n size: usize,\n\n line: i64,\n\n line_buffer: Vec<u8>,\n\n a_buffer: Vec<f64>,\n\n b_buffer: Vec<f64>,\n\n k_buffer: Vec<i32>,\n\n}\n\n\n", "file_path": "micro-bench/src/std_threads.rs", "rank": 4, "score": 132681.59912241343 }, { "content": "struct Tcontent {\n\n size: usize,\n\n line: i64,\n\n line_buffer: Vec<u8>,\n\n a_buffer: Vec<f64>,\n\n b_buffer: Vec<f64>,\n\n k_buffer: Vec<i32>,\n\n}\n\n\n", "file_path": "micro-bench/src/rust_ssp.rs", "rank": 5, "score": 132681.59912241343 }, { "content": "struct TcontentIter {\n\n\torder: usize,\n\n\tbuffer_input: Vec<u8>,\n\n\tbuffer_output: Vec<u8>,\n\n\toutput_size: u32,\n\n}\n\n\n", "file_path": "bzip2/src/rayon.rs", "rank": 6, "score": 105676.19259143861 }, { "content": "struct TcontentIter 
{\n\n\tbuffer_input: Vec<u8>,\n\n\tbuffer_output: Vec<u8>,\n\n\toutput_size: u32,\n\n}\n\n\n\n\n\n\n", "file_path": "bzip2/src/pipeliner.rs", "rank": 7, "score": 105676.19259143861 }, { "content": "struct TcontentIter {\n\n size: usize,\n\n line: i64,\n\n line_buffer: Vec<u8>,\n\n a_buffer: Vec<f64>,\n\n b_buffer: Vec<f64>,\n\n k_buffer: Vec<i32>,\n\n}\n\n\n\nimpl TcontentIter {\n\n fn new(size: usize) -> TcontentIter {\n\n TcontentIter { size,\n\n line: -1,\n\n line_buffer: vec![0; size],\n\n a_buffer: vec![0.0; size],\n\n b_buffer: vec![0.0; size],\n\n k_buffer: vec![0; size], }\n\n }\n\n}\n\n\n", "file_path": "micro-bench/src/pipeliner.rs", "rank": 8, "score": 103191.92487194571 }, { "content": "struct TcontentIter {\n\n size: usize,\n\n line: i64,\n\n line_buffer: Vec<u8>,\n\n a_buffer: Vec<f64>,\n\n b_buffer: Vec<f64>,\n\n k_buffer: Vec<i32>,\n\n}\n\n\n\nimpl TcontentIter {\n\n fn new(size: usize) -> TcontentIter {\n\n TcontentIter { size,\n\n line: -1,\n\n line_buffer: vec![0; size],\n\n a_buffer: vec![0.0; size],\n\n b_buffer: vec![0.0; size],\n\n k_buffer: vec![0; size], }\n\n }\n\n}\n\n\n", "file_path": "micro-bench/src/rayon.rs", "rank": 9, "score": 103191.92487194571 }, { "content": "// Internals: This is a thread-local object for in blocks\n\nstruct InBlockInfo<TInput, TCollected> {\n\n handler: Box<dyn In<TInput, TCollected>>\n\n}\n\n\n\n\n\nimpl <TInput, TCollected> PipelineBlock<TInput, TCollected> for InBlock<TInput, TCollected> {\n\n\n\n //used by the public API\n\n fn process(&self, input: WorkItem<TInput>) {\n\n match self.ordering {\n\n //For the unordered case, just enqueue it\n\n OrderingMode::Unordered => {\n\n (*self.work_queue).enqueue(input);\n\n },\n\n //For the ordered case: All InBlocks are single threaded\n\n //so we keep a count. 
Store under an atomic counter\n\n //in case we implement a multithreaded outblock\n\n OrderingMode::Ordered => {\n\n let c = self.counter.load(Ordering::SeqCst);\n\n (*self.ordered_work).enqueue(TimestampedWorkItem(input, c as u64));\n", "file_path": "libs/rust-ssp/src/blocks/in_block.rs", "rank": 10, "score": 99480.09980593054 }, { "content": "struct PrintResult;\n\nimpl In<String> for PrintResult {\n\n fn process(&mut self, input: String, order: u64) {\n\n // println!(\"Finished image {:?} {:?}\", order, input)\n\n }\n\n}\n\n\n", "file_path": "libs/rust-ssp/src/image_processing.rs", "rank": 11, "score": 98685.34492754703 }, { "content": "struct StreamData {\n\n\torder: u64,\n\n frame: Mat,\n\n equalized: Option<Mat>,\n\n faces: Option<types::VectorOfRect>,\n\n}\n\nunsafe impl Sync for StreamData {}\n\nunsafe impl Send for StreamData {}\n\n\n\npub struct Reorder {\n\n storage: BTreeMap<u64, StreamData>,\n\n}\n\n\n\nimpl Reorder {\n\n fn new() -> Reorder {\n\n Reorder {\n\n storage: BTreeMap::<u64, StreamData>::new(),\n\n }\n\n }\n\n\n", "file_path": "eye-detector/src/par_better.rs", "rank": 12, "score": 96833.62335381501 }, { "content": " UChar* block;\n", "file_path": "bzip2/bzip2-sys/bzip2-1.0.8/bzlib_private.h", "rank": 13, "score": 96342.45371626757 }, { "content": "struct StreamData {\n\n\torder: u64,\n\n frame: Mat,\n\n equalized: Option<Mat>,\n\n faces: Option<types::VectorOfRect>,\n\n}\n\nunsafe impl Sync for StreamData {}\n\nunsafe impl Send for StreamData {}\n\n\n\npub struct Reorder {\n\n storage: BTreeMap<u64, StreamData>,\n\n}\n\n\n\nimpl Reorder {\n\n fn new() -> Reorder {\n\n Reorder {\n\n storage: BTreeMap::<u64, StreamData>::new(),\n\n }\n\n }\n\n\n", "file_path": "eye-detector/src/par_std_threads.rs", "rank": 14, "score": 94770.10335328373 }, { "content": "struct SaveImageAndGetResult;\n\nimpl InOut<ImageToProcess, String> for SaveImageAndGetResult {\n\n fn process(&mut self, input: ImageToProcess) -> Option<String> {\n\n let result_dir = 
\"../processed_images\";\n\n\n\n let result = result_dir.to_owned()\n\n + \"/\"\n\n + input.path.file_stem().unwrap().to_str().unwrap()\n\n + \"_processed.\"\n\n + input.path.extension().unwrap().to_str().unwrap();\n\n\n\n raster::save(&input.image, &result).unwrap();\n\n\n\n return Some(result.to_string());\n\n }\n\n}\n\n\n", "file_path": "libs/rust-ssp/src/image_processing.rs", "rank": 15, "score": 94688.97593787193 }, { "content": "// Internals: This is a thread-local object for inout blocks\n\nstruct InOutBlockInfo<TInput, TOutput, TCollected> {\n\n next_step: Arc<Box<dyn PipelineBlock<TOutput, TCollected>>>,\n\n transformer: Box<dyn InOut<TInput, TOutput>>\n\n}\n\n\n\n//Internals: Processing queue for inout blocks in the pipeline\n\npub struct InOutBlock<TInput, TOutput, TCollected> {\n\n work_queue: Arc<BlockingQueue<TInput>>,\n\n next_step: Arc<Box<dyn PipelineBlock<TOutput, TCollected>>>,\n\n transformer_factory: Box<FnMut() -> Box<dyn InOut<TInput, TOutput>>>,\n\n replicas: i32,\n\n}\n\n\n\nimpl<TInput, TOutput, TCollected> InOutBlock<TInput, TOutput, TCollected> {\n\n pub fn send_stop(&self) {\n\n (*self.work_queue).enqueue(WorkItem::Stop);\n\n }\n\n}\n\n\n\nimpl<TInput: 'static, TCollected: 'static, TOutput: 'static> PipelineBlock<TInput, TCollected> \n", "file_path": "libs/rust-ssp/src/blocks/inout_block.rs", "rank": 16, "score": 92730.53932990001 }, { "content": "use crate::work_storage::*;\n\nuse std::collections::BTreeMap;\n\nuse std::sync::{Arc};\n\nuse parking_lot::{Mutex, Condvar};\n\n\n\npub struct BlockingOrderedSet<T> {\n\n storage: Mutex<BTreeMap<u64, TimestampedWorkItem<T>>>,\n\n new_item_notifier: Condvar,\n\n}\n\n\n\nimpl<T> BlockingOrderedSet<T> {\n\n pub fn new() -> Arc<BlockingOrderedSet<T>> {\n\n Arc::new(BlockingOrderedSet {\n\n storage: Mutex::new(BTreeMap::<u64, TimestampedWorkItem<T>>::new()),\n\n new_item_notifier: Condvar::new(),\n\n })\n\n }\n\n\n\n pub fn enqueue(&self, item: TimestampedWorkItem<T>) {\n\n let mut queue = 
self.storage.lock();\n", "file_path": "libs/rust-ssp/src/work_storage/blocking_ordered_set.rs", "rank": 17, "score": 67506.11253925881 }, { "content": " match item {\n\n TimestampedWorkItem(_, order) => queue.insert(order, item)\n\n };\n\n self.new_item_notifier.notify_one();\n\n }\n\n\n\n pub fn wait_and_remove(&self, item: u64) -> TimestampedWorkItem<T> {\n\n let mut storage = self.storage.lock();\n\n while (*storage).is_empty() || !(*storage).contains_key(&item) {\n\n self.new_item_notifier.wait(&mut storage);\n\n }\n\n let removed_item = storage.remove(&item);\n\n\n\n match removed_item {\n\n Some(value) => return value,\n\n None => { panic!(\"Condition variable waited until item was found, but removal failed\") }\n\n }\n\n }\n\n}\n\n\n\nunsafe impl<T> Send for BlockingOrderedSet<T> {}\n\nunsafe impl<T> Sync for BlockingOrderedSet<T> {}\n", "file_path": "libs/rust-ssp/src/work_storage/blocking_ordered_set.rs", "rank": 18, "score": 67501.90067953774 }, { "content": "struct EmitterDecompress {\n\n buffer_input: Vec<u8>,\n\n block_size: usize,\n\n order: usize,\n\n queue_blocks: Vec<(usize, usize)>,\n\n}\n\n\n\nimpl EmitterDecompress {\n\n fn new(buffer_input: Vec<u8>, queue_blocks: Vec<(usize, usize)>) -> EmitterDecompress {\n\n EmitterDecompress { buffer_input,\n\n block_size: 900000,\n\n\t\t\t order: 0,\n\n\t\t\t queue_blocks,\n\n\t\t\t}\n\n }\n\n}\n\n\n\nimpl Iterator for EmitterDecompress {\n\n type Item = TcontentIter;\n\n\n", "file_path": "bzip2/src/pipeliner.rs", "rank": 19, "score": 67254.87391448101 }, { "content": "struct EmitterCompress {\n\n buffer_input: Vec<u8>,\n\n block_size: usize,\n\n pos_init: usize,\n\n pos_end: usize,\n\n bytes_left: usize,\n\n}\n\n\n\nimpl EmitterCompress {\n\n fn new(buffer_input: Vec<u8>) -> EmitterCompress {\n\n let bytes_left = buffer_input.len();\n\n EmitterCompress { buffer_input,\n\n block_size: 900000,\n\n pos_init: 0,\n\n\t\t\t pos_end: 0,\n\n\t\t\t bytes_left,\n\n\t\t\t}\n\n }\n\n}\n\n\n", "file_path": 
"bzip2/src/pipeliner.rs", "rank": 20, "score": 67254.87391448101 }, { "content": "struct EmitterCompress {\n\n buffer_input: Vec<u8>,\n\n block_size: usize,\n\n pos_init: usize,\n\n pos_end: usize,\n\n bytes_left: usize,\n\n order: usize,\n\n}\n\n\n\nimpl EmitterCompress {\n\n fn new(buffer_input: Vec<u8>) -> EmitterCompress {\n\n let bytes_left = buffer_input.len();\n\n EmitterCompress { buffer_input,\n\n block_size: 900000,\n\n pos_init: 0,\n\n\t\t\t pos_end: 0,\n\n\t\t\t bytes_left,\n\n\t\t\t order: 0,\n\n\t\t\t}\n\n }\n", "file_path": "bzip2/src/rayon.rs", "rank": 21, "score": 67254.87391448101 }, { "content": "struct EmitterDecompress {\n\n buffer_input: Vec<u8>,\n\n block_size: usize,\n\n order: usize,\n\n queue_blocks: Vec<(usize, usize)>,\n\n}\n\n\n\nimpl EmitterDecompress {\n\n fn new(buffer_input: Vec<u8>, queue_blocks: Vec<(usize, usize)>) -> EmitterDecompress {\n\n EmitterDecompress { buffer_input,\n\n block_size: 900000,\n\n\t\t\t order: 0,\n\n\t\t\t queue_blocks,\n\n\t\t\t}\n\n }\n\n}\n\n\n\nimpl Iterator for EmitterDecompress {\n\n type Item = TcontentIter;\n\n\n", "file_path": "bzip2/src/rayon.rs", "rank": 22, "score": 67254.87391448101 }, { "content": "struct WriteOutput{\n\n buf_write : std::fs::File,\n\n}\n\nimpl WriteOutput {\n\n fn new(compressed_file_name : &str) -> WriteOutput {\n\n WriteOutput { buf_write: File::create(compressed_file_name).unwrap() } \n\n }\n\n}\n\nimpl In<Tcontent> for WriteOutput {\n\n fn process(&mut self, content: Tcontent, _order: u64){\n\n self.buf_write.write(&content.buffer_output[0..content.output_size as usize]).unwrap();\n\n }\n\n}\n\n\n\n\n", "file_path": "bzip2/src/rust_ssp.rs", "rank": 23, "score": 66017.0340476183 }, { "content": "#[derive(Clone)]\n\nstruct ThreadsState{\n\n t_handle: Thread,\n\n is_parked: bool\n\n}\n\n\n\npub struct BetterCrossbeam {\n\n blocked_by_empty: RwLock<Vec<ThreadsState>>,\n\n blocked_by_full: RwLock<Vec<ThreadsState>>,\n\n}\n\n\n\nimpl BetterCrossbeam {\n\n pub fn new(threads: 
usize) -> Arc<BetterCrossbeam> {\n\n Arc::new(BetterCrossbeam {\n\n blocked_by_empty: RwLock::new(vec![ThreadsState{t_handle: thread::current(), is_parked:false};threads]),\n\n blocked_by_full: RwLock::new(vec![ThreadsState{t_handle: thread::current(), is_parked:false};threads]),\n\n })\n\n }\n\n //Sender threads may only be blocked by full state\n\n pub fn set_send_handle(&self, t_id: usize, thread: Thread){\n\n let mut t_handle = self.blocked_by_full.write().unwrap();\n", "file_path": "eye-detector/src/par_better.rs", "rank": 24, "score": 64856.98423550462 }, { "content": "struct MatData {\n\n frame: Mat,\n\n}\n\nunsafe impl Sync for MatData {}\n\nunsafe impl Send for MatData {}\n\n\n", "file_path": "eye-detector/src/par_tokio.rs", "rank": 25, "score": 64856.98423550462 }, { "content": "struct Finalize {\n\n counter: Arc<Mutex<i64>>\n\n}\n\n\n\nimpl Out<i32> for Finalize {\n\n fn process(&mut self, input: i32, order: u64) {\n\n *self.counter.lock().unwrap() += 1;\n\n }\n\n}\n\n\n\nlazy_static! {\n\n static ref counter : Arc<Mutex<i64>> = Arc::new(Mutex::new(0));\n\n}\n\n\n", "file_path": "libs/rust-ssp/src/minimal_example.rs", "rank": 26, "score": 64856.98423550462 }, { "content": "struct EyesData {\n\n frame: Mat,\n\n equalized: Mat,\n\n faces: types::VectorOfRect,\n\n}\n\nunsafe impl Sync for EyesData {}\n\nunsafe impl Send for EyesData {}\n\n\n\nmacro_rules! 
spawn_return {\n\n ($block:expr) => {{\n\n let (sender, receiver) = oneshot::channel::<_>();\n\n tokio::spawn(lazy(move || {\n\n let result = $block;\n\n sender.send(result).ok();\n\n Ok(())\n\n }));\n\n receiver\n\n }};\n\n}\n\n\n", "file_path": "eye-detector/src/par_tokio.rs", "rank": 27, "score": 64856.98423550462 }, { "content": "struct DetectEyes{\n\n eye_detector : objdetect::CascadeClassifier,\n\n}\n\nimpl DetectEyes {\n\n fn new() -> DetectEyes {\n\n let eye_xml = core::find_file(\"config/haarcascade_eye.xml\", true, false).unwrap();\n\n let eye_detector = objdetect::CascadeClassifier::new(&eye_xml).unwrap();\n\n DetectEyes { eye_detector : eye_detector } \n\n }\n\n}\n\nimpl InOut<EyesData,MatData> for DetectEyes {\n\n fn process(&mut self, mut in_data : EyesData) -> Option<MatData>{\n\n for face in in_data.faces {\n\n\n\n let eyes = common::detect_eyes(&core::Mat::roi(&in_data.equalized,face).unwrap(),\n\n &mut self.eye_detector).unwrap();\n\n\n\n common::draw_in_frame(&mut in_data.frame,&eyes,&face).unwrap();\n\n\n\n }\n\n let out_data = MatData{ frame : in_data.frame };\n\n Some(out_data)\n\n }\n\n}\n\n\n", "file_path": "eye-detector/src/par_rust_spp.rs", "rank": 28, "score": 63767.61498425228 }, { "content": "struct ApplyGamma;\n\nimpl InOut<ImageToProcess, ImageToProcess> for ApplyGamma {\n\n fn process(&mut self, input: ImageToProcess) -> Option<ImageToProcess> {\n\n let mut input = input;\n\n filter::gamma(&mut input.image, 2.0).unwrap();\n\n Some(input)\n\n }\n\n}\n\n\n", "file_path": "libs/rust-ssp/src/image_processing.rs", "rank": 29, "score": 63767.61498425228 }, { "content": "struct MatData {\n\n frame: Mat,\n\n}\n\nunsafe impl Sync for MatData {}\n\nunsafe impl Send for MatData {}\n\n\n", "file_path": "eye-detector/src/par_rust_spp.rs", "rank": 30, "score": 63767.61498425228 }, { "content": "struct ApplyEmboss;\n\nimpl InOut<ImageToProcess, ImageToProcess> for ApplyEmboss {\n\n fn process(&mut self, input: ImageToProcess) -> 
Option<ImageToProcess> {\n\n let mut input = input;\n\n filter::emboss(&mut input.image).unwrap();\n\n Some(input)\n\n }\n\n}\n\n\n", "file_path": "libs/rust-ssp/src/image_processing.rs", "rank": 31, "score": 63767.61498425228 }, { "content": "struct WriteOutput{\n\n video_out : videoio::VideoWriter,\n\n}\n\nimpl WriteOutput {\n\n fn new(fps_out : f64, frame_size : core::Size) -> WriteOutput {\n\n\t let fourcc = videoio::VideoWriter::fourcc(\n\n\t 'm' as i8,\n\n\t 'p' as i8,\n\n\t 'g' as i8,\n\n\t '1' as i8\n\n\t ).unwrap();\n\n\t\tlet video_out = videoio::VideoWriter::new(\n\n\t \"output.avi\",\n\n\t fourcc,\n\n\t fps_out,\n\n\t frame_size,\n\n\t true\n\n\t ).unwrap();\n\n\t let out_opened = videoio::VideoWriter::is_opened(&video_out).unwrap();\n\n\t if !out_opened {\n", "file_path": "eye-detector/src/par_rust_spp.rs", "rank": 32, "score": 63767.61498425228 }, { "content": "struct ApplySharpen;\n\nimpl InOut<ImageToProcess, ImageToProcess> for ApplySharpen {\n\n fn process(&mut self, input: ImageToProcess) -> Option<ImageToProcess> {\n\n let mut input = input;\n\n filter::sharpen(&mut input.image).unwrap();\n\n Some(input)\n\n }\n\n}\n\n\n", "file_path": "libs/rust-ssp/src/image_processing.rs", "rank": 33, "score": 63767.61498425228 }, { "content": "struct ApplyGrayscale;\n\nimpl InOut<ImageToProcess, ImageToProcess> for ApplyGrayscale {\n\n fn process(&mut self, input: ImageToProcess) -> Option<ImageToProcess> {\n\n let mut input = input;\n\n filter::grayscale(&mut input.image).unwrap();\n\n Some(input)\n\n }\n\n}\n\n\n", "file_path": "libs/rust-ssp/src/image_processing.rs", "rank": 34, "score": 63767.61498425228 }, { "content": "struct EyesData {\n\n frame: Mat,\n\n equalized: Mat,\n\n faces: types::VectorOfRect,\n\n}\n\nunsafe impl Sync for EyesData {}\n\nunsafe impl Send for EyesData {}\n\n\n", "file_path": "eye-detector/src/par_rust_spp.rs", "rank": 35, "score": 63767.61498425228 }, { "content": "struct ImageToProcess {\n\n path: PathBuf,\n\n image: 
raster::Image,\n\n}\n\n\n", "file_path": "libs/rust-ssp/src/image_processing.rs", "rank": 36, "score": 63767.61498425228 }, { "content": "struct DummySave;\n\nimpl In<ImageToProcess> for DummySave {\n\n fn process(&mut self, input: ImageToProcess, order: u64) {\n\n // println!(\"Finished image {:?} {:?}\", order, input)\n\n }\n\n}\n\n\n\nmacro_rules! time {\n\n ($e:expr, $clock_name:expr) => {\n\n // let start = time::precise_time_s();\n\n $e;\n\n // let end = time::precise_time_s();\n\n // println!(\"{:?} took {:?}\", $clock_name, end - start);\n\n };\n\n}\n\n\n", "file_path": "libs/rust-ssp/src/image_processing.rs", "rank": 37, "score": 63767.61498425228 }, { "content": "struct DetectFaces{\n\n face_detector : objdetect::CascadeClassifier,\n\n}\n\nimpl DetectFaces {\n\n fn new() -> DetectFaces {\n\n let face_xml = core::find_file(\"config/haarcascade_frontalface_alt.xml\", true, false).unwrap();\n\n let face_detector = objdetect::CascadeClassifier::new(&face_xml).unwrap();\n\n DetectFaces { face_detector : face_detector } \n\n }\n\n}\n\nimpl InOut<MatData,EyesData> for DetectFaces {\n\n fn process(&mut self, in_data: MatData) -> Option<EyesData>{\n\n // Convert to gray and equalize frame\n\n let equalized = common::prepare_frame(&in_data.frame).unwrap();\n\n\n\n // Detect faces\n\n let faces = common::detect_faces(&equalized,&mut self.face_detector).unwrap();\n\n\n\n let out_data = EyesData{frame : in_data.frame, \n\n equalized : equalized, \n\n faces : faces};\n\n Some(out_data)\n\n }\n\n}\n\n\n\n\n", "file_path": "eye-detector/src/par_rust_spp.rs", "rank": 38, "score": 63767.61498425228 }, { "content": "struct ApplyMoreSaturation;\n\nimpl InOut<ImageToProcess, ImageToProcess> for ApplyMoreSaturation {\n\n fn process(&mut self, input: ImageToProcess) -> Option<ImageToProcess> {\n\n let mut input = input;\n\n filter::saturation(&mut input.image, 0.2).unwrap();\n\n Some(input)\n\n }\n\n}\n\n\n", "file_path": "libs/rust-ssp/src/image_processing.rs", "rank": 39, 
"score": 63767.61498425228 }, { "content": "struct LoadImage;\n\nimpl InOut<PathBuf, ImageToProcess> for LoadImage {\n\n fn process(&mut self, input: PathBuf) -> Option<ImageToProcess> {\n\n Some(ImageToProcess {\n\n image: raster::open(input.to_str().unwrap()).unwrap(),\n\n path: input,\n\n })\n\n }\n\n}\n\n\n", "file_path": "libs/rust-ssp/src/image_processing.rs", "rank": 40, "score": 63767.61498425228 }, { "content": "struct MultiplyBy2 {\n\n rng: ThreadRng,\n\n}\n\nimpl InOut<i32, i32> for MultiplyBy2 {\n\n fn process(&mut self, input: i32) -> i32 {\n\n //thread::sleep(Duration::from_millis(self.rng.gen_range(1, 100)));\n\n input\n\n }\n\n}\n\n\n", "file_path": "libs/rust-ssp/src/minimal_example.rs", "rank": 41, "score": 63767.61498425228 }, { "content": "struct ResizeTo500pxWidth;\n\nimpl InOut<ImageToProcess, ImageToProcess> for ResizeTo500pxWidth {\n\n fn process(&mut self, input: ImageToProcess) -> Option<ImageToProcess> {\n\n let mut input = input;\n\n raster::transform::resize_exact_width(&mut input.image, 500).unwrap();\n\n Some(input)\n\n }\n\n}\n\n\n", "file_path": "libs/rust-ssp/src/image_processing.rs", "rank": 42, "score": 62742.65753475725 }, { "content": " Bool initialisedOk;\n", "file_path": "bzip2/bzip2-sys/bzip2-1.0.8/bzlib.c", "rank": 43, "score": 60450.997266248465 }, { "content": " Int32 nInUse;\n", "file_path": "bzip2/bzip2-sys/bzip2-1.0.8/bzlib_private.h", "rank": 44, "score": 60436.730742884574 }, { "content": " Bool inUse[256];\n", "file_path": "bzip2/bzip2-sys/bzip2-1.0.8/bzlib_private.h", "rank": 45, "score": 60428.90244405676 }, { "content": " Int32 blockNo;\n", "file_path": "bzip2/bzip2-sys/bzip2-1.0.8/bzlib_private.h", "rank": 46, "score": 60089.179120983885 }, { "content": "#define M_BLOCK_OUT (M_BLOCK + 1000000)\n", "file_path": "bzip2/bzip2-sys/bzip2-1.0.8/unzcrash.c", "rank": 47, "score": 60089.179120983885 }, { "content": "static void tooManyBlocks ( Int32 max_handled_blocks )\n\n{\n\n fprintf ( stderr,\n\n \"%s: `%s' appears 
to contain more than %d blocks\\n\",\n\n progName, inFileName, max_handled_blocks );\n\n fprintf ( stderr,\n\n \"%s: and cannot be handled. To fix, increase\\n\",\n\n progName );\n\n fprintf ( stderr, \n\n \"%s: BZ_MAX_HANDLED_BLOCKS in bzip2recover.c, and recompile.\\n\",\n\n progName );\n\n exit ( 1 );\n", "file_path": "bzip2/bzip2-sys/bzip2-1.0.8/bzip2recover.c", "rank": 48, "score": 60089.179120983885 }, { "content": "#define M_BLOCK 1000000\n\n\n", "file_path": "bzip2/bzip2-sys/bzip2-1.0.8/unzcrash.c", "rank": 49, "score": 60089.179120983885 }, { "content": "static\n\nint bz_config_ok ( void )\n\n{\n\n if (sizeof(int) != 4) return 0;\n\n if (sizeof(short) != 2) return 0;\n\n if (sizeof(char) != 1) return 0;\n\n return 1;\n", "file_path": "bzip2/bzip2-sys/bzip2-1.0.8/bzlib.c", "rank": 50, "score": 58809.42876535595 }, { "content": " Int32 nblock_used;\n", "file_path": "bzip2/bzip2-sys/bzip2-1.0.8/bzlib_private.h", "rank": 51, "score": 58787.93393598129 }, { "content": "void BZ2_blockSort ( EState* s )\n\n{\n\n UInt32* ptr = s->ptr; \n\n UChar* block = s->block;\n\n UInt32* ftab = s->ftab;\n\n Int32 nblock = s->nblock;\n\n Int32 verb = s->verbosity;\n\n Int32 wfact = s->workFactor;\n\n UInt16* quadrant;\n\n Int32 budget;\n\n Int32 budgetInit;\n\n Int32 i;\n\n\n\n if (nblock < 10000) {\n\n fallbackSort ( s->arr1, s->arr2, ftab, nblock, verb );\n\n } else {\n\n /* Calculate the location for quadrant, remembering to get\n\n the alignment right. Assumes that &(block[0]) is at least\n\n 2-byte aligned -- this should be ok since block is really\n\n the first section of arr2.\n\n */\n\n i = nblock+BZ_N_OVERSHOOT;\n\n if (i & 1) i++;\n\n quadrant = (UInt16*)(&(block[i]));\n\n\n\n /* (wfact-1) / 3 puts the default-factor-30\n\n transition point at very roughly the same place as \n\n with v0.1 and v0.9.0. 
\n\n Not that it particularly matters any more, since the\n\n resulting compressed stream is now the same regardless\n\n of whether or not we use the main sort or fallback sort.\n\n */\n\n if (wfact < 1 ) wfact = 1;\n\n if (wfact > 100) wfact = 100;\n\n budgetInit = nblock * ((wfact-1) / 3);\n\n budget = budgetInit;\n\n\n\n mainSort ( ptr, block, quadrant, ftab, nblock, verb, &budget );\n\n if (verb >= 3) \n\n VPrintf3 ( \" %d work, %d block, ratio %5.2f\\n\",\n\n budgetInit - budget,\n\n nblock, \n\n (float)(budgetInit - budget) /\n\n (float)(nblock==0 ? 1 : nblock) ); \n\n if (budget < 0) {\n\n if (verb >= 2) \n\n VPrintf0 ( \" too repetitive; using fallback\"\n\n \" sorting algorithm\\n\" );\n\n fallbackSort ( s->arr1, s->arr2, ftab, nblock, verb );\n\n }\n\n }\n\n\n\n s->origPtr = -1;\n\n for (i = 0; i < s->nblock; i++)\n\n if (ptr[i] == 0)\n\n { s->origPtr = i; break; };\n\n\n\n AssertH( s->origPtr != -1, 1003 );\n", "file_path": "bzip2/bzip2-sys/bzip2-1.0.8/blocksort.c", "rank": 52, "score": 58464.096483021676 }, { "content": " UInt32 blockCRC;\n", "file_path": "bzip2/bzip2-sys/bzip2-1.0.8/bzlib_private.h", "rank": 53, "score": 58461.29728980095 }, { "content": " Int32 blockSize100k;\n", "file_path": "bzip2/bzip2-sys/bzip2-1.0.8/bzlib_private.h", "rank": 54, "score": 58457.43592152853 }, { "content": " Int32 currBlockNo;\n", "file_path": "bzip2/bzip2-sys/bzip2-1.0.8/bzlib_private.h", "rank": 55, "score": 58457.43592152853 }, { "content": "#define BLOCK_HEADER_HI 0x00003141UL\n", "file_path": "bzip2/bzip2-sys/bzip2-1.0.8/bzip2recover.c", "rank": 56, "score": 58457.43592152853 }, { "content": "#define BLOCK_ENDMARK_HI 0x00001772UL\n", "file_path": "bzip2/bzip2-sys/bzip2-1.0.8/bzip2recover.c", "rank": 57, "score": 58457.43592152853 }, { "content": "#define BLOCK_ENDMARK_LO 0x45385090UL\n\n\n", "file_path": "bzip2/bzip2-sys/bzip2-1.0.8/bzip2recover.c", "rank": 58, "score": 58457.43592152853 }, { "content": "static\n\nvoid add_pair_to_block ( EState* s )\n\n{\n\n 
Int32 i;\n\n UChar ch = (UChar)(s->state_in_ch);\n\n for (i = 0; i < s->state_in_len; i++) {\n\n BZ_UPDATE_CRC( s->blockCRC, ch );\n\n }\n\n s->inUse[s->state_in_ch] = True;\n\n switch (s->state_in_len) {\n\n case 1:\n\n s->block[s->nblock] = (UChar)ch; s->nblock++;\n\n break;\n\n case 2:\n\n s->block[s->nblock] = (UChar)ch; s->nblock++;\n\n s->block[s->nblock] = (UChar)ch; s->nblock++;\n\n break;\n\n case 3:\n\n s->block[s->nblock] = (UChar)ch; s->nblock++;\n\n s->block[s->nblock] = (UChar)ch; s->nblock++;\n\n s->block[s->nblock] = (UChar)ch; s->nblock++;\n\n break;\n\n default:\n\n s->inUse[s->state_in_len-4] = True;\n\n s->block[s->nblock] = (UChar)ch; s->nblock++;\n\n s->block[s->nblock] = (UChar)ch; s->nblock++;\n\n s->block[s->nblock] = (UChar)ch; s->nblock++;\n\n s->block[s->nblock] = (UChar)ch; s->nblock++;\n\n s->block[s->nblock] = ((UChar)(s->state_in_len-4));\n\n s->nblock++;\n\n break;\n\n }\n", "file_path": "bzip2/bzip2-sys/bzip2-1.0.8/bzlib.c", "rank": 59, "score": 58457.43592152853 }, { "content": "#define BLOCK_HEADER_LO 0x59265359UL\n\n\n", "file_path": "bzip2/bzip2-sys/bzip2-1.0.8/bzip2recover.c", "rank": 60, "score": 58457.43592152853 }, { "content": "static\n\nvoid prepare_new_block ( EState* s )\n\n{\n\n Int32 i;\n\n s->nblock = 0;\n\n s->numZ = 0;\n\n s->state_out_pos = 0;\n\n BZ_INITIALISE_CRC ( s->blockCRC );\n\n for (i = 0; i < 256; i++) s->inUse[i] = False;\n\n s->blockNo++;\n", "file_path": "bzip2/bzip2-sys/bzip2-1.0.8/bzlib.c", "rank": 61, "score": 58457.43592152853 }, { "content": "void BZ2_compressBlock ( EState* s, Bool is_last_block )\n\n{\n\n if (s->nblock > 0) {\n\n\n\n BZ_FINALISE_CRC ( s->blockCRC );\n\n s->combinedCRC = (s->combinedCRC << 1) | (s->combinedCRC >> 31);\n\n s->combinedCRC ^= s->blockCRC;\n\n if (s->blockNo > 1) s->numZ = 0;\n\n\n\n if (s->verbosity >= 2)\n\n VPrintf4( \" block %d: crc = 0x%08x, \"\n\n \"combined CRC = 0x%08x, size = %d\\n\",\n\n s->blockNo, s->blockCRC, s->combinedCRC, s->nblock );\n\n\n\n 
BZ2_blockSort ( s );\n\n }\n\n\n\n s->zbits = (UChar*) (&((UChar*)s->arr2)[s->nblock]);\n\n\n\n /*-- If this is the first block, create the stream header. --*/\n\n if (s->blockNo == 1) {\n\n BZ2_bsInitWrite ( s );\n\n bsPutUChar ( s, BZ_HDR_B );\n\n bsPutUChar ( s, BZ_HDR_Z );\n\n bsPutUChar ( s, BZ_HDR_h );\n\n bsPutUChar ( s, (UChar)(BZ_HDR_0 + s->blockSize100k) );\n\n }\n\n\n\n if (s->nblock > 0) {\n\n\n\n bsPutUChar ( s, 0x31 ); bsPutUChar ( s, 0x41 );\n\n bsPutUChar ( s, 0x59 ); bsPutUChar ( s, 0x26 );\n\n bsPutUChar ( s, 0x53 ); bsPutUChar ( s, 0x59 );\n\n\n\n /*-- Now the block's CRC, so it is in a known place. --*/\n\n bsPutUInt32 ( s, s->blockCRC );\n\n\n\n /*-- \n\n Now a single bit indicating (non-)randomisation. \n\n As of version 0.9.5, we use a better sorting algorithm\n\n which makes randomisation unnecessary. So always set\n\n the randomised bit to 'no'. Of course, the decoder\n\n still needs to be able to handle randomised blocks\n\n so as to maintain backwards compatibility with\n\n older versions of bzip2.\n\n --*/\n\n bsW(s,1,0);\n\n\n\n bsW ( s, 24, s->origPtr );\n\n generateMTFValues ( s );\n\n sendMTFValues ( s );\n\n }\n\n\n\n\n\n /*-- If this is the last block, add the stream trailer. 
--*/\n\n if (is_last_block) {\n\n\n\n bsPutUChar ( s, 0x17 ); bsPutUChar ( s, 0x72 );\n\n bsPutUChar ( s, 0x45 ); bsPutUChar ( s, 0x38 );\n\n bsPutUChar ( s, 0x50 ); bsPutUChar ( s, 0x90 );\n\n bsPutUInt32 ( s, s->combinedCRC );\n\n if (s->verbosity >= 2)\n\n VPrintf1( \" final combined CRC = 0x%08x\\n \", s->combinedCRC );\n\n bsFinishWrite ( s );\n\n }\n", "file_path": "bzip2/bzip2-sys/bzip2-1.0.8/compress.c", "rank": 62, "score": 58457.43592152853 }, { "content": " Bool blockRandomised;\n", "file_path": "bzip2/bzip2-sys/bzip2-1.0.8/bzlib_private.h", "rank": 63, "score": 58457.43592152853 }, { "content": "#define BZ_MAX_HANDLED_BLOCKS 50000\n\n\n", "file_path": "bzip2/bzip2-sys/bzip2-1.0.8/bzip2recover.c", "rank": 64, "score": 56915.2901758894 }, { "content": "extern void \n", "file_path": "bzip2/bzip2-sys/bzip2-1.0.8/bzlib_private.h", "rank": 65, "score": 56911.97094893262 }, { "content": "extern void \n", "file_path": "bzip2/bzip2-sys/bzip2-1.0.8/bzlib_private.h", "rank": 66, "score": 56911.97094893262 }, { "content": " UInt32 calculatedBlockCRC;\n", "file_path": "bzip2/bzip2-sys/bzip2-1.0.8/bzlib_private.h", "rank": 67, "score": 56911.97094893262 }, { "content": " UInt32 storedBlockCRC;\n", "file_path": "bzip2/bzip2-sys/bzip2-1.0.8/bzlib_private.h", "rank": 68, "score": 56911.97094893262 }, { "content": "fn main() -> std::io::Result<()>{\n\n let args: Vec<String> = env::args().collect();\n\n if args.len() < 4 {\n\n println!();\n\n panic!(\"Correct usage: $ ./{:?} <runtime> <nthreads> <compress/decompress> <file name>\", args[0]);\n\n }\n\n let run_mode = &args[1];\n\n let threads = args[2].parse::<usize>().unwrap();\n\n let file_action = &args[3];\n\n let file_name = &args[4];\n\n\n\n\tmatch run_mode.as_str() {\n\n \"sequential\" => sequential::sequential(file_action, file_name),\n\n \"sequential-io\" => sequential::sequential_io(file_action, file_name),\n\n \"rust-ssp\" => rust_ssp::rust_ssp(threads, file_action, file_name),\n\n \"rust-ssp-io\" => 
rust_ssp::rust_ssp_io(threads, file_action, file_name),\n\n \"std-threads\" => std_threads::std_threads(threads, file_action, file_name),\n\n \"std-threads-io\" => std_threads::std_threads_io(threads, file_action, file_name),\n\n \"tokio\" => tokio::tokio(threads, file_action, file_name),\n\n \"tokio-io\" => tokio::tokio_io(threads, file_action, file_name),\n\n \"rayon\" => rayon::rayon(threads, file_action, file_name),\n\n \"pipeliner\" => pipeliner::pipeliner(threads, file_action, file_name),\n\n _ => println!(\"Invalid run_mode, use: sequential | rust-ssp | std-threads | tokio | rayon | pipeliner\"),\n\n \n\n }\n\n\t\n\n\tOk(())\n\n}\n", "file_path": "bzip2/src/main.rs", "rank": 69, "score": 54973.08512603896 }, { "content": "fn main() -> std::io::Result<()>{\n\n let args: Vec<String> = env::args().collect();\n\n if args.len() < 4 {\n\n println!();\n\n panic!(\"Correct usage: $ ./{:?} <runtime> <nthreads> <images dir>\", args[0]);\n\n }\n\n let run_mode = &args[1];\n\n let threads = args[2].parse::<usize>().unwrap();\n\n let dir_name = &args[3];\n\n\n\n\tmatch run_mode.as_str() {\n\n \"sequential\" => sequential::sequential(dir_name),\n\n \"rust-ssp\" => rust_ssp::rust_ssp(dir_name, threads),\n\n \"pipeliner\" => pipeliner::pipeliner(dir_name, threads),\n\n \"tokio\" => tokio::tokio(dir_name, threads),\n\n \"rayon\" => rayon::rayon(dir_name, threads),\n\n \"std-threads\" => std_threads::std_threads(dir_name, threads),\n\n _ => println!(\"Invalid run_mode, use: sequential | rust-ssp | std-threads | tokio | rayon | pipeliner\"),\n\n \n\n }\n\n\tOk(())\n\n}\n", "file_path": "image-processing/src/main.rs", "rank": 70, "score": 53875.910848203974 }, { "content": "use crate::*;\n\nuse crate::blocks::*;\n\nuse work_storage::{WorkItem, TimestampedWorkItem};\n\nuse std::sync::Arc;\n\nuse std::sync::atomic::{Ordering, AtomicUsize};\n\nuse std::thread::JoinHandle;\n\nuse std::thread;\n\nuse work_storage::{BlockingQueue, BlockingOrderedSet};\n\nuse 
parking_lot::{Mutex};\n\n\n\n//Public API: An output node, receives values and causes side effects\n", "file_path": "libs/rust-ssp/src/blocks/in_block.rs", "rank": 71, "score": 46935.04920515952 }, { "content": " Ok(result) => result.into_inner(),\n\n Err(_) => {\n\n panic!(\"Could not unwrap Arc in call to collect\");\n\n }\n\n }\n\n }\n\n}\n\n\n\n\n\n\n\nimpl<TInput: 'static, TCollected: 'static> InBlock<TInput, TCollected>\n\nwhere\n\n TInput: Send,\n\n TInput: Sync,\n\n TCollected: Send,\n\n TCollected: Sync,\n\n{\n\n pub fn monitor_posts(&mut self) -> MonitorLoop {\n\n match self.ordering {\n\n OrderingMode::Ordered => self.monitor_ordered(),\n", "file_path": "libs/rust-ssp/src/blocks/in_block.rs", "rank": 72, "score": 46931.20768559156 }, { "content": " };\n\n }\n\n })\n\n }\n\n\n\n}\n\n\n\n\n\nimpl<TInput, TCollected> InBlock<TInput, TCollected> {\n\n pub fn new(behavior: BlockMode, factory: Box<FnMut() -> Box<dyn In<TInput, TCollected>>>) -> InBlock<TInput, TCollected> {\n\n match behavior {\n\n BlockMode::Parallel(_) => unimplemented!(\"parallel inblocks not implemented\"),\n\n BlockMode::Sequential(ordering) => InBlock {\n\n work_queue: BlockingQueue::new(),\n\n handler: factory,\n\n ordering: ordering,\n\n ordered_work: BlockingOrderedSet::new(),\n\n counter: AtomicUsize::new(0),\n\n collected_items: Arc::new(Mutex::new(vec![]))\n\n },\n\n }\n\n }\n\n}\n\n\n\nunsafe impl<TInput, TCollected> Send for InBlockInfo<TInput, TCollected> {}\n\nunsafe impl<TInput, TCollected> Sync for InBlockInfo<TInput, TCollected> {}\n", "file_path": "libs/rust-ssp/src/blocks/in_block.rs", "rank": 73, "score": 46928.67988476977 }, { "content": "use crate::work_storage::{WorkItem, TimestampedWorkItem};\n\n\n\n\n\n//Base trait for all blocks in the pipeline\n\n//Used by the internals. 
Should be able to detal with\n\n//timestamped items and also perform some automatic timestamping on its own\n", "file_path": "libs/rust-ssp/src/blocks/blocks.rs", "rank": 74, "score": 46925.963828481705 }, { "content": " self.counter.store(c + 1, Ordering::SeqCst);\n\n }\n\n };\n\n ()\n\n }\n\n\n\n //Used internally\n\n fn process_timestamped(&self, input: TimestampedWorkItem<TInput>) {\n\n match self.ordering {\n\n OrderingMode::Unordered => match input {\n\n TimestampedWorkItem(work_item, _) => {\n\n (*self.work_queue).enqueue(work_item);\n\n }\n\n },\n\n OrderingMode::Ordered => (*self.ordered_work).enqueue(input)\n\n };\n\n }\n\n\n\n fn collect(self: Box<Self>) -> Vec<TCollected> {\n\n match Arc::try_unwrap(self.collected_items) {\n", "file_path": "libs/rust-ssp/src/blocks/in_block.rs", "rank": 75, "score": 46925.80810381563 }, { "content": " let collected: TCollected = info.handler.process(val, order);\n\n (*collected_list).push(collected);\n\n },\n\n TimestampedWorkItem(WorkItem::Dropped, order) => {\n\n ()\n\n }\n\n TimestampedWorkItem(WorkItem::Stop, _) => {\n\n break;\n\n }\n\n };\n\n }\n\n })\n\n }\n\n\n\n pub fn monitor_ordered(&mut self) -> MonitorLoop {\n\n let storage = self.ordered_work.clone();\n\n \n\n let mut info = InBlockInfo {\n\n handler: (self.handler)()\n\n };\n", "file_path": "libs/rust-ssp/src/blocks/in_block.rs", "rank": 76, "score": 46925.67495364839 }, { "content": " OrderingMode::Unordered => self.monitor_unordered()\n\n }\n\n \n\n }\n\n\n\n fn monitor_unordered(&mut self) -> MonitorLoop {\n\n let queue = self.work_queue.clone();\n\n\n\n let mut info = InBlockInfo {\n\n handler: (self.handler)()\n\n };\n\n\n\n let arc_collected = self.collected_items.clone();\n\n\n\n MonitorLoop::new(move || {\n\n let mut collected_list = arc_collected.lock();\n\n loop {\n\n let item = queue.wait_and_dequeue();\n\n match item {\n\n TimestampedWorkItem(WorkItem::Value(val), order) => {\n", "file_path": "libs/rust-ssp/src/blocks/in_block.rs", "rank": 77, 
"score": 46924.512025601485 }, { "content": " let arc_collected = self.collected_items.clone();\n\n\n\n MonitorLoop::new(move || {\n\n let mut next_item = 0;\n\n let mut collected_list = arc_collected.lock();\n\n loop {\n\n let item = storage.wait_and_remove(next_item);\n\n match item {\n\n TimestampedWorkItem(WorkItem::Value(val), order) => {\n\n debug_assert!(order == next_item);\n\n next_item += 1;\n\n let collected: TCollected = info.handler.process(val, order);\n\n (*collected_list).push(collected);\n\n }\n\n TimestampedWorkItem(WorkItem::Dropped, order) => {\n\n next_item += 1;\n\n }\n\n TimestampedWorkItem(WorkItem::Stop, _) => {\n\n break;\n\n }\n", "file_path": "libs/rust-ssp/src/blocks/in_block.rs", "rank": 78, "score": 46922.8404276402 }, { "content": " loop_function: Box<FnOnce() -> () + Send>\n\n}\n\n\n\nimpl MonitorLoop {\n\n\n\n pub fn new<F>(function: F) -> MonitorLoop \n\n where F: FnOnce() -> (), F: Send + 'static {\n\n MonitorLoop {\n\n loop_function: Box::new(function)\n\n }\n\n }\n\n\n\n pub fn run(self) {\n\n (self.loop_function)()\n\n }\n\n\n\n}", "file_path": "libs/rust-ssp/src/blocks/blocks.rs", "rank": 79, "score": 46919.10733166643 }, { "content": "//Base trait for all blocks in the pipeline\n\n//Used by the internals. 
Should be able to detal with\n\n//timestamped items and also perform some automatic timestamping on its own\n\npub trait PipelineBlock<TInput, TCollected> {\n\n fn process(&self, input: WorkItem<TInput>);\n\n fn process_timestamped(&self, input: TimestampedWorkItem<TInput>);\n\n fn collect(self: Box<Self>) -> Vec<TCollected>;\n\n}\n\n\n\n#[derive(Clone, Copy)]\n\npub enum OrderingMode {\n\n Unordered,\n\n Ordered,\n\n}\n\n\n\npub enum BlockMode {\n\n Sequential(OrderingMode),\n\n Parallel(i32)\n\n}\n\n\n\n\n\n\n\npub struct MonitorLoop {\n", "file_path": "libs/rust-ssp/src/blocks/blocks.rs", "rank": 80, "score": 46690.19524478339 }, { "content": "use crate::blocks::*;\n\nuse crate::work_storage::*;\n\nuse std::sync::Arc;\n\nuse std::sync::atomic::{AtomicUsize, Ordering};\n\nuse std::thread::JoinHandle;\n\nuse std::thread;\n\n\n\n// Public API: A Input-Output node; transforms some value into another\n", "file_path": "libs/rust-ssp/src/blocks/inout_block.rs", "rank": 81, "score": 45981.60433536814 }, { "content": "for InOutBlock<TInput, TOutput, TCollected>\n\nwhere\n\n TInput: Send,\n\n TInput: Sync,\n\n{\n\n //used by the public API. 
Always unordered\n\n fn process(&self, input: WorkItem<TInput>) {\n\n (*self.work_queue).enqueue(input);\n\n }\n\n\n\n //Used internally\n\n fn process_timestamped(&self, input: TimestampedWorkItem<TInput>) {\n\n (*self.work_queue).enqueue_timestamped(input)\n\n }\n\n\n\n fn collect(self: Box<Self>) -> Vec<TCollected> {\n\n match Arc::try_unwrap(self.next_step) {\n\n Ok(result) => result.collect(),\n\n Err(_) => {\n\n panic!(\"Could not unwrap Arc in call to collect\");\n", "file_path": "libs/rust-ssp/src/blocks/inout_block.rs", "rank": 82, "score": 45979.016495686876 }, { "content": " let alive_threads = Arc::new(AtomicUsize::new(self.replicas as usize));\n\n\n\n for _ in 0..self.replicas {\n\n let queue = self.work_queue.clone();\n\n let alive_threads = alive_threads.clone();\n\n \n\n let mut info = InOutBlockInfo {\n\n next_step: self.next_step.clone(),\n\n transformer: (self.transformer_factory)(),\n\n };\n\n \n\n let monitor_loop = MonitorLoop::new(move || {\n\n \n\n loop {\n\n let dequeued = queue.wait_and_dequeue();\n\n\n\n match dequeued {\n\n TimestampedWorkItem(WorkItem::Value(val), order) => {\n\n let output = info.transformer.process(val);\n\n\n", "file_path": "libs/rust-ssp/src/blocks/inout_block.rs", "rank": 83, "score": 45976.27740157688 }, { "content": " BlockMode::Sequential(_) => InOutBlock::new_block(next_step, transformer_factory, 1),\n\n }\n\n }\n\n \n\n pub fn new_block(\n\n next_step: Box<dyn PipelineBlock<TOutput, TCollected>>,\n\n transformer: Box<FnMut() -> Box<dyn InOut<TInput, TOutput>>>,\n\n replicas: i32,\n\n ) -> InOutBlock<TInput, TOutput, TCollected> {\n\n InOutBlock {\n\n work_queue: BlockingQueue::new(),\n\n next_step: Arc::new(next_step),\n\n transformer_factory: transformer,\n\n replicas: replicas,\n\n }\n\n }\n\n\n\n\n\n pub fn monitor_posts(&mut self) -> Vec<MonitorLoop> {\n\n let mut monitors: Vec<MonitorLoop> = vec![];\n", "file_path": "libs/rust-ssp/src/blocks/inout_block.rs", "rank": 84, "score": 45973.32422067188 }, { 
"content": " if let Some(val) = output {\n\n info.next_step.process_timestamped(TimestampedWorkItem(\n\n WorkItem::Value(val),\n\n order,\n\n ));\n\n } else {\n\n info.next_step.process_timestamped(TimestampedWorkItem(\n\n WorkItem::Dropped,\n\n order,\n\n ));\n\n }\n\n },\n\n TimestampedWorkItem(WorkItem::Dropped, order) => {\n\n info.next_step.process_timestamped(TimestampedWorkItem(\n\n WorkItem::Dropped,\n\n order,\n\n ));\n\n },\n\n TimestampedWorkItem(WorkItem::Stop, order) => {\n\n let mut threads = alive_threads.load(Ordering::SeqCst);\n", "file_path": "libs/rust-ssp/src/blocks/inout_block.rs", "rank": 85, "score": 45973.291331765344 }, { "content": " }\n\n }\n\n }\n\n\n\n}\n\n\n\nimpl<TInput: 'static, TOutput: 'static, TCollected: 'static> InOutBlock<TInput, TOutput, TCollected>\n\nwhere\n\n TInput: Send,\n\n TInput: Sync,\n\n{\n\n pub fn new(\n\n next_step: Box<dyn PipelineBlock<TOutput, TCollected>>,\n\n transformer: BlockMode,\n\n transformer_factory: Box<FnMut() -> Box<dyn InOut<TInput, TOutput>>>\n\n ) -> InOutBlock<TInput, TOutput, TCollected> {\n\n match transformer {\n\n BlockMode::Parallel(replicas) => {\n\n InOutBlock::new_block(next_step, transformer_factory, replicas)\n\n }\n", "file_path": "libs/rust-ssp/src/blocks/inout_block.rs", "rank": 86, "score": 45973.22862869503 }, { "content": " \n\n threads -= 1;\n\n\n\n if threads == 0 {\n\n info.next_step.process_timestamped(TimestampedWorkItem(\n\n WorkItem::Stop,\n\n order,\n\n ));\n\n }\n\n\n\n alive_threads.store(threads, Ordering::SeqCst);\n\n\n\n //reenqueue the same item\n\n queue.enqueue_timestamped(dequeued);\n\n\n\n break;\n\n }\n\n }\n\n }\n\n });\n", "file_path": "libs/rust-ssp/src/blocks/inout_block.rs", "rank": 87, "score": 45972.74443882209 }, { "content": " monitors.push(monitor_loop);\n\n }\n\n\n\n return monitors;\n\n }\n\n\n\n}\n\n\n\n/* Assume a MapBlock can be passed to threads, and assume we'll implement parallelism correctly */\n\nunsafe impl<TInput, TOutput, TCollected> Send 
for InOutBlockInfo<TInput, TOutput, TCollected> {}\n\nunsafe impl<TInput, TOutput, TCollected> Sync for InOutBlockInfo<TInput, TOutput, TCollected> {}\n", "file_path": "libs/rust-ssp/src/blocks/inout_block.rs", "rank": 88, "score": 45972.632067453604 }, { "content": "pub fn seq_eye_tracker(input_video: &String) -> opencv::Result<()> {\n\n\n\n let mut video_in = videoio::VideoCapture::from_file(input_video, videoio::CAP_FFMPEG)?;\n\n let in_opened = videoio::VideoCapture::is_opened(&video_in)?;\n\n if !in_opened {\n\n panic!(\"Unable to open input video {:?}!\", input_video);\n\n }\n\n let frame_size = core::Size::new(video_in.get(videoio::VideoCaptureProperties::CAP_PROP_FRAME_WIDTH as i32)? as i32,\n\n video_in.get(videoio::VideoCaptureProperties::CAP_PROP_FRAME_HEIGHT as i32)? as i32,);\n\n let fourcc = videoio::VideoWriter::fourcc(\n\n 'm' as i8,\n\n 'p' as i8,\n\n 'g' as i8,\n\n '1' as i8\n\n )?;\n\n let fps_out = video_in.get(videoio::VideoCaptureProperties::CAP_PROP_FPS as i32)?;\n\n let mut video_out = videoio::VideoWriter::new(\n\n \"output.avi\",\n\n fourcc,\n\n fps_out,\n", "file_path": "eye-detector/src/seq.rs", "rank": 89, "score": 45616.586636683576 }, { "content": "//Public API: An output node, receives values and causes side effects\n\npub trait In<TInput, TCollected=()> {\n\n fn process(&mut self, input: TInput, order: u64) -> TCollected;\n\n}\n\n\n\n\n\nimpl <TInput, TCollected, F> In<TInput, TCollected> for F where F: FnMut(TInput) -> TCollected {\n\n fn process(&mut self, input: TInput, _order: u64) -> TCollected {\n\n (*self)(input)\n\n }\n\n}\n\n\n\n//Internals: InBlock processing queue for blocks in the pipeline\n\npub struct InBlock<TInput, TCollected> {\n\n work_queue: Arc<BlockingQueue<TInput>>,\n\n ordered_work: Arc<BlockingOrderedSet<TInput>>,\n\n collected_items: Arc<Mutex<Vec<TCollected>>>,\n\n handler: Box<FnMut() -> Box<dyn In<TInput, TCollected>>>,\n\n ordering: OrderingMode,\n\n counter: AtomicUsize\n\n}\n\n\n", "file_path": 
"libs/rust-ssp/src/blocks/in_block.rs", "rank": 90, "score": 42528.32721954833 }, { "content": "// Public API: A Input-Output node; transforms some value into another\n\npub trait InOut<TInput, TOutput> {\n\n fn process(&mut self, input: TInput) -> Option<TOutput>;\n\n}\n\n\n\n\n\nimpl <TInput, TOutput, F> InOut<TInput, TOutput> for F where F: FnMut(TInput) -> Option<TOutput> {\n\n fn process(&mut self, input: TInput) -> Option<TOutput> {\n\n (*self)(input)\n\n }\n\n}\n\n\n\n\n", "file_path": "libs/rust-ssp/src/blocks/inout_block.rs", "rank": 91, "score": 41742.583173584986 }, { "content": "pub fn better_eye_tracker(input_video: &String, nthreads: i32) -> opencv::Result<()> {\n\n\n\n let nthreads = nthreads as usize;\n\n\n\n let queue1: Arc<BetterCrossbeam> = BetterCrossbeam::new(nthreads);\n\n let queue2: Arc<BetterCrossbeam> = BetterCrossbeam::new(nthreads);\n\n let queue3: Arc<BetterCrossbeam> = BetterCrossbeam::new(nthreads);\n\n\n\n\tlet (queue1_send, queue1_recv) = bounded(512);\n\n let (queue2_send, queue2_recv) = bounded(512);\n\n let (queue3_send, queue3_recv) = bounded(512);\n\n\n\n\tlet mut video_in = videoio::VideoCapture::from_file(input_video, videoio::CAP_FFMPEG).unwrap();\n\n let in_opened = videoio::VideoCapture::is_opened(&video_in).unwrap();\n\n if !in_opened {\n\n panic!(\"Unable to open input video {:?}!\", input_video);\n\n }\n\n let frame_size = core::Size::new(video_in.get(videoio::VideoCaptureProperties::CAP_PROP_FRAME_WIDTH as i32).unwrap() as i32,\n\n video_in.get(videoio::VideoCaptureProperties::CAP_PROP_FRAME_HEIGHT as i32).unwrap() as i32,);\n\n let fourcc = videoio::VideoWriter::fourcc(\n", "file_path": "eye-detector/src/par_better.rs", "rank": 92, "score": 40921.00642773063 }, { "content": "pub fn tokio_eye_tracker(input_video: &String, nthreads: i32) -> opencv::Result<()> {\n\n let mut video_in = videoio::VideoCapture::from_file(input_video, videoio::CAP_FFMPEG)?;\n\n let in_opened = videoio::VideoCapture::is_opened(&video_in)?;\n\n 
if !in_opened {\n\n panic!(\"Unable to open input video {:?}!\", input_video);\n\n }\n\n let frame_size = core::Size::new(video_in.get(videoio::VideoCaptureProperties::CAP_PROP_FRAME_WIDTH as i32)? as i32,\n\n video_in.get(videoio::VideoCaptureProperties::CAP_PROP_FRAME_HEIGHT as i32)? as i32,);\n\n let fourcc = videoio::VideoWriter::fourcc(\n\n 'm' as i8,\n\n 'p' as i8,\n\n 'g' as i8,\n\n '1' as i8\n\n )?;\n\n let fps_out = video_in.get(videoio::VideoCaptureProperties::CAP_PROP_FPS as i32)?;\n\n let mut video_out : videoio::VideoWriter = videoio::VideoWriter::new(\n\n \"output.avi\",\n\n fourcc,\n\n fps_out,\n\n frame_size,\n", "file_path": "eye-detector/src/par_tokio.rs", "rank": 93, "score": 40921.00642773063 }, { "content": "pub fn std_threads_eye_tracker(input_video: &String, nthreads: i32) -> opencv::Result<()> {\n\n\n\n\tlet (queue1_send, queue1_recv) = bounded(512);\n\n let (queue2_send, queue2_recv) = bounded(512);\n\n let (queue3_send, queue3_recv) = bounded(512);\n\n\n\n\tlet mut video_in = videoio::VideoCapture::from_file(input_video, videoio::CAP_FFMPEG).unwrap();\n\n let in_opened = videoio::VideoCapture::is_opened(&video_in).unwrap();\n\n if !in_opened {\n\n panic!(\"Unable to open input video {:?}!\", input_video);\n\n }\n\n let frame_size = core::Size::new(video_in.get(videoio::VideoCaptureProperties::CAP_PROP_FRAME_WIDTH as i32).unwrap() as i32,\n\n video_in.get(videoio::VideoCaptureProperties::CAP_PROP_FRAME_HEIGHT as i32).unwrap() as i32,);\n\n let fourcc = videoio::VideoWriter::fourcc(\n\n 'm' as i8,\n\n 'p' as i8,\n\n 'g' as i8,\n\n '1' as i8\n\n ).unwrap();\n\n let fps_out = video_in.get(videoio::VideoCaptureProperties::CAP_PROP_FPS as i32).unwrap();\n", "file_path": "eye-detector/src/par_std_threads.rs", "rank": 94, "score": 39609.845266170465 }, { "content": "pub fn rust_spp_eye_tracker(input_video: &String, nthreads: i32) -> opencv::Result<()> {\n\n let mut video_in = videoio::VideoCapture::from_file(input_video, videoio::CAP_FFMPEG)?;\n\n 
let in_opened = videoio::VideoCapture::is_opened(&video_in)?;\n\n if !in_opened {\n\n panic!(\"Unable to open input video {:?}!\", input_video);\n\n }\n\n let frame_size = core::Size::new(video_in.get(videoio::VideoCaptureProperties::CAP_PROP_FRAME_WIDTH as i32)? as i32,\n\n video_in.get(videoio::VideoCaptureProperties::CAP_PROP_FRAME_HEIGHT as i32)? as i32,);\n\n let fps_out = video_in.get(videoio::VideoCaptureProperties::CAP_PROP_FPS as i32)?;\n\n\n\n let mut pipeline = pipeline![\n\n parallel!(DetectFaces::new(), nthreads),\n\n parallel!(DetectEyes::new(), nthreads),\n\n sequential_ordered!(WriteOutput::new(fps_out,frame_size))\n\n ];\n\n\n\n loop {\n\n // Read and post frames\n\n let mut frame = Mat::default()?;\n\n video_in.read(&mut frame)?;\n\n if frame.size()?.width == 0 {\n\n break;\n\n }\n\n pipeline.post(MatData{frame : frame}).unwrap();\n\n }\n\n\n\n pipeline.end_and_wait();\n\n\n\n Ok(())\n\n}", "file_path": "eye-detector/src/par_rust_spp.rs", "rank": 95, "score": 39609.845266170465 }, { "content": "pub fn pipeliner_pipeline(size: usize, threads: usize, iter_size1: i32, iter_size2: i32) {\n\n\n\n let start = SystemTime::now();\n\n let collection: Vec<TcontentIter> = TcontentIter::new(size)\n\n .with_threads(threads)\n\n .out_buffer(512)\n\n .ordered_map(move |mut content: TcontentIter| { \n\n let init_a = -2.125 as f64;\n\n let init_b = -1.5 as f64;\n\n let range = 3.0 as f64;\n\n let step = range / (size as f64);\n\n\n\n let im = init_b + (step * (content.line as f64));\n\n\n\n for j in 0 .. size {\n\n\n\n let mut a = init_a + step * j as f64;\n\n let cr = a;\n\n\n\n let mut b = im;\n", "file_path": "micro-bench/src/pipeliner.rs", "rank": 96, "score": 38515.66352518885 }, { "content": "use raster::filter;\n\nuse raster::Image;\n\nuse std::time::{SystemTime};\n\n\n\nuse{\n\n futures::future::lazy,\n\n futures::sync::*,\n\n futures::{stream, Future, Stream},\n\n};\n\n\n\nmacro_rules! 
spawn_return {\n\n ($block:expr) => {{\n\n let (sender, receiver) = oneshot::channel::<_>();\n\n tokio::spawn(lazy(move || {\n\n let result = $block;\n\n sender.send(result).ok();\n\n Ok(())\n\n }));\n\n receiver\n\n }};\n\n}\n\n\n", "file_path": "image-processing/src/tokio.rs", "rank": 97, "score": 29.214616263458876 }, { "content": " let end = time::precise_time_s();\n\n return end - start;\n\n}\n\n\n\nuse futures::future::lazy;\n\nuse futures::sync::*;\n\nuse futures::{stream, Future, Stream};\n\nuse tokio::prelude::*;\n\nuse tokio::*;\n\nuse tokio_core::reactor::Core;\n\n\n\nmacro_rules! spawn_return {\n\n ($block:expr) => {{\n\n let (sender, receiver) = oneshot::channel::<_>();\n\n tokio::spawn(lazy(move || {\n\n let result = $block;\n\n sender.send(result).ok();\n\n Ok(())\n\n }));\n\n receiver\n\n }};\n\n}\n\n\n", "file_path": "libs/rust-ssp/src/image_processing.rs", "rank": 98, "score": 28.744456640171567 } ]
Rust
kernel/src/process.rs
liva/node-replicated-kernel
2d953c3a984b0c3a48b6368062b6abdf5146da2a
use alloc::boxed::Box; use alloc::format; use alloc::string::{String, ToString}; use alloc::sync::Arc; use alloc::vec::Vec; use core::convert::TryInto; use cstr_core::CStr; use custom_error::custom_error; use kpi::process::FrameId; use crate::arch::memory::paddr_to_kernel_vaddr; use crate::arch::memory::LARGE_PAGE_SIZE; use crate::arch::process::UserPtr; use crate::arch::Module; use crate::error::KError; use crate::fs::Fd; use crate::kcb; use crate::memory::vspace::AddressSpace; use crate::memory::KernelAllocator; use crate::memory::{Frame, PhysicalPageProvider, VAddr}; use crate::prelude::overlaps; use crate::{mlnr, nr, round_up}; #[derive(PartialEq, Clone, Debug)] pub struct KernSlice { pub buffer: Arc<[u8]>, } impl KernSlice { pub fn new(base: u64, len: usize) -> KernSlice { let buffer = Arc::<[u8]>::new_uninit_slice(len); let mut buffer = unsafe { buffer.assume_init() }; let mut user_ptr = VAddr::from(base); let slice_ptr = UserPtr::new(&mut user_ptr); let user_slice: &mut [u8] = unsafe { core::slice::from_raw_parts_mut(slice_ptr.as_mut_ptr(), len) }; unsafe { Arc::get_mut_unchecked(&mut buffer).copy_from_slice(&user_slice[0..len]) }; KernSlice { buffer } } } pub fn userptr_to_str(useraddr: u64) -> Result<String, KError> { let mut user_ptr = VAddr::from(useraddr); let str_ptr = UserPtr::new(&mut user_ptr); unsafe { match CStr::from_ptr(str_ptr.as_ptr()).to_str() { Ok(path) => { if !path.is_ascii() || path.is_empty() { return Err(KError::NotSupported); } return Ok(String::from(path)); } Err(_) => return Err(KError::NotSupported), } } } pub type Pid = u64; pub type Eid = u64; custom_error! 
{ #[derive(PartialEq, Clone)] pub ProcessError ProcessCreate{desc: String} = "Unable to create process: {desc}", ProcessNotSet = "The core has no current process set.", NoProcessFoundForPid = "No process was associated with the given Pid.", UnableToLoad = "Couldn't load process, invalid ELF file?", UnableToParseElf = "Couldn't parse ELF file, invalid?", NoExecutorAllocated = "We never allocated executors for this affinity region and process (need to fill cache).", ExecutorCacheExhausted = "The executor cache for given affinity is empty (need to refill)", InvalidGlobalThreadId = "Specified an invalid core", ExecutorNoLongerValid = "The excutor was removed from the current core.", ExecutorAlreadyBorrowed = "The executor on the core was already borrowed (that's a bug).", NotEnoughMemory = "Unable to reserve memory for internal process data-structures.", InvalidFrameId = "The provided FrameId is not registered with the process", } impl From<&str> for ProcessError { fn from(_err: &str) -> Self { ProcessError::UnableToLoad } } impl From<alloc::collections::TryReserveError> for ProcessError { fn from(_err: alloc::collections::TryReserveError) -> Self { ProcessError::NotEnoughMemory } } pub trait Process { type E: Executor + Copy + Sync + Send; type A: AddressSpace; fn new(module: &Module, pid: Pid, writable_sections: Vec<Frame>) -> Result<Self, ProcessError> where Self: core::marker::Sized; fn try_reserve_executors( &self, how_many: usize, affinity: topology::NodeId, ) -> Result<(), alloc::collections::TryReserveError>; fn allocate_executors(&mut self, frame: Frame) -> Result<usize, ProcessError>; fn vspace_mut(&mut self) -> &mut Self::A; fn vspace(&self) -> &Self::A; fn get_executor(&mut self, for_region: topology::NodeId) -> Result<Box<Self::E>, ProcessError>; fn allocate_fd(&mut self) -> Option<(u64, &mut Fd)>; fn deallocate_fd(&mut self, fd: usize) -> usize; fn get_fd(&self, index: usize) -> &Fd; fn pinfo(&self) -> &kpi::process::ProcessInfo; fn add_frame(&mut self, 
frame: Frame) -> Result<FrameId, ProcessError>; fn get_frame(&mut self, frame_id: FrameId) -> Result<Frame, ProcessError>; } pub trait ResumeHandle { unsafe fn resume(self) -> !; } pub trait Executor { type Resumer: ResumeHandle; fn id(&self) -> Eid; fn pid(&self) -> Pid; fn start(&self) -> Self::Resumer; fn resume(&self) -> Self::Resumer; fn upcall(&self, vector: u64, exception: u64) -> Self::Resumer; fn maybe_switch_vspace(&self); fn vcpu_kernel(&self) -> *mut kpi::arch::VirtualCpu; } struct DataSecAllocator { offset: VAddr, frames: Vec<(usize, Frame)>, } impl DataSecAllocator { fn finish(self) -> Vec<Frame> { self.frames .into_iter() .map(|(_offset, base)| base) .collect() } } impl elfloader::ElfLoader for DataSecAllocator { fn allocate(&mut self, load_headers: elfloader::LoadableHeaders) -> Result<(), &'static str> { for header in load_headers.into_iter() { let base = header.virtual_addr(); let size = header.mem_size() as usize; let flags = header.flags(); let page_mask = (LARGE_PAGE_SIZE - 1) as u64; let page_base: VAddr = VAddr::from(base & !page_mask); let size_page = round_up!(size + (base & page_mask) as usize, LARGE_PAGE_SIZE as usize); assert!(size_page >= size); assert_eq!(size_page % LARGE_PAGE_SIZE, 0); assert_eq!(page_base % LARGE_PAGE_SIZE, 0); if flags.is_write() { trace!( "base = {:#x} size = {:#x} page_base = {:#x} size_page = {:#x}", base, size, page_base, size_page ); let large_pages = size_page / LARGE_PAGE_SIZE; KernelAllocator::try_refill_tcache(0, large_pages).expect("Refill didn't work"); let kcb = crate::kcb::get_kcb(); let mut pmanager = kcb.mem_manager(); for i in 0..large_pages { let frame = pmanager .allocate_large_page() .expect("We refilled so allocation should work."); trace!( "add to self.frames (elf_va={:#x}, pa={:#x})", page_base.as_usize() + i * LARGE_PAGE_SIZE, frame.base ); self.frames .push((page_base.as_usize() + i * LARGE_PAGE_SIZE, frame)); } } } Ok(()) } fn load( &mut self, flags: elfloader::Flags, destination: u64, 
region: &[u8], ) -> Result<(), &'static str> { debug!( "load(): destination = {:#x} region.len() = {:#x}", destination, region.len(), ); if flags.is_write() { let mut destination: usize = destination.try_into().unwrap(); let mut region_remaining = region.len(); let mut region = region; for (elf_begin, frame) in self.frames.iter() { trace!( "load(): into process vspace at {:#x} #bytes {:#x} offset_in_frame = {:#x}", destination, region.len(), *elf_begin ); let range_frame_elf = *elf_begin..*elf_begin + frame.size; let range_region_elf = destination..destination + region_remaining; if overlaps(&range_region_elf, &range_frame_elf) { trace!( "The frame overlaps with copy region (range_frame_elf={:x?} range_region_elf={:x?})", range_frame_elf, range_region_elf ); let copy_start = core::cmp::max(range_frame_elf.start, range_region_elf.start) - destination; let copy_end = core::cmp::min(range_frame_elf.end, range_region_elf.end) - destination; let region_to_copy = &region[copy_start..copy_end]; trace!("copy range = {:x?}", copy_start..copy_end); let copy_in_frame_start = destination - *elf_begin; let frame_vaddr = paddr_to_kernel_vaddr(frame.base); unsafe { core::ptr::copy_nonoverlapping( region_to_copy.as_ptr(), frame_vaddr.as_mut_ptr::<u8>().add(copy_in_frame_start), copy_end - copy_start, ); trace!( "Copied {} bytes from {:p} to {:p}", copy_end - copy_start, region_to_copy.as_ptr(), frame_vaddr.as_mut_ptr::<u8>().add(copy_start) ); destination += copy_end - copy_start; region = &region[copy_end..]; region_remaining -= copy_end - copy_start; } } } } Ok(()) } fn relocate(&mut self, entry: &elfloader::Rela<elfloader::P64>) -> Result<(), &'static str> { let addr = self.offset + entry.get_offset(); for (pheader_offset, frame) in self.frames.iter() { let elf_vbase = self.offset + *pheader_offset & !(LARGE_PAGE_SIZE - 1); if addr >= elf_vbase && addr <= elf_vbase + frame.size() { let kernel_vaddr = paddr_to_kernel_vaddr(frame.base); let offset_in_frame = addr - elf_vbase; let 
kernel_addr = kernel_vaddr + offset_in_frame; trace!( "DataSecAllocator relocation paddr {:#x} kernel_addr {:#x}", offset_in_frame + frame.base.as_u64(), kernel_addr ); use elfloader::TypeRela64; if let TypeRela64::R_RELATIVE = TypeRela64::from(entry.get_type()) { unsafe { *(kernel_addr.as_mut_ptr::<u64>()) = self.offset.as_u64() + entry.get_addend(); } } else { return Err("Can only handle R_RELATIVE for relocation"); } } } Ok(()) } } pub fn make_process(binary: &'static str) -> Result<Pid, KError> { KernelAllocator::try_refill_tcache(7, 1)?; let kcb = kcb::get_kcb(); let mut mod_file = None; for module in &kcb.arch.kernel_args().modules { if module.name() == binary { mod_file = Some(module); } } let mod_file = mod_file.expect(format!("Couldn't find '{}' binary.", binary).as_str()); info!( "binary={} cmdline={} module={:?}", binary, kcb.cmdline.test_cmdline, mod_file ); let elf_module = unsafe { elfloader::ElfBinary::new(mod_file.name(), mod_file.as_slice()) .map_err(|_e| ProcessError::UnableToParseElf)? 
}; let offset = if !elf_module.is_pie() { VAddr::zero() } else { VAddr::from(0x20_0000_0000usize) }; let mut data_sec_loader = DataSecAllocator { offset, frames: Vec::with_capacity(2), }; elf_module .load(&mut data_sec_loader) .map_err(|_e| ProcessError::UnableToLoad)?; let data_frames: Vec<Frame> = data_sec_loader.finish(); kcb.replica .as_ref() .map_or(Err(KError::ReplicaNotSet), |(replica, token)| { let response = replica.execute_mut(nr::Op::ProcCreate(&mod_file, data_frames), *token); match response { Ok(nr::NodeResult::ProcCreated(pid)) => { if cfg!(feature = "mlnrfs") { match mlnr::MlnrKernelNode::add_process(pid) { Ok(pid) => Ok(pid.0), Err(e) => unreachable!("{}", e), } } else { Ok(pid) } } _ => unreachable!("Got unexpected response"), } }) } pub fn allocate_dispatchers(pid: Pid) -> Result<(), KError> { trace!("Allocate dispatchers"); let mut create_per_region: Vec<(topology::NodeId, usize)> = Vec::with_capacity(topology::MACHINE_TOPOLOGY.num_nodes() + 1); if topology::MACHINE_TOPOLOGY.num_nodes() > 0 { for node in topology::MACHINE_TOPOLOGY.nodes() { let threads = node.threads().count(); create_per_region.push((node.id, threads)); } } else { create_per_region.push((0, topology::MACHINE_TOPOLOGY.num_threads())); } for (affinity, to_create) in create_per_region { let mut dispatchers_created = 0; while dispatchers_created < to_create { KernelAllocator::try_refill_tcache(20, 1)?; let mut frame = { let kcb = crate::kcb::get_kcb(); kcb.physical_memory.gmanager.unwrap().node_caches[affinity as usize] .lock() .allocate_large_page()? 
}; unsafe { frame.zero(); } let kcb = crate::kcb::get_kcb(); kcb.replica .as_ref() .map_or(Err(KError::ReplicaNotSet), |(replica, token)| { let response = replica.execute_mut(nr::Op::DispatcherAllocation(pid, frame), *token)?; match response { nr::NodeResult::ExecutorsCreated(how_many) => { assert!(how_many > 0); dispatchers_created += how_many; Ok(how_many) } _ => unreachable!("Got unexpected response"), } }) .unwrap(); } } debug!("Allocated dispatchers"); Ok(()) }
use alloc::boxed::Box; use alloc::format; use alloc::string::{String, ToString}; use alloc::sync::Arc; use alloc::vec::Vec; use core::convert::TryInto; use cstr_core::CStr; use custom_error::custom_error; use kpi::process::FrameId; use crate::arch::memory::paddr_to_kernel_vaddr; use crate::arch::memory::LARGE_PAGE_SIZE; use crate::arch::process::UserPtr; use crate::arch::Module; use crate::error::KError; use crate::fs::Fd; use crate::kcb; use crate::memory::vspace::AddressSpace; use crate::memory::KernelAllocator; use crate::memory::{Frame, PhysicalPageProvider, VAddr}; use crate::prelude::overlaps; use crate::{mlnr, nr, round_up}; #[derive(PartialEq, Clone, Debug)] pub struct KernSlice { pub buffer: Arc<[u8]>, } impl KernSlice { pub fn new(base: u64, len: usize) -> KernSlice { let buffer = Arc::<[u8]>::new_uninit_slice(len); let mut buffer = unsafe { buffer.assume_init() }; let mut user_ptr = VAddr::from(base); let slice_ptr = UserPtr::new(&mut user_ptr); let user_slice: &mut [u8] = unsafe { core::slice::from_raw_parts_mut(slice_ptr.as_mut_ptr(), len) }; unsafe { Arc::get_mut_unchecked(&mut buffer).copy_from_slice(&user_slice[0..len]) }; KernSlice { buffer } } } pub fn userptr_to_str(useraddr: u64) -> Result<String, KError> { let mut user_ptr = VAddr::from(useraddr); let str_ptr = UserPtr::new(&mut user_ptr); unsafe { match CStr::from_ptr(str_ptr.as_ptr()).to_str() { Ok(path) => { if !path.is_ascii() || path.is_empty() { return Err(KError::NotSupported); } return Ok(String::from(path)); } Err(_) => return Err(KError::NotSupported), } } } pub type Pid = u64; pub type Eid = u64; custom_error! 
{ #[derive(PartialEq, Clone)] pub ProcessError ProcessCreate{desc: String} = "Unable to create process: {desc}", ProcessNotSet = "The core has no current process set.", NoProcessFoundForPid = "No process was associated with the given Pid.", UnableToLoad = "Couldn't load process, invalid ELF file?", UnableToParseElf = "Couldn't parse ELF file, invalid?", NoExecutorAllocated = "We never allocated executors for this affinity region and process (need to fill cache).", ExecutorCacheExhausted = "The executor cache for given affinity is empty (need to refill)", InvalidGlobalThreadId = "Specified an invalid core", ExecutorNoLongerValid = "The excutor was removed from the current core.", ExecutorAlreadyBorrowed = "The executor on the core was already borrowed (that's a bug).", NotEnoughMemory = "Unable to reserve memory for internal process data-structures.", InvalidFrameId = "The provided FrameId is not registered with the process", } impl From<&str> for ProcessError { fn from(_err: &str) -> Self { ProcessError::UnableToLoad } } impl From<alloc::collections::TryReserveError> for ProcessError { fn from(_err: alloc::collections::TryReserveError) -> Self { ProcessError::NotEnoughMemory } } pub trait Process { type E: Executor + Copy + Sync + Send; type A: AddressSpace; fn new(module: &Module, pid: Pid, writable_sections: Vec<Frame>) -> Result<Self, ProcessError> where Self: core::marker::Sized; fn try_reserve_executors( &self, how_many: usize, affinity: topology::NodeId, ) -> Result<(), alloc::collections::TryReserveError>; fn allocate_executors(&mut self, frame: Frame) -> Result<usize, ProcessError>; fn vspace_mut(&mut self) -> &mut Self::A; fn vspace(&self) -> &Self::A; fn get_executor(&mut self, for_region: topology::NodeId) -> Result<Box<Self::E>, ProcessError>; fn allocate_fd(&mut self) -> Option<(u64, &mut Fd)>; fn deallocate_fd(&mut self, fd: usize) -> usize; fn get_fd(&self, index: usize) -> &Fd; fn pinfo(&self) -> &kpi::process::ProcessInfo; fn add_frame(&mut self, 
frame: Frame) -> Result<FrameId, ProcessError>; fn get_frame(&mut self, frame_id: FrameId) -> Result<Frame, ProcessError>; } pub trait ResumeHandle { unsafe fn resume(self) -> !; } pub trait Executor { type Resumer: ResumeHandle; fn id(&self) -> Eid; fn pid(&self) -> Pid; fn start(&self) -> Self::Resumer; fn resume(&self) -> Self::Resumer; fn upcall(&self, vector: u64, exception: u64) -> Self::Resumer; fn maybe_switch_vspace(&self); fn vcpu_kernel(&self) -> *mut kpi::arch::VirtualCpu; } struct DataSecAllocator { offset: VAddr, frames: Vec<(usize, Frame)>, } impl DataSecAllocator { fn finish(self) -> Vec<Frame> { self.frames .into_iter() .map(|(_offset, base)| base) .collect() } } impl elfloader::ElfLoader for DataSecAllocator { fn allocate(&mut self, load_headers: elfloader::LoadableHeaders) -> Result<(), &'static str> { for header in load_headers.into_iter() { let base = header.virtual_addr(); let size = header.mem_size() as usize; let flags = header.flags(); let page_mask = (LARGE_PAGE_SIZE - 1) as u64; let page_base: VAddr = VAddr::from(base & !page_mask); let size_page = round_up!(size + (base & page_mask) as usize, LARGE_PAGE_SIZE as usize); assert!(size_page >= size); assert_eq!(size_page % LARGE_PAGE_SIZE, 0); assert_eq!(page_base % LARGE_PAGE_SIZE, 0); if flags.is_write() { trace!( "base = {:#x} size = {:#x} page_base = {:#x} size_page = {:#x}", base, size, page_base, size_page ); let large_pages = size_page / LARGE_PAGE_SIZE; KernelAllocator::try_refill_tcache(0, large_pages).expect("Refill didn't work"); let kcb = crate::kcb::get_kcb(); let mut pmanager = kcb.mem_manager(); for i in 0..large_pages { let frame = pmanager .allocate_large_page() .expect("We refilled so allocation should work."); trace!( "add to self.frames (elf_va={:#x}, pa={:#x})", page_base.as_usize() + i * LARGE_PAGE_SIZE, frame.base ); self.frames .push((page_base.as_usize() + i * LARGE_PAGE_SIZE, frame)); } } } Ok(()) } fn load( &mut self, flags: elfloader::Flags, destination: u64, 
region: &[u8], ) -> Result<(), &'static str> { debug!( "load(): destination = {:#x} region.len() = {:#x}", destination, region.len(), ); if flags.is_write() { let mut destination: usize = destination.try_into().unwrap(); let mut region_remaining = region.len(); let mut region = region; for (elf_begin, frame) in self.frames.iter() { trace!( "load(): into process vspace at {:#x} #bytes {:#x} offset_in_frame = {:#x}", destination, region.len(), *elf_begin ); let range_frame_elf = *elf_begin..*elf_begin + frame.size; let range_region_elf = destination..destination + region_remaining; if overlaps(&range_region_elf, &range_frame_elf) { trace!( "The frame overlaps with copy region (range_frame_elf={:x?} range_region_elf={:x?})", range_frame_elf, range_region_elf ); let copy_start = core::cmp::max(range_frame_elf.start, range_region_elf.start) - destination; let copy_end = core::cmp::min(range_frame_elf.end, range_region_elf.end) - destination; let region_to_copy = &region[copy_start..copy_end]; trace!("copy range = {:x?}", copy_start..copy_end); let copy_in_frame_start = destination - *elf_begin; let frame_vaddr = paddr_to_kernel_vaddr(frame.base); unsafe { core::ptr::copy_nonoverlapping( region_to_copy.as_ptr(), frame_vaddr.as_mut_ptr::<u8>().add(copy_in_frame_start), copy_end - copy_start, ); trace!( "Copied {} bytes from {:p} to {:p}", copy_end - copy_start, region_to_copy.as_ptr(), frame_vaddr.as_mut_ptr::<u8>().add(copy_start) ); destination += copy_end - copy_start; region = &region[copy_end..]; region_remaining -= copy_end - copy_start; } } } } Ok(()) } fn relocate(&mut self, entry: &elfloader::Rela<elfloader::P64>) -> Result<(), &'static str> { let addr = self.offset + entry.get_offset(); for (pheader_offset, frame) in self.frames.iter() { let elf_vbase = self.offset + *pheader_offset & !(LARGE_PAGE_SIZE - 1); if addr >= elf_vbase && addr <= elf_vbase + frame.size() { let kernel_vaddr = paddr_to_kernel_vaddr(frame.base); let offset_in_frame = addr - elf_vbase; let 
kernel_addr = kernel_vaddr + offset_in_frame; trace!( "DataSecAllocator relocation paddr {:#x} kernel_addr {:#x}", offset_in_frame + frame.base.as_u64(), kernel_addr ); use elfloader::TypeRela64; if let TypeRela64::R_RELATIVE = TypeRela64::from(entry.get_type()) { unsafe { *(kernel_addr.as_mut_ptr::<u64>()) = self.offset.as_u64() + entry.get_addend(); } } else { return Err("Can only handle R_RELATIVE for relocation"); } } } Ok(()) } }
pub fn allocate_dispatchers(pid: Pid) -> Result<(), KError> { trace!("Allocate dispatchers"); let mut create_per_region: Vec<(topology::NodeId, usize)> = Vec::with_capacity(topology::MACHINE_TOPOLOGY.num_nodes() + 1); if topology::MACHINE_TOPOLOGY.num_nodes() > 0 { for node in topology::MACHINE_TOPOLOGY.nodes() { let threads = node.threads().count(); create_per_region.push((node.id, threads)); } } else { create_per_region.push((0, topology::MACHINE_TOPOLOGY.num_threads())); } for (affinity, to_create) in create_per_region { let mut dispatchers_created = 0; while dispatchers_created < to_create { KernelAllocator::try_refill_tcache(20, 1)?; let mut frame = { let kcb = crate::kcb::get_kcb(); kcb.physical_memory.gmanager.unwrap().node_caches[affinity as usize] .lock() .allocate_large_page()? }; unsafe { frame.zero(); } let kcb = crate::kcb::get_kcb(); kcb.replica .as_ref() .map_or(Err(KError::ReplicaNotSet), |(replica, token)| { let response = replica.execute_mut(nr::Op::DispatcherAllocation(pid, frame), *token)?; match response { nr::NodeResult::ExecutorsCreated(how_many) => { assert!(how_many > 0); dispatchers_created += how_many; Ok(how_many) } _ => unreachable!("Got unexpected response"), } }) .unwrap(); } } debug!("Allocated dispatchers"); Ok(()) }
pub fn make_process(binary: &'static str) -> Result<Pid, KError> { KernelAllocator::try_refill_tcache(7, 1)?; let kcb = kcb::get_kcb(); let mut mod_file = None; for module in &kcb.arch.kernel_args().modules { if module.name() == binary { mod_file = Some(module); } } let mod_file = mod_file.expect(format!("Couldn't find '{}' binary.", binary).as_str()); info!( "binary={} cmdline={} module={:?}", binary, kcb.cmdline.test_cmdline, mod_file ); let elf_module = unsafe { elfloader::ElfBinary::new(mod_file.name(), mod_file.as_slice()) .map_err(|_e| ProcessError::UnableToParseElf)? }; let offset = if !elf_module.is_pie() { VAddr::zero() } else { VAddr::from(0x20_0000_0000usize) }; let mut data_sec_loader = DataSecAllocator { offset, frames: Vec::with_capacity(2), }; elf_module .load(&mut data_sec_loader) .map_err(|_e| ProcessError::UnableToLoad)?; let data_frames: Vec<Frame> = data_sec_loader.finish(); kcb.replica .as_ref() .map_or(Err(KError::ReplicaNotSet), |(replica, token)| { let response = replica.execute_mut(nr::Op::ProcCreate(&mod_file, data_frames), *token); match response { Ok(nr::NodeResult::ProcCreated(pid)) => { if cfg!(feature = "mlnrfs") { match mlnr::MlnrKernelNode::add_process(pid) { Ok(pid) => Ok(pid.0), Err(e) => unreachable!("{}", e), } } else { Ok(pid) } } _ => unreachable!("Got unexpected response"), } }) }
function_block-full_function
[ { "content": "/// TODO: This method makes file-operations slow, improve it to use large page sizes. Or maintain a list of\n\n/// (low, high) memory limits per process and check if (base, size) are within the process memory limits.\n\nfn user_virt_addr_valid(pid: Pid, base: u64, size: u64) -> Result<(u64, u64), KError> {\n\n let mut base = base;\n\n let upper_addr = base + size;\n\n\n\n if upper_addr < KERNEL_BASE {\n\n while base <= upper_addr {\n\n // Validate addresses for the buffer end.\n\n if upper_addr - base <= BASE_PAGE_SIZE as u64 {\n\n match nr::KernelNode::<Ring3Process>::resolve(pid, VAddr::from(base)) {\n\n Ok(_) => {\n\n return nr::KernelNode::<Ring3Process>::resolve(\n\n pid,\n\n VAddr::from(upper_addr - 1),\n\n )\n\n }\n\n Err(e) => return Err(e.clone()),\n\n }\n\n }\n\n\n\n match nr::KernelNode::<Ring3Process>::resolve(pid, VAddr::from(base)) {\n", "file_path": "kernel/src/arch/x86_64/syscall.rs", "rank": 1, "score": 433535.67381693405 }, { "content": "/// Spawns a new process\n\n///\n\n/// This function is way too long because of several things that need to happen,\n\n/// and they are currently (TODO) not neatly encapsulated away in modules/functions\n\n/// We're loading a process from a module:\n\n/// - First we are constructing our own custom elfloader trait to load figure out\n\n/// which program headers in the module will be writable (these should not be replicated by NR)\n\n/// - Then we continue by creating a new Process through an nr call\n\n/// - Then we allocate a bunch of memory on all NUMA nodes to create enough dispatchers\n\n/// so we can run on all cores\n\n/// - Finally we allocate a dispatcher to the current core (0) and start running the process\n\npub fn spawn(binary: &'static str) -> Result<Pid, KError> {\n\n let kcb = kcb::get_kcb();\n\n\n\n let pid = make_process(binary)?;\n\n allocate_dispatchers(pid)?;\n\n\n\n // Set current thread to run executor from our process (on the current core)\n\n let thread = 
topology::MACHINE_TOPOLOGY.current_thread();\n\n let (_gtid, _eid) = nr::KernelNode::<Ring3Process>::allocate_core_to_process(\n\n pid,\n\n INVALID_EXECUTOR_START, // This VAddr is irrelevant as it is overriden later\n\n thread.node_id.or(Some(0)),\n\n Some(thread.id),\n\n )?;\n\n\n\n Ok(pid)\n\n}\n", "file_path": "kernel/src/arch/x86_64/process.rs", "rank": 2, "score": 417621.23732419044 }, { "content": "pub fn spawn(binary: &'static str) -> Result<Pid, KError> {\n\n Ok(0)\n\n}\n", "file_path": "kernel/src/arch/unix/process.rs", "rank": 3, "score": 417565.9519350347 }, { "content": "/// Translate a physical memory address into a kernel addressable location.\n\npub fn paddr_to_kernel_vaddr(p: PAddr) -> VAddr {\n\n let paddr_val: u64 = p.into();\n\n VAddr::from((paddr_val + KERNEL_BASE) as usize)\n\n}\n", "file_path": "kernel/src/arch/x86_64/memory.rs", "rank": 5, "score": 412748.7242885934 }, { "content": "/// Translate a physical memory address into a kernel addressable location.\n\npub fn paddr_to_kernel_vaddr(p: PAddr) -> VAddr {\n\n let paddr_val: u64 = p.into();\n\n VAddr::from((paddr_val + KERNEL_BASE) as usize)\n\n}\n\n\n\n/// Page allocator based on mmap/munmap system calls for backing slab memory.\n\npub struct MemoryMapper {\n\n /// Currently allocated bytes.\n\n currently_allocated: usize,\n\n}\n\n\n\nimpl MemoryMapper {\n\n pub fn new() -> MemoryMapper {\n\n MemoryMapper {\n\n currently_allocated: 0,\n\n }\n\n }\n\n}\n\n\n\nimpl MemoryMapper {\n", "file_path": "kernel/src/arch/unix/memory.rs", "rank": 6, "score": 412748.7242885934 }, { "content": "/// Translate a kernel 'virtual' address to the physical address of the memory.\n\npub fn kernel_vaddr_to_paddr(v: VAddr) -> PAddr {\n\n let vaddr_val: usize = v.into();\n\n PAddr::from(vaddr_val as u64 - KERNEL_BASE)\n\n}\n\n\n", "file_path": "kernel/src/arch/unix/memory.rs", "rank": 8, "score": 385399.3570355867 }, { "content": "/// Translate a kernel 'virtual' address to the physical address of the 
memory.\n\npub fn kernel_vaddr_to_paddr(v: VAddr) -> PAddr {\n\n let vaddr_val: usize = v.into();\n\n PAddr::from(vaddr_val as u64 - KERNEL_BASE)\n\n}\n\n\n", "file_path": "kernel/src/arch/x86_64/memory.rs", "rank": 9, "score": 385399.3570355867 }, { "content": "/// Allocates `pages` * `BASE_PAGE_SIZE` bytes of physical memory\n\n/// and return the address.\n\npub fn allocate_pages(st: &SystemTable<Boot>, pages: usize, typ: MemoryType) -> PAddr {\n\n let num = st\n\n .boot_services()\n\n .allocate_pages(AllocateType::AnyPages, typ, pages)\n\n .expect_success(format!(\"Allocation of {} failed for type {:?}\", pages, typ).as_str());\n\n\n\n // TODO: The UEFI Specification does not say if the pages we get are zeroed or not\n\n // (UEFI Specification 2.8, EFI_BOOT_SERVICES.AllocatePages())\n\n unsafe {\n\n st.boot_services()\n\n .memset(num as *mut u8, pages * BASE_PAGE_SIZE, 0u8)\n\n };\n\n\n\n PAddr::from(num)\n\n}\n\n\n\n/// Debug function to see what's currently in the UEFI address space.\n", "file_path": "bootloader/src/main.rs", "rank": 10, "score": 339625.5631126919 }, { "content": "/// Determine the size of a regular file,\n\n///\n\n/// The only -- crappy -- way to do this with UEFI, seems to be\n\n/// to seek to infinity and then call get_position on it?\n\nfn determine_file_size(file: &mut RegularFile) -> usize {\n\n file.set_position(0xFFFFFFFFFFFFFFFF)\n\n .expect_success(\"Seek to the end of kernel\");\n\n let file_size = file\n\n .get_position()\n\n .expect_success(\"Couldn't determine binary size\") as usize;\n\n file.set_position(0)\n\n .expect_success(\"Reset file handle position failed\");\n\n\n\n file_size\n\n}\n\n\n", "file_path": "bootloader/src/modules.rs", "rank": 11, "score": 338376.1111581855 }, { "content": "pub fn test_panic_fmt(args: core::fmt::Arguments, file: &'static str, line: u32) {\n\n sprintln!(\"\\npanicked at '\");\n\n sprintln!(\"', {}:{}\", file, line);\n\n}\n\n\n", "file_path": "lib/ctest/src/lib.rs", "rank": 12, "score": 
324332.13988048106 }, { "content": "/// This method converts the file offset to buffer number with-in a file.\n\n/// The assumption is that the buffer-size is equal for all the buffers\n\n/// in a file.\n\nfn offset_to_buffernum(offset: usize, buffer_size: usize) -> usize {\n\n offset / buffer_size\n\n}\n\n\n\n#[cfg(test)]\n\npub mod test {\n\n use super::*;\n\n\n\n #[test]\n\n /// This method test the offset to buffer number conversion for a file.\n\n /// It uses BASE_PAGE_SIZE as buffer size.\n\n fn test_offset_to_buffernum() {\n\n let mut buffer_num: i64 = -1;\n\n for i in 0..10000 {\n\n if (i % BASE_PAGE_SIZE) == 0 {\n\n buffer_num += 1;\n\n }\n\n assert_eq!(offset_to_buffernum(i, BASE_PAGE_SIZE), buffer_num as usize);\n\n }\n\n }\n", "file_path": "kernel/src/fs/file.rs", "rank": 13, "score": 315414.43543848506 }, { "content": "/// System call handler for printing\n\nfn process_print(buf: UserValue<&str>) -> Result<(u64, u64), KError> {\n\n let mut kcb = super::kcb::get_kcb();\n\n let buffer: &str = *buf;\n\n\n\n // A poor mans line buffer scheme:\n\n match &mut kcb.print_buffer {\n\n Some(kbuf) => match buffer.find(\"\\n\") {\n\n Some(idx) => {\n\n let (low, high) = buffer.split_at(idx + 1);\n\n kbuf.push_str(low);\n\n {\n\n let r = klogger::SERIAL_LINE_MUTEX.lock();\n\n sprint!(\"{}\", kbuf);\n\n }\n\n kbuf.clear();\n\n kbuf.push_str(high);\n\n }\n\n None => {\n\n kbuf.push_str(buffer);\n\n if kbuf.len() > 2048 {\n", "file_path": "kernel/src/arch/x86_64/syscall.rs", "rank": 14, "score": 312803.4526737789 }, { "content": "/// Look for all files in the root folder all SimpleFileSystems that are registered.\n\n///\n\n/// When running on qemu this is likely the esp partition in `target/x86_64-uefi/debug/esp/`\n\n/// the esp dir gets mounted with qemu using `-drive if=none,format=raw,file=fat:rw:$ESP_DIR,id=esp`\n\n///\n\n/// When running on bare-metal, ipxe registers its own virtual file system where modules are stored.\n\npub fn load_modules_on_all_sfs(st: 
&SystemTable<Boot>, dir_name: &str) -> Vec<(String, Module)> {\n\n let all_handles = st\n\n .boot_services()\n\n .find_handles::<SimpleFileSystem>()\n\n .expect_success(\"Can't find any SimpleFileSystems?\");\n\n let mut modules: Vec<(String, Module)> = Vec::with_capacity(KernelArgs::MAX_MODULES);\n\n for handle in all_handles {\n\n let fhandle = st\n\n .boot_services()\n\n .handle_protocol::<SimpleFileSystem>(handle)\n\n .expect_success(\"Don't have SimpleFileSystem support\");\n\n let fhandle = unsafe { &mut *fhandle.get() };\n\n modules.extend(load_modules(st, fhandle));\n\n }\n\n\n\n modules\n\n}\n\n\n", "file_path": "bootloader/src/modules.rs", "rank": 15, "score": 308944.87451147666 }, { "content": "/// Calculate how many base and large pages we need to fit a given size.\n\n///\n\n/// # Returns\n\n/// A tuple containing (base-pages, large-pages).\n\n/// base-pages will never exceed LARGE_PAGE_SIZE / BASE_PAGE_SIZE.\n\npub fn size_to_pages(size: usize) -> (usize, usize) {\n\n let bytes_not_in_large = size % LARGE_PAGE_SIZE;\n\n\n\n let div = bytes_not_in_large / BASE_PAGE_SIZE;\n\n let rem = bytes_not_in_large % BASE_PAGE_SIZE;\n\n let base_pages = if rem > 0 { div + 1 } else { div };\n\n\n\n let remaining_size = size - bytes_not_in_large;\n\n let div = remaining_size / LARGE_PAGE_SIZE;\n\n let rem = remaining_size % LARGE_PAGE_SIZE;\n\n let large_pages = if rem > 0 { div + 1 } else { div };\n\n\n\n (base_pages, large_pages)\n\n}\n\n\n\nimpl KernelAllocator {\n\n /// Try to allocate a piece of memory.\n\n fn try_alloc(&self, layout: Layout) -> Result<ptr::NonNull<u8>, AllocationError> {\n\n let kcb = kcb::try_get_kcb().ok_or(AllocationError::KcbUnavailable)?;\n\n match KernelAllocator::allocator_for(layout) {\n", "file_path": "kernel/src/memory/mod.rs", "rank": 16, "score": 302978.33305601537 }, { "content": "fn handle_process(arg1: u64, arg2: u64, arg3: u64) -> Result<(u64, u64), KError> {\n\n let op = ProcessOperation::from(arg1);\n\n\n\n match op {\n\n 
ProcessOperation::Log => {\n\n let buffer: *const u8 = arg2 as *const u8;\n\n let len: usize = arg3 as usize;\n\n\n\n let user_str = unsafe {\n\n let slice = core::slice::from_raw_parts(buffer, len);\n\n core::str::from_utf8_unchecked(slice)\n\n };\n\n\n\n process_print(UserValue::new(user_str))\n\n }\n\n ProcessOperation::GetVCpuArea => unsafe {\n\n let kcb = super::kcb::get_kcb();\n\n\n\n let vcpu_vaddr = kcb.arch.current_process()?.vcpu_addr().as_u64();\n\n\n", "file_path": "kernel/src/arch/x86_64/syscall.rs", "rank": 17, "score": 296894.3443093653 }, { "content": "/// System call handler for vspace operations\n\nfn handle_vspace(arg1: u64, arg2: u64, arg3: u64) -> Result<(u64, u64), KError> {\n\n let op = VSpaceOperation::from(arg1);\n\n let base = VAddr::from(arg2);\n\n let region_size = arg3;\n\n trace!(\"handle_vspace {:?} {:#x} {:#x}\", op, base, region_size);\n\n\n\n let kcb = super::kcb::get_kcb();\n\n let mut plock = kcb.arch.current_process();\n\n\n\n match op {\n\n VSpaceOperation::Map => unsafe {\n\n plock.as_ref().map_or(Err(KError::ProcessNotSet), |p| {\n\n let (bp, lp) = crate::memory::size_to_pages(region_size as usize);\n\n let mut frames = Vec::with_capacity(bp + lp);\n\n crate::memory::KernelAllocator::try_refill_tcache(20 + bp, lp)?;\n\n\n\n // TODO(apihell): This `paddr` is bogus, it will return the PAddr of the\n\n // first frame mapped but if you map multiple Frames, no chance getting that\n\n // Better would be a function to request physically consecutive DMA memory\n\n // or use IO-MMU translation (see also rumpuser_pci_dmalloc)\n", "file_path": "kernel/src/arch/x86_64/syscall.rs", "rank": 18, "score": 296815.48092228844 }, { "content": "type LwpMain = Option<unsafe extern \"C\" fn(arg: *mut u8) -> *mut u8>;\n\n\n\nstatic CURLWPID: AtomicI32 = AtomicI32::new(1);\n\n\n\nstatic AVAILABLE_CORES: AtomicUsize = AtomicUsize::new(1);\n\n\n", "file_path": "lib/vibrio/src/rumprt/prt/mod.rs", "rank": 20, "score": 293597.68489244237 }, { "content": 
"/// Load a binary from the UEFI FAT partition, and return\n\n/// a slice to the data in memory along with a Module struct\n\n/// that can be passed to the kernel.\n\npub fn load_binary_into_memory(\n\n st: &SystemTable<Boot>,\n\n dir: &mut Directory,\n\n file: &mut uefi::proto::media::file::FileInfo,\n\n name: &str,\n\n) -> Module {\n\n // Get the binary, this should be a plain old\n\n // ELF executable.\n\n let mut module_file = locate_binary(&st, dir, name);\n\n let module_size = determine_file_size(&mut module_file);\n\n debug!(\"Found {} binary with {} bytes\", name, module_size);\n\n let module_base_paddr = allocate_pages(\n\n &st,\n\n round_up!(module_size, BASE_PAGE_SIZE) / BASE_PAGE_SIZE,\n\n MemoryType(MODULE),\n\n );\n\n trace!(\"Load the {} binary (in a vector)\", name);\n\n let module_blob: &mut [u8] = unsafe {\n\n slice::from_raw_parts_mut(\n\n paddr_to_uefi_vaddr(module_base_paddr).as_mut_ptr::<u8>(),\n", "file_path": "bootloader/src/modules.rs", "rank": 22, "score": 282723.15248355916 }, { "content": "/// This is used to determine, how many buffers to add dependeing on the number\n\n/// of bytes and buffer-size.\n\nfn ceil(bytes: usize, buffer_size: usize) -> usize {\n\n let mut val = bytes / buffer_size;\n\n if bytes > val * buffer_size {\n\n val += 1;\n\n }\n\n val\n\n}\n\n\n", "file_path": "kernel/src/fs/file.rs", "rank": 23, "score": 282573.7254405562 }, { "content": "/// Register a periodic timer to advance replica\n\n///\n\n/// TODO(api): Ideally this should come from Instant::now() +\n\n/// Duration::from_millis(10) and for that we need a way to reliably\n\n/// convert between TSC and Instant\n\npub fn set(deadline: u64) {\n\n let kcb = get_kcb();\n\n let mut apic = kcb.arch.apic();\n\n apic.tsc_enable();\n\n unsafe { apic.tsc_set(x86::time::rdtsc() + deadline) };\n\n}\n", "file_path": "kernel/src/arch/x86_64/timer.rs", "rank": 24, "score": 279331.88505980046 }, { "content": "/// Register a periodic timer to advance replica.\n\npub fn 
set(deadline: u64) {}\n", "file_path": "kernel/src/arch/unix/timer.rs", "rank": 25, "score": 279327.65179569926 }, { "content": "/// System call handler for process exit\n\nfn process_exit(code: u64) -> Result<(u64, u64), KError> {\n\n debug!(\"Process got exit, we are done for now...\");\n\n // TODO: For now just a dummy version that exits Qemu\n\n if code != 0 {\n\n // When testing we want to indicate to our integration\n\n // test that our user-space test failed with a non-zero exit\n\n super::debug::shutdown(crate::ExitReason::UserSpaceError);\n\n } else {\n\n super::debug::shutdown(crate::ExitReason::Ok);\n\n }\n\n}\n\n\n", "file_path": "kernel/src/arch/x86_64/syscall.rs", "rank": 26, "score": 277277.58913906885 }, { "content": "/// Escape tags in such a way that it is suitable for inclusion in a\n\n/// Graphviz HTML label.\n\npub fn escape_html(s: &str) -> String {\n\n s.replace(\"&\", \"&amp;\")\n\n .replace(\"\\\"\", \"&quot;\")\n\n .replace(\"<\", \"&lt;\")\n\n .replace(\">\", \"&gt;\")\n\n}\n\n\n\nimpl<'a> LabelText<'a> {\n\n pub fn label<S: Into<Cow<'a, str>>>(s: S) -> LabelText<'a> {\n\n LabelStr(s.into())\n\n }\n\n\n\n pub fn escaped<S: Into<Cow<'a, str>>>(s: S) -> LabelText<'a> {\n\n EscStr(s.into())\n\n }\n\n\n\n pub fn html<S: Into<Cow<'a, str>>>(s: S) -> LabelText<'a> {\n\n HtmlStr(s.into())\n\n }\n\n\n", "file_path": "kernel/src/graphviz.rs", "rank": 27, "score": 276202.14893976995 }, { "content": "pub fn context_switch(prev_cookie: *mut u8, next_cookie: *mut u8) {\n\n //trace!(\"got context switched {:p} {:p}\", prev_cookie, next_cookie);\n\n\n\n let prev: *mut rumprun_lwp = prev_cookie as *mut rumprun_lwp;\n\n let next: *mut rumprun_lwp = next_cookie as *mut rumprun_lwp;\n\n unsafe {\n\n if !prev.is_null() && (*prev).rl_lwpctl.lc_curcpu != LWPCTL_CPU_EXITED {\n\n (*prev).rl_lwpctl.lc_curcpu = LWPCTL_CPU_NONE;\n\n }\n\n if !next.is_null() {\n\n (*next).rl_lwpctl.lc_curcpu = Environment::scheduler().core_id as i32;\n\n (*next).rl_lwpctl.lc_pctr += 
1;\n\n }\n\n }\n\n}\n\n\n\nextern \"C\" {\n\n fn rump_pub_lwproc_curlwp() -> *const c_void;\n\n fn rump_pub_lwproc_switch(lwp: *const c_void);\n\n fn rump_pub_lwproc_newlwp(pid: c_int) -> c_int;\n\n fn getpid() -> c_int;\n\n}\n\n\n\nunsafe extern \"C\" fn rumprun_makelwp_tramp(arg: *mut u8) -> *mut u8 {\n\n rump_pub_lwproc_switch(arg as *const c_void);\n\n let lwp = Environment::thread().rumprun_lwp as *const rumprun_lwp;\n\n (((*lwp).start).unwrap())((*lwp).arg);\n\n unreachable!(\"does it exit or not -- hey it probably can?\")\n\n}\n\n\n", "file_path": "lib/vibrio/src/rumprt/prt/mod.rs", "rank": 28, "score": 272764.06227705686 }, { "content": "/// Retrieve the KCB by reading the gs register.\n\n///\n\n/// # Panic\n\n/// This will fail in case the KCB is not yet set (i.e., early on during\n\n/// initialization).\n\npub fn get_kcb<'a>() -> &'a mut Kcb<Arch86Kcb> {\n\n unsafe {\n\n let kcb = segmentation::rdgsbase() as *mut Kcb<Arch86Kcb>;\n\n assert!(kcb != ptr::null_mut(), \"KCB not found in gs register.\");\n\n let kptr = ptr::NonNull::new_unchecked(kcb);\n\n &mut *kptr.as_ptr()\n\n }\n\n}\n\n\n\n/// Installs the KCB by setting storing a pointer to it in the `gs`\n\n/// register.\n\n///\n\n/// We also set IA32_KERNEL_GSBASE to the pointer to make sure\n\n/// when we call `swapgs` on a syscall entry, we restore the pointer\n\n/// to the KCB (user-space may change the `gs` register for\n\n/// TLS etc.).\n\nunsafe fn set_kcb<A: ArchSpecificKcb>(kcb: ptr::NonNull<Kcb<A>>) {\n\n // Set up the GS register to point to the KCB\n\n segmentation::wrgsbase(kcb.as_ptr() as u64);\n\n // Set up swapgs instruction to reset the gs register to the KCB on irq, trap or syscall\n", "file_path": "kernel/src/arch/x86_64/kcb.rs", "rank": 29, "score": 270235.9325702861 }, { "content": "pub fn get_kcb<'a>() -> &'a mut Kcb<ArchKcb> {\n\n unsafe { &mut *KCB as &mut Kcb<ArchKcb> }\n\n}\n\n\n\nunsafe fn set_kcb(kcb: ptr::NonNull<Kcb<ArchKcb>>) {\n\n KCB = kcb.as_ptr();\n\n}\n\n\n\n/// 
Initialize the KCB in the system.\n\n///\n\n/// Should be called during set-up. Afterwards we can use `get_kcb` safely.\n\npub(crate) fn init_kcb<A: ArchSpecificKcb + Any>(mut kcb: &'static mut Kcb<A>) {\n\n let any_kcb = &mut kcb as &mut dyn Any;\n\n if let Some(ckcb) = any_kcb.downcast_mut::<&'static mut Kcb<ArchKcb>>() {\n\n let kptr: ptr::NonNull<Kcb<ArchKcb>> = (*ckcb).into();\n\n unsafe { set_kcb(kptr) };\n\n } else {\n\n panic!(\"Tried to install incompatible KCB.\");\n\n }\n\n}\n", "file_path": "kernel/src/arch/unix/kcb.rs", "rank": 30, "score": 270219.6860240761 }, { "content": "/// Maps an error code to a human-readable description.\n\npub fn errno_to_str(err: c_int) -> &'static str {\n\n match err {\n\n 0 => \"Success\",\n\n EPERM => \"Operation not permitted\",\n\n ENOENT => \"No such file or directory\",\n\n ESRCH => \"No such process\",\n\n EINTR => \"Interrupted system call\",\n\n EIO => \"Input/output error\",\n\n ENXIO => \"Device not configured\",\n\n E2BIG => \"Argument list too long\",\n\n ENOEXEC => \"Exec format error\",\n\n EBADF => \"Bad file descriptor\",\n\n ECHILD => \"No child processes\",\n\n EDEADLK => \"Resource deadlock avoided\",\n\n ENOMEM => \"Cannot allocate memory\",\n\n EACCES => \"Permission denied\",\n\n EFAULT => \"Bad address\",\n\n ENOTBLK => \"Block device required\",\n\n EBUSY => \"Device busy\",\n\n EEXIST => \"File exists\",\n", "file_path": "lib/vibrio/src/rumprt/errno.rs", "rank": 31, "score": 266163.2072428389 }, { "content": "fn handle_system(arg1: u64, arg2: u64, arg3: u64) -> Result<(u64, u64), KError> {\n\n let op = SystemOperation::from(arg1);\n\n\n\n match op {\n\n SystemOperation::GetHardwareThreads => {\n\n let vaddr_buf = arg2; // buf.as_mut_ptr() as u64\n\n let vaddr_buf_len = arg3; // buf.len() as u64\n\n\n\n let hwthreads = topology::MACHINE_TOPOLOGY.threads();\n\n let mut return_threads = Vec::with_capacity(topology::MACHINE_TOPOLOGY.num_threads());\n\n for hwthread in hwthreads {\n\n 
return_threads.push(kpi::system::CpuThread {\n\n id: hwthread.id as usize,\n\n node_id: hwthread.node_id.unwrap_or(0) as usize,\n\n package_id: hwthread.package_id as usize,\n\n core_id: hwthread.core_id as usize,\n\n thread_id: hwthread.thread_id as usize,\n\n });\n\n }\n\n\n", "file_path": "kernel/src/arch/x86_64/syscall.rs", "rank": 32, "score": 262206.3666485832 }, { "content": "/// Determines the necessary space for per-thread TLS memory region.\n\n///\n\n/// Total required bytes is the sum of the `tdata`, `tbss`,\n\n/// and a statically defined extra section.\n\n/// (i.e., the sum of all return values)\n\npub fn get_tls_info() -> (&'static [u8], Layout) {\n\n let pinfo: kpi::process::ProcessInfo =\n\n kpi::syscalls::Process::process_info().expect(\"Can't get pinfo?\");\n\n if pinfo.has_tls {\n\n let _bss_size = pinfo.tls_len_total - pinfo.tls_data_len;\n\n unsafe {\n\n // Safe: We know this exists because our ELF loader put TLS there (hopefully)\n\n (\n\n core::slice::from_raw_parts(\n\n pinfo.tls_data as *const u8,\n\n pinfo.tls_data_len as usize,\n\n ),\n\n Layout::from_size_align_unchecked(\n\n pinfo.tls_len_total as usize + core::mem::size_of::<ThreadControlBlock>(),\n\n pinfo.alignment as usize,\n\n ),\n\n )\n\n }\n\n } else {\n\n (&[], Layout::new::<ThreadControlBlock>())\n\n }\n\n}\n", "file_path": "lib/lineup/src/tls2/bespin.rs", "rank": 33, "score": 260150.98788854244 }, { "content": "#[cfg(target_family = \"unix\")]\n\npub fn get_tls_info() -> (&'static [u8], Layout) {\n\n // We only use this for tests, so we just estimate our TLS size...\n\n // Ideally we parse the ELF of our process to determine the static TLS size\n\n (&[], Layout::new::<ThreadControlBlock>())\n\n}\n", "file_path": "lib/lineup/src/tls2/unix.rs", "rank": 34, "score": 260127.02551796872 }, { "content": "/// Try to retrieve the KCB by reading the gs register.\n\n///\n\n/// This may return None if they KCB is not yet set\n\n/// (i.e., during initialization).\n\npub fn 
try_get_kcb<'a>() -> Option<&'a mut Kcb<Arch86Kcb>> {\n\n unsafe {\n\n let kcb = segmentation::rdgsbase() as *mut Kcb<Arch86Kcb>;\n\n if kcb != ptr::null_mut() {\n\n let kptr = ptr::NonNull::new_unchecked(kcb);\n\n Some(&mut *kptr.as_ptr())\n\n } else {\n\n None\n\n }\n\n }\n\n}\n\n\n", "file_path": "kernel/src/arch/x86_64/kcb.rs", "rank": 35, "score": 258848.17643111455 }, { "content": "pub fn try_get_kcb<'a>() -> Option<&'a mut Kcb<ArchKcb>> {\n\n unsafe {\n\n if !KCB.is_null() {\n\n Some(&mut *KCB as &mut Kcb<ArchKcb>)\n\n } else {\n\n None\n\n }\n\n }\n\n}\n\n\n", "file_path": "kernel/src/arch/unix/kcb.rs", "rank": 36, "score": 258826.66229730932 }, { "content": "fn redis_benchmark(nic: &'static str, requests: usize) -> Result<rexpect::session::PtySession> {\n\n fn spawn_bencher(port: u16, requests: usize) -> Result<rexpect::session::PtySession> {\n\n spawn(\n\n format!(\n\n \"redis-benchmark -h 172.31.0.10 -p {} -t ping,get,set -n {} -P 30 --csv\",\n\n port, requests\n\n )\n\n .as_str(),\n\n Some(25000),\n\n )\n\n }\n\n\n\n let mut redis_client = spawn_bencher(REDIS_PORT, requests)?;\n\n // redis reports the tputs as floating points\n\n redis_client.exp_string(\"\\\"PING_INLINE\\\",\\\"\")?;\n\n let (_line, ping_tput) = redis_client.exp_regex(\"[-+]?[0-9]*\\\\.?[0-9]+\")?;\n\n redis_client.exp_string(\"\\\"\")?;\n\n\n\n redis_client.exp_string(\"\\\"PING_BULK\\\",\\\"\")?;\n\n let (_line, ping_bulk_tput) = redis_client.exp_regex(\"[-+]?[0-9]*\\\\.?[0-9]+\")?;\n", "file_path": "kernel/tests/integration-test.rs", "rank": 37, "score": 258682.35030104005 }, { "content": "#[allow(unused)] // Currently only used in integration_main.rs\n\npub fn overlaps<T: PartialOrd>(a: &core::ops::Range<T>, b: &core::ops::Range<T>) -> bool {\n\n a.start < b.end && b.start < a.end\n\n}\n", "file_path": "kernel/src/prelude.rs", "rank": 38, "score": 257352.77102801483 }, { "content": "/// Walk through the root directory of the UEFI SimpleFileSystem\n\n/// and return all files we find 
in that directory as\n\n/// a list tuples (filename, module).\n\n///\n\n/// Does not recurse into subdirectories.\n\npub fn load_modules(\n\n st: &SystemTable<Boot>,\n\n fhandle: &mut SimpleFileSystem,\n\n) -> Vec<(String, Module)> {\n\n let mut dir_handle = fhandle.open_volume().expect_success(\"Can't open volume\");\n\n\n\n // We have capacity for 32 modules if you want to increase this\n\n // also change `modules` in KernelArgs.\n\n let mut modules = Vec::with_capacity(KernelArgs::MAX_MODULES);\n\n\n\n for _m in 0..KernelArgs::MAX_MODULES {\n\n const MAX_FILE_INFO_SIZE: usize = 256;\n\n let mut buffer: &mut [u8] = &mut [0u8; MAX_FILE_INFO_SIZE];\n\n\n\n match dir_handle.read_entry(&mut buffer) {\n\n Ok(completion) => {\n\n if let Some(file_info) = completion.unwrap() {\n\n let file_name_16 = DCStr16(file_info.file_name().as_ptr());\n\n if !file_info.attribute().contains(FileAttribute::DIRECTORY) {\n\n let name_string: String = file_name_16.into();\n", "file_path": "bootloader/src/modules.rs", "rank": 39, "score": 254676.86845039367 }, { "content": "#[allow(non_camel_case_types)]\n\ntype rump_biodone_fn = Option<unsafe extern \"C\" fn(*mut c_void, c_size_t, c_int)>;\n\n\n\n#[repr(C)]\n\n#[derive(Debug, Copy, Clone)]\n\npub struct RumpHyperUpcalls {\n\n pub hyp_schedule: Option<unsafe extern \"C\" fn()>,\n\n pub hyp_unschedule: Option<unsafe extern \"C\" fn()>,\n\n pub hyp_backend_unschedule:\n\n Option<unsafe extern \"C\" fn(arg1: c_int, arg2: *mut c_int, arg3: *const c_void)>,\n\n pub hyp_backend_schedule: Option<unsafe extern \"C\" fn(arg1: c_int, arg2: *const c_void)>,\n\n pub hyp_lwproc_switch: Option<unsafe extern \"C\" fn(arg1: *mut threads::lwp)>,\n\n pub hyp_lwproc_release: Option<unsafe extern \"C\" fn()>,\n\n pub hyp_lwproc_rfork:\n\n Option<unsafe extern \"C\" fn(arg1: *mut c_void, arg2: c_int, arg3: *const c_char) -> c_int>,\n\n pub hyp_lwproc_newlwp: Option<unsafe extern \"C\" fn(arg1: pid_t) -> c_int>,\n\n pub hyp_lwproc_curlwp: Option<unsafe extern 
\"C\" fn() -> *mut threads::lwp>,\n\n pub hyp_syscall:\n\n Option<unsafe extern \"C\" fn(arg1: c_int, arg2: *mut c_void, arg3: *mut c_long) -> c_int>,\n\n pub hyp_lwpexit: Option<unsafe extern \"C\" fn()>,\n\n pub hyp_execnotify: Option<unsafe extern \"C\" fn(arg1: *const c_char)>,\n\n pub hyp_getpid: Option<unsafe extern \"C\" fn() -> pid_t>,\n\n pub hyp_extra: [*mut c_void; 8usize],\n\n}\n\n\n\nstatic HYPERUPCALLS: AtomicPtr<RumpHyperUpcalls> = AtomicPtr::new(ptr::null_mut());\n\n\n", "file_path": "lib/vibrio/src/rumprt/mod.rs", "rank": 41, "score": 247520.00989484607 }, { "content": "/// Establishes a route for a GSI on the IOAPIC.\n\n///\n\n/// # TODO\n\n/// Currently this just enables everything and routes it to\n\n/// core 0. This is because, we should probably just support MSI(X)\n\n/// and don't invest a lot in legacy interrupts...\n\npub fn ioapic_establish_route(_gsi: u64, _core: u64) {\n\n use crate::memory::{paddr_to_kernel_vaddr, vspace::MapAction, PAddr};\n\n\n\n for io_apic in topology::MACHINE_TOPOLOGY.io_apics() {\n\n let addr = PAddr::from(io_apic.address as u64);\n\n\n\n let mut inst =\n\n unsafe { x86::apic::ioapic::IoApic::new(paddr_to_kernel_vaddr(addr).as_usize()) };\n\n trace!(\n\n \"This IOAPIC supports {} Interrupts\",\n\n inst.supported_interrupts()\n\n );\n\n\n\n for i in 0..inst.supported_interrupts() {\n\n let gsi = io_apic.global_irq_base + i as u32;\n\n if gsi < 16 {\n\n trace!(\n\n \"Enable irq {} which maps to GSI#{}\",\n\n i,\n\n io_apic.global_irq_base + i as u32\n\n );\n\n if i != 2 && i != 1 {\n\n inst.enable(i, 0);\n\n }\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "kernel/src/arch/x86_64/irq.rs", "rank": 42, "score": 247311.0005309167 }, { "content": "fn page_sizes() -> impl Strategy<Value = usize> {\n\n prop::sample::select(vec![BASE_PAGE_SIZE, LARGE_PAGE_SIZE])\n\n}\n\n\n\nprop_compose! 
{\n\n fn frames(max_base: u64, _max_size: usize)(base in base_aligned_addr(max_base), size in page_sizes()) -> Frame {\n\n let paddr = if base & 0x1 > 0 {\n\n PAddr::from(base).align_down_to_base_page()\n\n } else {\n\n PAddr::from(base).align_down_to_large_page()\n\n };\n\n\n\n Frame::new(paddr, size, 0)\n\n }\n\n}\n\n\n\nprop_compose! {\n\n fn vaddrs(max: u64)(base in 0..max) -> VAddr { VAddr::from(base & !0xfff) }\n\n}\n\n\n", "file_path": "kernel/src/arch/x86_64/vspace/test.rs", "rank": 43, "score": 246997.5173103076 }, { "content": "/// Return the address range of `start_ap.S` as (start, end)\n\n///\n\n/// # Note\n\n/// The addresses returned are start and end in kernel space\n\n/// (above KERNEL_BASE, within the relocated ELF file). But\n\n/// when we boot we have to copy the code in a lower address region\n\n/// where a 16-bit mode CPU can execute.\n\nfn ap_code_address_range() -> (PAddr, PAddr) {\n\n extern \"C\" {\n\n /// The first symbol in `start_ap.S`\n\n static x86_64_start_ap: *const u8;\n\n /// The very last symbol in `start_ap.S`\n\n static x86_64_start_ap_end: *const u8;\n\n }\n\n\n\n unsafe {\n\n (\n\n PAddr::from(&x86_64_start_ap as *const _ as u64),\n\n PAddr::from(&x86_64_start_ap_end as *const _ as u64),\n\n )\n\n }\n\n}\n\n\n", "file_path": "kernel/src/arch/x86_64/coreboot.rs", "rank": 44, "score": 246909.42542644808 }, { "content": "pub trait MemManager: PhysicalPageProvider + AllocatorStatistics + GrowBackend {}\n\n\n\n/// Definition to parse the kernel command-line arguments.\n", "file_path": "kernel/src/kcb.rs", "rank": 45, "score": 246665.21127491334 }, { "content": "pub fn bench(ncores: Option<usize>, open_files: usize, benchmark: String, write_ratio: usize) {\n\n info!(\"thread_id,benchmark,core,write_ratio,open_files,duration_total,duration,operations\");\n\n\n\n let hwthreads = vibrio::syscalls::System::threads().expect(\"Can't get system topology\");\n\n let mut cores = Vec::with_capacity(ncores.unwrap());\n\n\n\n let mut maximum = 
1; // We already have core 0\n\n for hwthread in hwthreads.iter().take(ncores.unwrap_or(hwthreads.len())) {\n\n cores.push(hwthread.id);\n\n if hwthread.id != 0 {\n\n match vibrio::syscalls::Process::request_core(\n\n hwthread.id,\n\n VAddr::from(vibrio::upcalls::upcall_while_enabled as *const fn() as u64),\n\n ) {\n\n Ok(_) => {\n\n maximum += 1;\n\n continue;\n\n }\n\n Err(e) => {\n\n error!(\"Can't spawn on {:?}: {:?}\", hwthread.id, e);\n", "file_path": "usr/init/src/fxmark/mod.rs", "rank": 46, "score": 246662.09446704655 }, { "content": "/// Returns a vector of build path information with an entry\n\n/// for every application we want to build.\n\n///\n\n/// Format is: (folder_name, baking_output_binary, baking_input_binary)\n\n///\n\n/// The baking output binary should be placed in 'target/x86_64-bespin-none/debug|release/build'\n\n/// (If you change this also don't forget to adapt the `run.py` script)\n\n/// in the same location where static C library builds are stored\n\n/// this goes slightly against convention that we shouldn't place\n\n/// things out of OUT_DIR, but since we're abusing build.rs already anyways ¯\\_(ツ)_/¯\n\nfn build_plan() -> Vec<(&'static str, &'static str, &'static str, bool)> {\n\n let mut plan: Vec<(&'static str, &'static str, &'static str, bool)> = Default::default();\n\n\n\n let unwind_hack = true; // Adds -Wl,-allow-multiple-definition to rumprun-bake\n\n\n\n if cfg!(feature = \"redis\") {\n\n plan.push((\n\n \"redis\",\n\n \"../../../../redis.bin\",\n\n \"bin/redis-server\",\n\n !unwind_hack,\n\n ));\n\n }\n\n\n\n if cfg!(feature = \"memcached\") {\n\n plan.push((\n\n \"memcached\",\n\n \"../../../../memcached.bin\",\n\n \"build/memcached\",\n\n !unwind_hack,\n", "file_path": "usr/rkapps/build.rs", "rank": 47, "score": 245943.03526332224 }, { "content": "/// Provides information about the allocator.\n\npub trait AllocatorStatistics {\n\n /// Current free memory (in bytes) this allocator has.\n\n fn free(&self) -> usize {\n\n 
self.size() - self.allocated()\n\n }\n\n\n\n /// Memory (in bytes) that was handed out by this allocator\n\n /// and has not yet been reclaimed (memory currently in use).\n\n fn allocated(&self) -> usize;\n\n\n\n /// Total memory (in bytes) that is maintained by this allocator.\n\n fn size(&self) -> usize;\n\n\n\n /// Potential capacity (in bytes) that the allocator can maintain.\n\n ///\n\n /// Some allocator may have unlimited capacity, in that case\n\n /// they can return usize::max.\n\n ///\n\n /// e.g. this should hold `capacity() >= free() + allocated()`\n\n fn capacity(&self) -> usize;\n", "file_path": "kernel/src/memory/mod.rs", "rank": 48, "score": 244975.40830677515 }, { "content": "pub trait PhysicalAllocator {\n\n /// Allocates a frame meeting the size and alignment\n\n /// guarantees of layout.\n\n ///\n\n /// If this method returns an Ok(frame), then the frame returned\n\n /// will be a frame pointing to a block of storage suitable for\n\n /// holding an instance of layout.\n\n ///\n\n /// The returned block of storage may or may not have its\n\n /// contents initialized.\n\n ///\n\n /// This method allocates at least a multiple of `BASE_PAGE_SIZE`\n\n /// so it can result in large amounts of internal fragmentation.\n\n unsafe fn allocate_frame(&mut self, layout: Layout) -> Result<Frame, AllocationError>;\n\n\n\n /// Give a frame previously allocated using `allocate_frame` back\n\n /// to the physical memory allocator.\n\n ///\n\n /// # Safety\n\n ///\n", "file_path": "kernel/src/memory/mod.rs", "rank": 49, "score": 244964.24309236015 }, { "content": "/// Generic address space functionality.\n\npub trait AddressSpace {\n\n /// Maps a list of `frames` at `base` in the address space\n\n /// with the access rights defined by `action`.\n\n fn map_frames(\n\n &mut self,\n\n base: VAddr,\n\n frames: &Vec<(Frame, MapAction)>,\n\n ) -> Result<(), AddressSpaceError> {\n\n let mut cur_base = base;\n\n for (frame, action) in frames.into_iter() {\n\n 
self.map_frame(cur_base, *frame, *action)?;\n\n cur_base = VAddr::from(cur_base.as_usize().checked_add(frame.size()).ok_or(\n\n AddressSpaceError::BaseOverflow {\n\n base: base.as_u64(),\n\n },\n\n )?);\n\n }\n\n\n\n Ok(())\n\n }\n", "file_path": "kernel/src/memory/vspace.rs", "rank": 50, "score": 244562.75016990298 }, { "content": "fn kcb_resume_handle(kcb: &crate::kcb::Kcb<Arch86Kcb>) -> Ring3Resumer {\n\n Ring3Resumer::new_restore(kcb.arch.get_save_area_ptr())\n\n}\n\n\n", "file_path": "kernel/src/arch/x86_64/irq.rs", "rank": 51, "score": 243373.4321701742 }, { "content": "/// A trait to allocate and release physical pages from an allocator.\n\npub trait PhysicalPageProvider {\n\n /// Allocate a `BASE_PAGE_SIZE` for the given architecture from the allocator.\n\n fn allocate_base_page(&mut self) -> Result<Frame, AllocationError>;\n\n /// Release a `BASE_PAGE_SIZE` for the given architecture back to the allocator.\n\n fn release_base_page(&mut self, f: Frame) -> Result<(), AllocationError>;\n\n\n\n /// Allocate a `LARGE_PAGE_SIZE` for the given architecture from the allocator.\n\n fn allocate_large_page(&mut self) -> Result<Frame, AllocationError>;\n\n /// Release a `LARGE_PAGE_SIZE` for the given architecture back to the allocator.\n\n fn release_large_page(&mut self, f: Frame) -> Result<(), AllocationError>;\n\n}\n\n\n", "file_path": "kernel/src/memory/mod.rs", "rank": 52, "score": 240418.50994577742 }, { "content": "/// Trying to get the file handle for the kernel binary.\n\nfn locate_binary(st: &SystemTable<Boot>, directory: &mut Directory, name: &str) -> RegularFile {\n\n // Look for the given binary name in the root folder of our EFI partition\n\n // in our case this is `target/x86_64-uefi/debug/esp/`\n\n // whereas the esp dir gets mounted with qemu using\n\n // `-drive if=none,format=raw,file=fat:rw:$ESP_DIR,id=esp`\n\n let binary_file = directory\n\n .open(\n\n format!(\"{}\", name).as_str(),\n\n FileMode::Read,\n\n FileAttribute::READ_ONLY,\n\n )\n\n 
.expect_success(format!(\"Unable to locate binary '{}'\", name).as_str())\n\n .into_type()\n\n .expect_success(\"Can't cast it to a file common type??\");\n\n\n\n let binary_file: RegularFile = match binary_file {\n\n FileType::Regular(t) => t,\n\n _ => panic!(\"Binary was found but is not a regular file type, check your build.\"),\n\n };\n\n\n\n debug!(\"Found the binary {}\", name);\n\n binary_file\n\n}\n\n\n", "file_path": "bootloader/src/modules.rs", "rank": 53, "score": 238314.8069073708 }, { "content": "/// This is invoked through the kernel whenever we get an\n\n/// upcall (trap happened or interrupt came in) we resume\n\n/// exection here so we can handle it accordingly.\n\n///\n\n/// # XXX verify if this is true:\n\n/// When we resume from here we can assume the following:\n\n///\n\n/// * The `enabled` area of [kpi::arch::VirtualCpuState] contains\n\n/// where we left off before we got interrupted.\n\n/// * The [kpi::arch::VirtualCpu] `disabled` flag was set to true and\n\n/// needs to be cleared again.\n\npub fn upcall_while_enabled(control: &mut kpi::arch::VirtualCpu, cmd: u64, arg: u64) -> ! 
{\n\n trace!(\n\n \"upcall_while_enabled {:?} vec={:#x} err={}\",\n\n control,\n\n cmd,\n\n arg\n\n );\n\n\n\n let sched = &PROCESS_SCHEDULER;\n\n\n\n if cmd == kpi::upcall::NEW_CORE {\n\n use lineup::tls2::SchedulerControlBlock;\n\n let core_id = arg;\n\n log::info!(\"Got a new core ({}) assigned to us.\", core_id);\n\n\n\n let scb: SchedulerControlBlock = SchedulerControlBlock::new(core_id as usize);\n\n loop {\n\n sched.run(&scb);\n\n }\n\n }\n", "file_path": "lib/vibrio/src/upcalls.rs", "rank": 54, "score": 236994.30698505155 }, { "content": "pub trait PageTableProvider<'a> {\n\n fn allocate_pml4<'b>(&mut self) -> Option<&'b mut paging::PML4>;\n\n fn new_pdpt(&mut self) -> Option<paging::PML4Entry>;\n\n fn new_pd(&mut self) -> Option<paging::PDPTEntry>;\n\n fn new_pt(&mut self) -> Option<paging::PDEntry>;\n\n fn new_page(&mut self) -> Option<paging::PTEntry>;\n\n}\n\n\n\n#[allow(dead_code)]\n\npub struct BespinPageTableProvider;\n\n\n\nimpl BespinPageTableProvider {\n\n #[allow(dead_code)]\n\n pub const fn new() -> BespinPageTableProvider {\n\n BespinPageTableProvider\n\n }\n\n}\n\n\n\nimpl<'a> PageTableProvider<'a> for BespinPageTableProvider {\n\n /// Allocate a PML4 table.\n", "file_path": "kernel/src/memory/mod.rs", "rank": 55, "score": 235258.16413695755 }, { "content": "fn kcb_iret_handle(kcb: &crate::kcb::Kcb<Arch86Kcb>) -> Ring3Resumer {\n\n Ring3Resumer::new_iret(kcb.arch.get_save_area_ptr())\n\n}\n\n\n\n/// Handler for all exceptions that happen early during the initialization\n\n/// (i.e., before we have a KCB) or are unrecoverable errors.\n\n///\n\n/// For these execptions we use different assembly bootstrap wrappers\n\n/// that don't assume `gs` has a KCB reference\n\n/// or save the context (because we don't have a KCB yet).\n\n///\n\n/// The only thing this is used for is to report as much as possible, and\n\n/// then exit.\n\n#[inline(never)]\n\n#[no_mangle]\n\npub extern \"C\" fn handle_generic_exception_early(a: ExceptionArguments) -> ! 
{\n\n sprintln!(\"[IRQ] Got an exception during kernel initialization:\");\n\n //sprintln!(\"{:?}\", a);\n\n\n\n match a.vector as u8 {\n", "file_path": "kernel/src/arch/x86_64/irq.rs", "rank": 56, "score": 230421.5380032801 }, { "content": "/// Dummy implementation of noop_context_switch().\n\nfn noop_context_switch(_a1: *mut u8, _a2: *mut u8) {}\n\n\n", "file_path": "lib/lineup/src/upcalls.rs", "rank": 57, "score": 229029.7163320629 }, { "content": "pub fn test_main_static(tests: &[&TestDescAndFn]) {\n\n test_start(tests.len());\n\n\n\n let mut failed = 0;\n\n let mut ignored = 0;\n\n let mut passed = 0;\n\n for test in tests {\n\n if test.desc.ignore {\n\n ignored += 1;\n\n test_ignored(test.desc.name.0);\n\n } else {\n\n let meta_data = test_before_run(test.desc.name.0);\n\n\n\n unsafe {\n\n __TEST_PANICKED = false;\n\n }\n\n\n\n test.testfn.0();\n\n\n\n unsafe {\n", "file_path": "lib/ctest/src/lib.rs", "rank": 58, "score": 221413.5845092469 }, { "content": "pub fn max_open_files() -> usize {\n\n let max_cores = vibrio::syscalls::System::threads()\n\n .expect(\"Can't get system topology\")\n\n .len();\n\n let max_files = match max_cores {\n\n 28 => 14,\n\n 56 => 28,\n\n 32 => 16,\n\n 64 => 32,\n\n 96 => 24,\n\n 192 => 48,\n\n _ => unreachable!(\n\n \"Unable to decide max #open files for mix-workload(max-cores {})\",\n\n max_cores\n\n ),\n\n };\n\n max_files\n\n}\n\n\n", "file_path": "usr/init/src/fxmark/mod.rs", "rank": 59, "score": 220562.34674820196 }, { "content": "pub fn calculate_throughput(ops: u64, time: Duration) -> usize {\n\n let nano_duration = time.as_nanos();\n\n let nano_per_operation = nano_duration / ops as u128;\n\n (Duration::from_secs(1).as_nanos() / nano_per_operation)\n\n .try_into()\n\n .unwrap()\n\n}\n\n\n\nunsafe impl Sync for MWRM {}\n", "file_path": "usr/init/src/fxmark/mwrm.rs", "rank": 60, "score": 220257.29145240725 }, { "content": "#[inline(always)]\n\npub fn backtrace_from(rbp: u64, rsp: u64, rip: u64) {\n\n let kernel_info = 
kcb::try_get_kcb().map(|k| {\n\n (\n\n k.kernel_binary(),\n\n k.arch.kernel_args().kernel_elf_offset.as_u64(),\n\n )\n\n });\n\n\n\n if kernel_info.is_some() {\n\n sprintln!(\"Backtrace:\");\n\n let (elf_data, relocated_offset) = kernel_info.expect(\"Don't have kernel info\");\n\n match elfloader::ElfBinary::new(\"kernel\", &elf_data) {\n\n Ok(elf_binary) => {\n\n let context = new_ctxt(&elf_binary);\n\n\n\n let mut count = 0;\n\n backtracer::trace_from(backtracer::EntryPoint::new(rbp, rsp, rip), |frame| {\n\n count += 1;\n\n backtrace_format(context.as_ref(), relocated_offset, count, frame)\n\n });\n\n }\n\n Err(e) => {\n\n sprintln!(\"Backtrace unavailable (can't parse kernel binary: '{}')\", e);\n\n }\n\n }\n\n } else {\n\n sprintln!(\"Backtrace unavailable (binary information missing)\");\n\n }\n\n}\n\n\n", "file_path": "kernel/src/panic.rs", "rank": 61, "score": 217577.61578186112 }, { "content": "/// The buffer is used by the file. Each buffer is BASE_PAGE_SIZE\n\n/// long and a file consists of many such buffers.\n\nstruct Buffer {\n\n data: Vec<u8>,\n\n}\n\n\n\nimpl Buffer {\n\n /// This function tries to allocate a vector of BASE_PAGE_SIZE long\n\n /// and returns a buffer in case of the success; error otherwise.\n\n pub fn try_alloc_buffer() -> Result<Buffer, FileSystemError> {\n\n let mut data = Vec::new();\n\n match data.try_reserve(BASE_PAGE_SIZE) {\n\n Ok(_) => Ok(Buffer { data }),\n\n Err(_) => Err(FileSystemError::OutOfMemory),\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug, Eq, PartialEq)]\n\n/// File type has a list of buffers and modes to access the file\n\npub struct File {\n\n mcache: Vec<Buffer>,\n", "file_path": "kernel/src/fs/file.rs", "rank": 62, "score": 216944.1273917039 }, { "content": "pub fn thread_defaults(max_cores: usize) -> Vec<usize> {\n\n let mut threads = Vec::with_capacity(12);\n\n\n\n // On larger machines thread increments are bigger than on\n\n // smaller machines:\n\n let thread_incremements = if max_cores > 120 {\n\n 8\n\n } else 
if max_cores > 24 {\n\n 8\n\n } else if max_cores > 16 {\n\n 4\n\n } else {\n\n 2\n\n };\n\n\n\n for t in (0..(max_cores + 1)).step_by(thread_incremements) {\n\n if t == 0 {\n\n // Can't run on 0 threads\n\n threads.push(t + 1);\n\n } else {\n", "file_path": "kernel/tests/integration-test.rs", "rank": 63, "score": 215126.7697396867 }, { "content": "#[allow(unused)]\n\npub fn rumpkern_curlwp() -> u64 {\n\n unsafe { threads::rumpuser_curlwp() as *const _ as u64 }\n\n}\n\n\n", "file_path": "lib/vibrio/src/rumprt/mod.rs", "rank": 64, "score": 213460.70836084796 }, { "content": "pub trait ArchSpecificKcb {\n\n type Process: Process + Sync;\n\n\n\n fn hwthread_id(&self) -> u64;\n\n fn install(&mut self);\n\n}\n", "file_path": "kernel/src/kcb.rs", "rank": 65, "score": 212231.41597762104 }, { "content": "/// Returns a slice to the bootstrap code in the kernel ELF .text section\n\n///\n\n/// Ideally this region of memory shouldn't be modified (it's mapped read-only by\n\n/// default anyways). We first copy it into a low memory region and then do the\n\n/// final adjustments there.\n\nfn get_orignal_bootstrap_code() -> &'static [u8] {\n\n let (start_address, _end_address) = ap_code_address_range();\n\n let boot_code_size = get_boostrap_code_size();\n\n\n\n // This is safe since this is in the kernel binary and always only\n\n // mapped read-only.\n\n let ap_bootstrap_code: &'static [u8] =\n\n unsafe { core::slice::from_raw_parts(start_address.as_u64() as *const u8, boot_code_size) };\n\n\n\n ap_bootstrap_code\n\n}\n\n\n\n/// Returns a slice to the bootstrap code region from where we boot new cores.\n\n///\n\n/// # Safety\n\n/// Basically this is only safe in the beginning of system initialization\n\n/// and we need to make sure we have memory backing the REAL_MODE_BASE region\n\n/// first.\n\nunsafe fn get_boostrap_code_region() -> &'static mut [u8] {\n\n let real_mode_destination: &mut [u8] =\n", "file_path": "kernel/src/arch/x86_64/coreboot.rs", "rank": 66, "score": 
209860.68585269485 }, { "content": "/// Signals that given test is ignored.\n\npub fn test_ignored(name: &str) {\n\n sprintln!(\"test {} ... ignored\", name);\n\n}\n\n\n", "file_path": "lib/ctest/src/lib.rs", "rank": 67, "score": 207255.1830279535 }, { "content": "pub fn test_failed(_name: &str) {\n\n sprintln!(\"FAILED\");\n\n}\n\n\n", "file_path": "lib/ctest/src/lib.rs", "rank": 68, "score": 207249.58384178852 }, { "content": "pub fn test_success(_name: &str) {\n\n sprintln!(\"OK\");\n\n}\n\n\n", "file_path": "lib/ctest/src/lib.rs", "rank": 69, "score": 207249.58384178852 }, { "content": "pub fn test_before_run(name: &str) {\n\n sprintln!(\"test {} ... \", name);\n\n}\n\n\n", "file_path": "lib/ctest/src/lib.rs", "rank": 70, "score": 207249.58384178858 }, { "content": "#[cfg(target_os = \"bespin\")]\n\n#[alloc_error_handler]\n\nfn oom(layout: core::alloc::Layout) -> ! {\n\n panic!(\"oom {:?}\", layout)\n\n}\n", "file_path": "lib/vibrio/src/lib.rs", "rank": 71, "score": 204123.35714801215 }, { "content": "/// Type to represent an IRQ vector.\n\ntype IrqVector = u64;\n", "file_path": "lib/lineup/src/lib.rs", "rank": 72, "score": 203296.4980059595 }, { "content": "/// Generates a random path entry.\n\nfn path_names() -> impl Strategy<Value = String> {\n\n prop_oneof![\n\n Just(String::from(\"/\")),\n\n Just(String::from(\"bespin\")),\n\n Just(String::from(\"hello\")),\n\n Just(String::from(\"world\")),\n\n Just(String::from(\"memory\")),\n\n Just(String::from(\"the\")),\n\n Just(String::from(\"fs\")),\n\n Just(String::from(\"rusty\")),\n\n Just(String::from(\"os\"))\n\n ]\n\n}\n\n\n", "file_path": "kernel/src/fs/test.rs", "rank": 73, "score": 200861.97319455846 }, { "content": "/// Invoked when unit tests terminate. Should panic if the unit\n\n/// Tests is considered a failure. 
By default, invokes `report()`\n\n/// and checks for a `0` result.\n\npub fn assert_test_result<T: Termination>(result: T) {\n\n let code = result.report();\n\n assert_eq!(\n\n code, 0,\n\n \"the test returned a termination value with a non-zero status code ({}) \\\n\n which indicates a failure\",\n\n code\n\n );\n\n}\n\n\n", "file_path": "lib/ctest/src/lib.rs", "rank": 74, "score": 200778.17052778605 }, { "content": "/// Abstract definition of a file descriptor.\n\npub trait FileDescriptor {\n\n fn init_fd() -> Fd;\n\n fn update_fd(&mut self, mnode: Mnode, flags: FileFlags);\n\n fn get_mnode(&self) -> Mnode;\n\n fn get_flags(&self) -> FileFlags;\n\n fn get_offset(&self) -> usize;\n\n fn update_offset(&self, new_offset: usize);\n\n}\n\n\n\n/// A file descriptor representaion.\n\n#[derive(Debug, Default)]\n\npub struct Fd {\n\n mnode: Mnode,\n\n flags: FileFlags,\n\n offset: AtomicUsize,\n\n}\n\n\n\nimpl FileDescriptor for Fd {\n\n fn init_fd() -> Fd {\n\n Fd {\n", "file_path": "kernel/src/fs/mod.rs", "rank": 75, "score": 200574.94764714467 }, { "content": "/// Abstract definition of file-system interface operations.\n\npub trait FileSystem {\n\n fn create(&mut self, pathname: &str, modes: Modes) -> Result<u64, FileSystemError>;\n\n fn write(\n\n &mut self,\n\n mnode_num: Mnode,\n\n buffer: &[u8],\n\n offset: usize,\n\n ) -> Result<usize, FileSystemError>;\n\n fn read(\n\n &self,\n\n mnode_num: Mnode,\n\n buffer: &mut UserSlice,\n\n offset: usize,\n\n ) -> Result<usize, FileSystemError>;\n\n fn lookup(&self, pathname: &str) -> Option<Arc<Mnode>>;\n\n fn file_info(&self, mnode: Mnode) -> FileInfo;\n\n fn delete(&mut self, pathname: &str) -> Result<bool, FileSystemError>;\n\n fn truncate(&mut self, pathname: &str) -> Result<bool, FileSystemError>;\n\n fn rename(&mut self, oldname: &str, newname: &str) -> Result<bool, FileSystemError>;\n\n fn mkdir(&mut self, pathname: &str, modes: Modes) -> Result<bool, FileSystemError>;\n\n}\n\n\n", "file_path": 
"kernel/src/fs/mod.rs", "rank": 76, "score": 200574.84489354654 }, { "content": "/// The backend implementation necessary to implement if we want a client to be\n\n/// able to grow our allocator by providing a list of frames.\n\npub trait GrowBackend {\n\n /// How much capacity we have to add base pages.\n\n fn base_page_capcacity(&self) -> usize;\n\n\n\n /// Add a slice of base-pages to `self`.\n\n fn grow_base_pages(&mut self, free_list: &[Frame]) -> Result<(), AllocationError>;\n\n\n\n /// How much capacity we have to add large pages.\n\n fn large_page_capcacity(&self) -> usize;\n\n\n\n /// Add a slice of large-pages to `self`.\n\n fn grow_large_pages(&mut self, free_list: &[Frame]) -> Result<(), AllocationError>;\n\n}\n\n\n", "file_path": "kernel/src/memory/mod.rs", "rank": 77, "score": 199987.56382916332 }, { "content": "/// The backend implementation necessary to implement if we want\n\n/// a system manager to take away be able to take away memory\n\n/// from our allocator.\n\npub trait ReapBackend {\n\n /// Ask to give base-pages back.\n\n ///\n\n /// An implementation should put the pages in the `free_list` and remove\n\n /// them from the local allocator.\n\n fn reap_base_pages(&mut self, free_list: &mut [Option<Frame>]);\n\n\n\n /// Ask to give large-pages back.\n\n ///\n\n /// An implementation should put the pages in the `free_list` and remove\n\n /// them from the local allocator.\n\n fn reap_large_pages(&mut self, free_list: &mut [Option<Frame>]);\n\n}\n\n\n", "file_path": "kernel/src/memory/mod.rs", "rank": 78, "score": 199977.02658832743 }, { "content": "/// Edge is connection of two nodes and slot within the page-table.\n\ntype Ed<'a> = ((Nd<'a>, usize), (Nd<'a>, usize));\n\n\n\nimpl<'a> dot::Labeller<'a> for PageTable {\n\n type Node = Nd<'a>;\n\n type Edge = Ed<'a>;\n\n\n\n fn graph_id(&'a self) -> dot::Id<'a> {\n\n dot::Id::new(\"vspace\").unwrap()\n\n }\n\n\n\n fn node_shape(&'a self, n: &Self::Node) -> Option<dot::LabelText<'a>> {\n\n match n 
{\n\n Nd::PT(_pt, _) => Some(dot::LabelText::label(\"record\")),\n\n Nd::PD(_pd, _) => Some(dot::LabelText::label(\"record\")),\n\n Nd::PDPT(_pdpt, _) => Some(dot::LabelText::label(\"record\")),\n\n Nd::PML4(_pml4, _) => Some(dot::LabelText::label(\"record\")),\n\n Nd::HugePage(_addr) => None,\n\n }\n\n }\n\n\n", "file_path": "kernel/src/arch/x86_64/vspace/debug.rs", "rank": 79, "score": 197581.9063259803 }, { "content": "#[cfg(target_os = \"none\")]\n\n#[no_mangle]\n\n#[allow(non_snake_case)]\n\npub fn _Unwind_Resume() {\n\n loop {}\n\n}\n", "file_path": "kernel/src/panic.rs", "rank": 80, "score": 197015.8612419633 }, { "content": "/// Creates a path of depth a given depth (4), represented as a\n\n/// vector of Strings.\n\nfn path() -> impl Strategy<Value = Vec<String>> {\n\n proptest::collection::vec(path_names(), 4)\n\n}\n\n\n\nproptest! {\n\n // Verify that our FS implementation behaves according to the `ModelFileSystem`.\n\n #[test]\n\n fn model_equivalence(ops in actions()) {\n\n let mut model: ModelFS = Default::default();\n\n let mut totest: fs::MemFS = Default::default();\n\n\n\n use TestAction::*;\n\n for action in ops {\n\n match action {\n\n Read(mnode, offset, len) => {\n\n\n\n let mut buffer1: Vec<u8> = Vec::with_capacity(len);\n\n let mut buffer2: Vec<u8> = Vec::with_capacity(len);\n\n\n\n let rmodel = model.read(mnode, &mut UserSlice::from_slice(buffer1.as_mut_slice()), offset);\n", "file_path": "kernel/src/fs/test.rs", "rank": 81, "score": 196048.38367997948 }, { "content": "pub fn init() {\n\n unsafe {\n\n io::outb(PORT1 + 1, 0x00); // Disable all interrupts\n\n io::outb(PORT1 + 3, 0x80); // Enable DLAB (set baud rate divisor)\n\n io::outb(PORT1 + 0, 0x01); // Set divisor to 1 (lo byte) 115200 baud\n\n io::outb(PORT1 + 1, 0x00); // (hi byte)\n\n io::outb(PORT1 + 3, 0x03); // 8 bits, no parity, one stop bit\n\n io::outb(PORT1 + 2, 0xC7); // Enable FIFO, clear them, with 14-byte threshold\n\n io::outb(PORT1 + 1, 0x01); // Enable receive data 
IRQ\n\n\n\n io::outb(PORT2 + 1, 0x00); // Disable all interrupts\n\n io::outb(PORT2 + 3, 0x80); // Enable DLAB (set baud rate divisor)\n\n io::outb(PORT2 + 0, 0x01); // Set divisor to 1 (lo byte) 115200 baud\n\n io::outb(PORT2 + 1, 0x00); // (hi byte)\n\n io::outb(PORT2 + 3, 0x03); // 8 bits, no parity, one stop bit\n\n io::outb(PORT2 + 2, 0xC7); // Enable FIFO, clear them, with 14-byte threshold\n\n io::outb(PORT2 + 1, 0x01); // Enable receive data IRQ\n\n }\n\n debug!(\"serial initialized\");\n\n}\n", "file_path": "kernel/src/arch/x86_64/debug.rs", "rank": 82, "score": 193328.36963635386 }, { "content": "#[inline(never)]\n\npub fn cause_pfault() {\n\n use super::memory::{paddr_to_kernel_vaddr, PAddr};\n\n\n\n unsafe {\n\n let paddr = PAddr::from(0xdeadbeefu64);\n\n let kernel_vaddr = paddr_to_kernel_vaddr(paddr);\n\n let ptr: *mut u64 = kernel_vaddr.as_mut_ptr();\n\n debug!(\"before causing the pfault\");\n\n let val = *ptr;\n\n assert!(val != 0);\n\n }\n\n}\n\n\n\n#[cfg(any(\n\n feature = \"test-gpfault-early\",\n\n all(feature = \"integration-test\", feature = \"test-gpfault\")\n\n))]\n", "file_path": "kernel/src/arch/x86_64/debug.rs", "rank": 83, "score": 189911.38923357252 }, { "content": "pub fn cause_gpfault() {\n\n // Note that int!(13) doesn't work in qemu. It doesn't push an error code properly for it.\n\n // So we cause a GP by loading garbage in the ss segment register.\n\n use x86::segmentation::{load_ss, SegmentSelector};\n\n unsafe {\n\n load_ss(SegmentSelector::new(99, x86::Ring::Ring3));\n\n }\n\n}\n\n\n", "file_path": "kernel/src/arch/x86_64/debug.rs", "rank": 84, "score": 189905.6627546649 }, { "content": "/// Find out how many pages we require to load the memory map\n\n/// into it.\n\n///\n\n/// Plan for some 32 more descriptors than originally estimated,\n\n/// due to UEFI API crazyness. 
Also round to page-size.\n\nfn estimate_memory_map_size(st: &SystemTable<Boot>) -> (usize, usize) {\n\n let mm_size_estimate = st.boot_services().memory_map_size();\n\n // Plan for some 32 more descriptors than originally estimated,\n\n // due to UEFI API crazyness, round to page-size\n\n let sz = round_up!(\n\n mm_size_estimate + 32 * mem::size_of::<MemoryDescriptor>(),\n\n BASE_PAGE_SIZE\n\n );\n\n assert_eq!(sz % BASE_PAGE_SIZE, 0, \"Not multiple of page-size.\");\n\n\n\n (sz, sz / mem::size_of::<MemoryDescriptor>())\n\n}\n\n\n", "file_path": "bootloader/src/main.rs", "rank": 85, "score": 187426.50810942205 }, { "content": "#[cfg(feature = \"test-double-fault\")]\n\npub fn cause_double_fault() {\n\n unsafe {\n\n x86::int!(0x8);\n\n }\n\n}\n\n\n\n/// Verify that we're actually using the fault-stack\n\n/// as part of the test\n", "file_path": "kernel/src/arch/x86_64/debug.rs", "rank": 86, "score": 186653.69091062725 }, { "content": "#[cfg(feature = \"test-double-fault\")]\n\npub fn assert_being_on_fault_stack() {\n\n let (low, high) = super::kcb::get_kcb().arch.fault_stack_range();\n\n let rsp = x86::current::registers::rsp();\n\n debug_assert!(\n\n rsp >= low && rsp <= high,\n\n \"We're not using the `unrecoverable_fault_stack`.\"\n\n );\n\n}\n", "file_path": "kernel/src/arch/x86_64/debug.rs", "rank": 87, "score": 186653.69091062725 }, { "content": "#[allow(unused)]\n\nfn debug_print_syscall(function: u64, arg1: u64, arg2: u64, arg3: u64, arg4: u64, arg5: u64) {\n\n sprint!(\"syscall: {:?}\", SystemCall::new(function));\n\n\n\n match SystemCall::new(function) {\n\n SystemCall::System => {\n\n sprintln!(\n\n \" {:?} {} {} {} {}\",\n\n SystemOperation::from(arg1),\n\n arg2,\n\n arg3,\n\n arg4,\n\n arg5\n\n );\n\n }\n\n SystemCall::Process => {\n\n sprintln!(\n\n \" {:?} {} {} {} {}\",\n\n ProcessOperation::from(arg1),\n\n arg2,\n\n arg3,\n", "file_path": "kernel/src/arch/x86_64/syscall.rs", "rank": 88, "score": 186348.70127423306 }, { "content": "/// Renders 
directed graph `g` in DOT syntax.\n\n/// (Simple wrapper around `render_opts` that passes a default set of options.)\n\npub fn render<'a, N, E, G>(g: &'a G)\n\nwhere\n\n N: Clone + 'a,\n\n E: Clone + 'a,\n\n G: Labeller<'a, Node = N, Edge = E> + GraphWalk<'a, Node = N, Edge = E>,\n\n{\n\n render_opts(g, &[])\n\n}\n\n\n", "file_path": "kernel/src/graphviz.rs", "rank": 89, "score": 186151.31560725463 }, { "content": "/// Runs the TLB shootdown protocol.\n\n///\n\n/// Takes the `TlbFlushHandle` and figures out what cores it needs to send an IPI to.\n\n/// It divides IPIs into clusters to avoid overhead of sending IPIs individually.\n\n/// Finally, waits until all cores have acknowledged the IPI before it returns.\n\npub fn shootdown(handle: TlbFlushHandle) {\n\n let my_gtid = {\n\n let kcb = super::kcb::get_kcb();\n\n kcb.arch.id()\n\n };\n\n\n\n // We support up to 16 IPI clusters, this will address `16*16 = 256` cores\n\n // Cluster ID (LDR[31:16]) is the address of the destination cluster\n\n // We pre-configure the upper half (cluster ID) of LDR here in the SmallVec\n\n // by initializing the elements\n\n let mut cluster_destination: SmallVec<[u32; 16]> = smallvec![\n\n 0 << 16,\n\n 1 << 16,\n\n 2 << 16,\n\n 3 << 16,\n\n 4 << 16,\n\n 5 << 16,\n\n 6 << 16,\n\n 7 << 16,\n\n 8 << 16,\n", "file_path": "kernel/src/arch/x86_64/tlb.rs", "rank": 90, "score": 184545.5907770862 }, { "content": "#[cfg(target_os = \"none\")]\n\n#[cfg_attr(target_os = \"none\", panic_handler)]\n\n#[no_mangle]\n\npub fn panic_impl(info: &PanicInfo) -> ! 
{\n\n sprint!(\n\n \"System panic encountered (On H/W thread {})\",\n\n topology::MACHINE_TOPOLOGY.current_thread().id\n\n );\n\n\n\n if let Some(message) = info.message() {\n\n sprint!(\": '{}'\", message);\n\n }\n\n if let Some(location) = info.location() {\n\n sprintln!(\" in {}:{}\", location.file(), location.line());\n\n } else {\n\n sprintln!(\"\");\n\n }\n\n\n\n // We need memory allocation for a backtrace, can't do that without a KCB\n\n kcb::try_get_kcb().map(|k| {\n\n // If we're already panicking, it usually doesn't help to panic more\n\n if !k.in_panic_mode {\n\n // Make sure we use the e{early, emergency} memory allocator for backtracing\n", "file_path": "kernel/src/panic.rs", "rank": 91, "score": 178106.0242773286 }, { "content": "/// Start the test harness.\n\npub fn test_start(ntests: usize) {\n\n sprintln!(\"running {} tests (using KVM support)\", ntests)\n\n}\n\n\n", "file_path": "lib/ctest/src/lib.rs", "rank": 92, "score": 178071.62143318664 }, { "content": "fn wait_for_sigterm(args: &RunnerArgs, r: Result<WaitStatus>, output: String) {\n\n match r {\n\n Ok(WaitStatus::Signaled(_, SIGTERM, _)) => { /* This is what we expect */ }\n\n Ok(WaitStatus::Exited(_, code)) => {\n\n let exit_status: ExitStatus = code.into();\n\n log_qemu_out(args, output);\n\n panic!(\"Unexpected exit code from QEMU: {}\", exit_status);\n\n }\n\n Err(e) => {\n\n log_qemu_out(args, output);\n\n panic!(\"Qemu testing failed: {}\", e);\n\n }\n\n e => {\n\n log_qemu_out(args, output);\n\n panic!(\n\n \"Something weird happened to the Qemu process, please investigate: {:?}\",\n\n e\n\n );\n\n }\n\n };\n\n}\n\n\n", "file_path": "kernel/tests/integration-test.rs", "rank": 93, "score": 176810.44242134734 }, { "content": "pub fn rumpkern_unsched(nlocks: &mut i32, mtx: Option<&Mutex>) {\n\n let upcalls = HYPERUPCALLS.load(Ordering::Relaxed) as *const RumpHyperUpcalls;\n\n\n\n let mtx = mtx.map_or(ptr::null(), |mtx| mtx as *const Mutex);\n\n unsafe {\n\n trace!(\n\n 
\"rumpkern_unsched {} {:p} lwp={:p} upcalls = {:p}\",\n\n nlocks,\n\n mtx,\n\n threads::rumpuser_curlwp(),\n\n upcalls\n\n );\n\n (*upcalls).hyp_backend_unschedule.unwrap()(0, nlocks as *mut c_int, mtx as *const u64);\n\n }\n\n}\n\n\n", "file_path": "lib/vibrio/src/rumprt/mod.rs", "rank": 95, "score": 175172.92148983912 }, { "content": "/// Shutdown the processor.\n\n///\n\n/// Currently we only support the debug exit method from qemu, which conveniently\n\n/// allows us to supply an exit code for testing purposes.\n\npub fn shutdown(val: ExitReason) -> ! {\n\n unsafe {\n\n // For QEMU with debug-exit,iobase=0xf4,iosize=0x04\n\n // qemu will call: exit((val << 1) | 1);\n\n io::outb(0xf4, val as u8);\n\n }\n\n\n\n // For CI run.py bare-metal execution, parses exit code\n\n // (Do not change this line without adjusting run.py)\n\n sprintln!(\"[shutdown-request] {}\", val as u8);\n\n\n\n // TODO(bare-metal): Do some ACPI magic to shutdown things\n\n\n\n // In case this doesn't work we hang.\n\n loop {\n\n unsafe { x86::halt() };\n\n }\n\n}\n\n\n\n#[cfg(any(\n\n feature = \"test-pfault-early\",\n\n all(feature = \"integration-test\", feature = \"test-pfault\")\n\n))]\n", "file_path": "kernel/src/arch/x86_64/debug.rs", "rank": 96, "score": 174961.31330869216 }, { "content": "/// Shutdown the process.\n\npub fn shutdown(val: ExitReason) -> ! 
{\n\n sprintln!(\"Shutdown {:?}\", val);\n\n\n\n unsafe {\n\n libc::exit(val as i32);\n\n }\n\n}\n", "file_path": "kernel/src/arch/unix/debug.rs", "rank": 97, "score": 174956.9871156697 }, { "content": "fn check_for_successful_exit(args: &RunnerArgs, r: Result<WaitStatus>, output: String) {\n\n check_for_exit(ExitStatus::Success, args, r, output);\n\n}\n\n\n", "file_path": "kernel/tests/integration-test.rs", "rank": 98, "score": 174351.3116232988 }, { "content": "pub fn test_summary(passed: usize, failed: usize, ignored: usize) {\n\n sprintln!(\n\n \"\\ntest result: {} {} passed; {} failed; {} ignored\",\n\n if failed == 0 { \"OK\" } else { \"FAILED\" },\n\n passed,\n\n failed,\n\n ignored\n\n );\n\n\n\n if failed != 0 {\n\n //std::process::exit(101);\n\n }\n\n}\n\n\n", "file_path": "lib/ctest/src/lib.rs", "rank": 99, "score": 174098.26703084708 } ]
Rust
src/cfg/global/project.rs
vincent-herlemont/short
805aa75a4605eb9b587c82135ccc8e8883df1192
use crate::cfg::global::setup::{GlobalProjectSetupCfg, SetupName}; use crate::cfg::SetupsCfg; use anyhow::{Context, Result}; use serde::de::{MapAccess, Visitor}; use serde::export::Formatter; use serde::ser::SerializeMap; use serde::{Deserialize, Deserializer, Serialize, Serializer}; use std::cell::RefCell; use std::fmt; use std::path::{Path, PathBuf}; use std::rc::Rc; type EnvName = String; #[derive(Debug, Serialize, Deserialize)] struct CurrentSetup { #[serde(rename = "setup", skip_serializing_if = "Option::is_none")] pub setup_name: Option<String>, #[serde(rename = "env", skip_serializing_if = "Option::is_none")] pub env_name: Option<EnvName>, } impl CurrentSetup { pub fn new() -> Self { Self { setup_name: None, env_name: None, } } } impl Default for CurrentSetup { fn default() -> Self { Self::new() } } #[derive(Debug, Serialize, Deserialize)] pub struct GlobalProjectCfg { file: PathBuf, #[serde(skip_serializing_if = "Option::is_none")] current: Option<CurrentSetup>, setups: GlobalProjectSetupsCfg, } #[derive(Debug)] pub struct GlobalProjectSetupsCfg(Rc<RefCell<Vec<Rc<RefCell<GlobalProjectSetupCfg>>>>>); impl GlobalProjectSetupsCfg { pub fn new() -> Self { Self(Rc::new(RefCell::new(vec![]))) } pub fn add(&mut self, global_setup_cfg: GlobalProjectSetupCfg) { let mut global_setups_cfg = self.0.borrow_mut(); if global_setups_cfg .iter() .find(|lsc| { let lsc = lsc.borrow(); lsc.name() == global_setup_cfg.name() }) .is_none() { global_setups_cfg.push(Rc::new(RefCell::new(global_setup_cfg))) } } } impl Serialize for GlobalProjectSetupsCfg { fn serialize<S>(&self, serializer: S) -> Result<<S as Serializer>::Ok, <S as Serializer>::Error> where S: Serializer, { let vec = &self.0.borrow(); let mut seq = serializer.serialize_map(Some(vec.len()))?; for global_setup_cfg in vec.iter() { let global_setup_cfg = global_setup_cfg.borrow(); let name = global_setup_cfg.name(); seq.serialize_entry(name, &*global_setup_cfg)?; } seq.end() } } impl<'de> Deserialize<'de> for 
GlobalProjectSetupsCfg { fn deserialize<D>(deserializer: D) -> Result<Self, <D as Deserializer<'de>>::Error> where D: Deserializer<'de>, { struct InnerVisitor; impl<'de> Visitor<'de> for InnerVisitor { type Value = GlobalProjectSetupsCfg; fn expecting(&self, formatter: &mut Formatter) -> fmt::Result { formatter.write_str("incorrect list of global setup cfg") } fn visit_map<A>(self, mut map: A) -> Result<Self::Value, A::Error> where A: MapAccess<'de>, { let mut global_setups_cfg = GlobalProjectSetupsCfg::new(); while let Some((setup_name, mut global_setup_cfg)) = map.next_entry::<SetupName, GlobalProjectSetupCfg>()? { global_setup_cfg.set_name(setup_name); global_setups_cfg.add(global_setup_cfg); } Ok(global_setups_cfg) } } deserializer.deserialize_map(InnerVisitor) } } impl GlobalProjectCfg { pub fn new(file: &PathBuf) -> Result<Self> { let mut gp = GlobalProjectCfg { file: PathBuf::new(), current: None, setups: GlobalProjectSetupsCfg::new(), }; gp.set_file(file)?; Ok(gp) } pub fn set_file(&mut self, file: &PathBuf) -> Result<()> { if !file.is_absolute() { return Err(anyhow!(format!( "project file path can not be relative {}", file.to_string_lossy() ))); } if let None = file.file_name() { return Err(anyhow!(format!("project file has no name"))); } self.file = file.clone(); Ok(()) } pub fn file(&self) -> &PathBuf { &self.file } pub fn dir(&self) -> Result<&Path> { self.file.parent().context(format!( "fail to found parent directory of project `{}`", self.file.to_string_lossy() )) } pub fn set_current_setup_name(&mut self, setup_name: SetupName) { self.current.get_or_insert(CurrentSetup::new()).setup_name = Some(setup_name); } pub fn current_setup_name(&self) -> Option<&SetupName> { self.current .as_ref() .map_or(None, |current| current.setup_name.as_ref()) } pub fn set_current_env_name(&mut self, env_name: EnvName) { self.current.get_or_insert(CurrentSetup::new()).env_name = Some(env_name); } pub fn unset_current_env_name(&mut self) { 
self.current.get_or_insert(CurrentSetup::new()).env_name = None; } pub fn current_env_name(&self) -> Option<&EnvName> { self.current .as_ref() .map_or(None, |current| current.env_name.as_ref()) } pub fn unset_current_setup(&mut self) { self.current = None; } } impl SetupsCfg for GlobalProjectCfg { type Setup = GlobalProjectSetupCfg; fn get_setups(&self) -> Rc<RefCell<Vec<Rc<RefCell<Self::Setup>>>>> { Rc::clone(&self.setups.0) } } impl PartialEq<PathBuf> for GlobalProjectCfg { fn eq(&self, path_buf: &PathBuf) -> bool { self.file().eq(path_buf) } } impl PartialEq<GlobalProjectCfg> for PathBuf { fn eq(&self, path_buf: &GlobalProjectCfg) -> bool { self.eq(&path_buf.file) } } #[cfg(test)] mod test { use std::path::PathBuf; use crate::cfg::global::project::GlobalProjectCfg; use crate::cfg::global::setup::GlobalProjectSetupCfg; use crate::cfg::SetupsCfg; #[test] fn deserialization_serialization_cfg() { let content = r"--- file: path/to/file current: setup: setup_1 setups: test_1: {}"; let cfg = serde_yaml::from_str::<GlobalProjectCfg>(content).unwrap(); let r = serde_yaml::to_string(&cfg).unwrap(); assert_eq!(content, r); } #[test] fn global_update_private_env_dir() { let setup_cfg = GlobalProjectSetupCfg::new("setup".into()); let mut project_cfg = GlobalProjectCfg::new(&"/project".into()).unwrap(); project_cfg.add_setup(setup_cfg); assert!(project_cfg.get_setups().borrow().iter().count().eq(&1)); { let setup_cfg = project_cfg.get_setup(&"setup".into()).unwrap(); setup_cfg .borrow_mut() .set_private_env_dir("/private_env".into()) .unwrap(); } let global_project_setup_cfg_1 = project_cfg.get_setup(&"setup".into()).unwrap(); assert_eq!( global_project_setup_cfg_1 .borrow() .private_env_dir() .unwrap(), &PathBuf::from("/private_env") ); project_cfg.remove_by_name_setup(&"setup".into()); assert!(project_cfg.get_setup(&"setup".into()).is_none()); } }
use crate::cfg::global::setup::{GlobalProjectSetupCfg, SetupName}; use crate::cfg::SetupsCfg; use anyhow::{Context, Result}; use serde::de::{MapAccess, Visitor}; use serde::export::Formatter; use serde::ser::SerializeMap; use serde::{Deserialize, Deserializer, Serialize, Serializer}; use std::cell::RefCell; use std::fmt; use std::path::{Path, PathBuf}; use std::rc::Rc; type EnvName = String; #[derive(Debug, Serialize, Deserialize)] struct CurrentSetup { #[serde(rename = "setup", skip_serializing_if = "Option::is_none")] pub setup_name: Option<String>, #[serde(rename = "env", skip_serializing_if = "Option::is_none")] pub env_name: Option<EnvName>, } impl CurrentSetup { pub fn new() -> Self { Self { setup_name: None, env_name: None, } } } impl Default for CurrentSetup { fn default() -> Self { Self::new() } } #[derive(Debug, Serialize, Deserialize)] pub struct GlobalProjectCfg { file: PathBuf, #[serde(skip_serializing_if = "Option::is_none")] current: Option<CurrentSetup>, setups: GlobalProjectSetupsCfg, } #[derive(Debug)] pub struct GlobalProjectSetupsCfg(Rc<RefCell<Vec<Rc<RefCell<GlobalProjectSetupCfg>>>>>); impl GlobalProjectSetupsCfg { pub fn new() -> Self { Self(Rc::new(RefCell::new(vec![]))) } pub fn add(&mut self, global_setup_cfg: GlobalProjectSetupCfg) { let mut global_setups_cfg = self.0.borrow_mut(); if global_setups_cfg .iter() .find(|lsc| { let lsc = lsc.borrow(); lsc.name() == globa
} impl Serialize for GlobalProjectSetupsCfg { fn serialize<S>(&self, serializer: S) -> Result<<S as Serializer>::Ok, <S as Serializer>::Error> where S: Serializer, { let vec = &self.0.borrow(); let mut seq = serializer.serialize_map(Some(vec.len()))?; for global_setup_cfg in vec.iter() { let global_setup_cfg = global_setup_cfg.borrow(); let name = global_setup_cfg.name(); seq.serialize_entry(name, &*global_setup_cfg)?; } seq.end() } } impl<'de> Deserialize<'de> for GlobalProjectSetupsCfg { fn deserialize<D>(deserializer: D) -> Result<Self, <D as Deserializer<'de>>::Error> where D: Deserializer<'de>, { struct InnerVisitor; impl<'de> Visitor<'de> for InnerVisitor { type Value = GlobalProjectSetupsCfg; fn expecting(&self, formatter: &mut Formatter) -> fmt::Result { formatter.write_str("incorrect list of global setup cfg") } fn visit_map<A>(self, mut map: A) -> Result<Self::Value, A::Error> where A: MapAccess<'de>, { let mut global_setups_cfg = GlobalProjectSetupsCfg::new(); while let Some((setup_name, mut global_setup_cfg)) = map.next_entry::<SetupName, GlobalProjectSetupCfg>()? 
{ global_setup_cfg.set_name(setup_name); global_setups_cfg.add(global_setup_cfg); } Ok(global_setups_cfg) } } deserializer.deserialize_map(InnerVisitor) } } impl GlobalProjectCfg { pub fn new(file: &PathBuf) -> Result<Self> { let mut gp = GlobalProjectCfg { file: PathBuf::new(), current: None, setups: GlobalProjectSetupsCfg::new(), }; gp.set_file(file)?; Ok(gp) } pub fn set_file(&mut self, file: &PathBuf) -> Result<()> { if !file.is_absolute() { return Err(anyhow!(format!( "project file path can not be relative {}", file.to_string_lossy() ))); } if let None = file.file_name() { return Err(anyhow!(format!("project file has no name"))); } self.file = file.clone(); Ok(()) } pub fn file(&self) -> &PathBuf { &self.file } pub fn dir(&self) -> Result<&Path> { self.file.parent().context(format!( "fail to found parent directory of project `{}`", self.file.to_string_lossy() )) } pub fn set_current_setup_name(&mut self, setup_name: SetupName) { self.current.get_or_insert(CurrentSetup::new()).setup_name = Some(setup_name); } pub fn current_setup_name(&self) -> Option<&SetupName> { self.current .as_ref() .map_or(None, |current| current.setup_name.as_ref()) } pub fn set_current_env_name(&mut self, env_name: EnvName) { self.current.get_or_insert(CurrentSetup::new()).env_name = Some(env_name); } pub fn unset_current_env_name(&mut self) { self.current.get_or_insert(CurrentSetup::new()).env_name = None; } pub fn current_env_name(&self) -> Option<&EnvName> { self.current .as_ref() .map_or(None, |current| current.env_name.as_ref()) } pub fn unset_current_setup(&mut self) { self.current = None; } } impl SetupsCfg for GlobalProjectCfg { type Setup = GlobalProjectSetupCfg; fn get_setups(&self) -> Rc<RefCell<Vec<Rc<RefCell<Self::Setup>>>>> { Rc::clone(&self.setups.0) } } impl PartialEq<PathBuf> for GlobalProjectCfg { fn eq(&self, path_buf: &PathBuf) -> bool { self.file().eq(path_buf) } } impl PartialEq<GlobalProjectCfg> for PathBuf { fn eq(&self, path_buf: &GlobalProjectCfg) -> bool { 
self.eq(&path_buf.file) } } #[cfg(test)] mod test { use std::path::PathBuf; use crate::cfg::global::project::GlobalProjectCfg; use crate::cfg::global::setup::GlobalProjectSetupCfg; use crate::cfg::SetupsCfg; #[test] fn deserialization_serialization_cfg() { let content = r"--- file: path/to/file current: setup: setup_1 setups: test_1: {}"; let cfg = serde_yaml::from_str::<GlobalProjectCfg>(content).unwrap(); let r = serde_yaml::to_string(&cfg).unwrap(); assert_eq!(content, r); } #[test] fn global_update_private_env_dir() { let setup_cfg = GlobalProjectSetupCfg::new("setup".into()); let mut project_cfg = GlobalProjectCfg::new(&"/project".into()).unwrap(); project_cfg.add_setup(setup_cfg); assert!(project_cfg.get_setups().borrow().iter().count().eq(&1)); { let setup_cfg = project_cfg.get_setup(&"setup".into()).unwrap(); setup_cfg .borrow_mut() .set_private_env_dir("/private_env".into()) .unwrap(); } let global_project_setup_cfg_1 = project_cfg.get_setup(&"setup".into()).unwrap(); assert_eq!( global_project_setup_cfg_1 .borrow() .private_env_dir() .unwrap(), &PathBuf::from("/private_env") ); project_cfg.remove_by_name_setup(&"setup".into()); assert!(project_cfg.get_setup(&"setup".into()).is_none()); } }
l_setup_cfg.name() }) .is_none() { global_setups_cfg.push(Rc::new(RefCell::new(global_setup_cfg))) } }
function_block-function_prefixed
[ { "content": "pub fn run_as_stream(file: &PathBuf, vars: &Vec<EnvVar>, args: &Vec<String>) -> Result<Output> {\n\n let file = file.canonicalize()?;\n\n let mut command = Command::new(&file);\n\n\n\n for env_var in vars.iter() {\n\n command.env(env_var.var().to_env_var(), env_var.env_value().to_string());\n\n }\n\n\n\n if let Some(parent) = file.parent() {\n\n command.current_dir(parent);\n\n }\n\n\n\n let mut child = command\n\n .stdout(Stdio::piped())\n\n .stdin(Stdio::piped())\n\n .stderr(Stdio::piped())\n\n .args(args)\n\n .spawn()\n\n .context(format!(\"command {} fail\", &file.to_string_lossy()))?;\n\n\n", "file_path": "src/run_file/mod.rs", "rank": 0, "score": 286283.6596318084 }, { "content": "pub fn selected_envs(app: &ArgMatches, setup: &Setup, settings: &Settings) -> Result<Vec<Env>> {\n\n let mut selected_env_names = vec![];\n\n\n\n if let Ok(env_name) = settings.env() {\n\n selected_env_names.push(env_name.to_owned());\n\n }\n\n if let Some(mut envs) = app.values_of_lossy(\"environments\") {\n\n selected_env_names.append(&mut envs);\n\n }\n\n\n\n let envs: Vec<_> = setup.envs().into_iter().filter_map(|r| r.ok()).collect();\n\n let recent_env = Env::recent(&envs)?;\n\n let sync_settings = SyncSettings::new(app);\n\n let mut envs = sync_workflow(recent_env, envs, sync_settings)?;\n\n envs.sort();\n\n let envs: Vec<_> = envs\n\n .into_iter()\n\n .filter(|env| {\n\n if let Ok(name) = env.name() {\n\n selected_env_names\n", "file_path": "src/cli/selected_envs.rs", "rank": 1, "score": 272486.4251672083 }, { "content": "pub fn read_dir(dir: &PathBuf) -> Vec<Result<Env>> {\n\n let mut envs = vec![];\n\n if let Ok(entries) = fs::read_dir(dir) {\n\n for entry in entries {\n\n if let Ok(entry) = entry {\n\n if !entry.path().is_file() {\n\n continue;\n\n }\n\n\n\n // Ignore files that not start by \".\"\n\n if let Ok(file_name) = entry.file_name().into_string() {\n\n if !file_name.starts_with(\".\") {\n\n continue;\n\n }\n\n } else {\n\n continue;\n\n }\n\n\n\n 
let path = entry.path();\n\n let env = Env::from_file_reader(&path)\n\n .context(format!(\"fail to read read `{:?}` \", &path));\n\n envs.append(&mut vec![env]);\n\n }\n\n }\n\n }\n\n envs\n\n}\n", "file_path": "src/env_file/read_dir.rs", "rank": 2, "score": 272331.0222010446 }, { "content": "pub fn find_in_parents(dir: PathBuf, file_name: String) -> Result<PathBuf> {\n\n let file_path = dir.join(&file_name);\n\n if file_path.exists() {\n\n Ok(file_path)\n\n } else {\n\n let parent_path = dir\n\n .parent()\n\n .context(\"root directory reached\")?\n\n .to_path_buf();\n\n find_in_parents(parent_path, file_name)\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use std::fs::read_to_string;\n\n use std::path::Path;\n\n\n\n use cli_integration_test::IntegrationTestEnvironment;\n\n use predicates::prelude::Predicate;\n", "file_path": "src/utils/find.rs", "rank": 3, "score": 262895.20833562955 }, { "content": "pub fn set_exec_permision(file: &PathBuf) -> Result<()> {\n\n let file = file.canonicalize()?;\n\n let permissions = Permissions::from_mode(0o755);\n\n set_permissions(file, permissions)?;\n\n Ok(())\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use cli_integration_test::IntegrationTestEnvironment;\n\n\n\n use crate::cfg::{ArrayVar, ArrayVars, Vars};\n\n use crate::run_file::file::{File, Kind};\n\n use crate::run_file::kind::ShScript;\n\n\n\n #[test]\n\n fn file_append() {\n\n let mut file = File {\n\n path: \"\".into(),\n\n content: \"\".into(),\n", "file_path": "src/run_file/file.rs", "rank": 4, "score": 262254.55199413875 }, { "content": "pub fn generate_array_env_var(env: &Env, array_var: &ArrayVar) -> Result<EnvVar> {\n\n let re = Regex::new(array_var.pattern().as_str())?;\n\n let mut array_var_value: Vec<env_file::Var> = vec![];\n\n for var in env.iter() {\n\n if re.is_match(var.name()) {\n\n array_var_value.push(var.clone());\n\n }\n\n }\n\n Ok((\n\n array_var.var().clone(),\n\n EnvValue::ArrayVar((array_var.clone(), array_var_value)),\n\n )\n\n 
.into())\n\n}\n\n\n", "file_path": "src/run_file/var.rs", "rank": 5, "score": 249278.00967459168 }, { "content": "pub fn get_global_cfg(file: &PathBuf) -> Result<FileCfg<GlobalCfg>> {\n\n FileCfg::load(file)\n\n}\n\n\n\nimpl From<LocalCfg> for FileCfg<LocalCfg> {\n\n fn from(cfg: LocalCfg) -> Self {\n\n Self { cfg, file: None }\n\n }\n\n}\n\n\n\nimpl From<GlobalCfg> for FileCfg<GlobalCfg> {\n\n fn from(cfg: GlobalCfg) -> Self {\n\n Self { cfg, file: None }\n\n }\n\n}\n\n\n\nimpl<C> Display for FileCfg<C>\n\nwhere\n\n C: Serialize + DeserializeOwned,\n\n{\n", "file_path": "src/cfg/file/mod.rs", "rank": 6, "score": 242002.03694262248 }, { "content": "pub fn get_local_cfg(file: &PathBuf) -> Result<FileCfg<LocalCfg>> {\n\n let dir = file.parent().context(format!(\n\n \"fail to reach directory of local cfg file {}\",\n\n file.to_string_lossy()\n\n ))?;\n\n let file_name = file\n\n .file_name()\n\n .context(format!(\n\n \"fail te get file name of local cfg file {}\",\n\n file.to_string_lossy()\n\n ))?\n\n .to_str()\n\n .context(format!(\n\n \"cfg file name mut be contain only utf-8 char : {}\",\n\n file.to_string_lossy()\n\n ))?\n\n .to_string();\n\n let path = find_in_parents(dir.to_path_buf(), file_name).context(\"fail to found local cfg\")?;\n\n FileCfg::load(&path)\n\n}\n\n\n", "file_path": "src/cfg/file/mod.rs", "rank": 7, "score": 242002.03694262248 }, { "content": "pub fn new_local_cfg(dir: &PathBuf) -> Result<FileCfg<LocalCfg>> {\n\n let local_cfg_file = local_cfg_file(dir);\n\n\n\n if let Ok(_) = get_local_cfg(&local_cfg_file) {\n\n return Err(anyhow!(\"local cfg {:?} already exist\", local_cfg_file));\n\n }\n\n\n\n FileCfg::new(&local_cfg_file, LocalCfg::new())\n\n}\n\n\n", "file_path": "src/cfg/file/mod.rs", "rank": 8, "score": 241459.94927522872 }, { "content": "pub fn env_new(app: &ArgMatches) -> Result<()> {\n\n let mut cfg = get_cfg()?;\n\n cfg.sync_local_to_global()?;\n\n let cfg = cfg;\n\n\n\n let mut settings = get_settings(app, &cfg);\n\n let 
sync_settings = SyncSettings::new(&app);\n\n\n\n let setup_name = settings.setup()?;\n\n let env_name: String = app.value_of(\"name\").unwrap().into();\n\n let private = app.is_present(\"private\");\n\n\n\n let setup = cfg.current_setup(setup_name)?;\n\n let mut envs = setup.envs().into_iter().filter_map(|r| r.ok()).collect();\n\n let recent_env = Env::recent(&envs);\n\n\n\n let new_env = env_new_workflow(&cfg, &setup_name, &env_name, &private, &false)?;\n\n envs.push(new_env.clone());\n\n\n\n if let Ok(recent_env) = recent_env {\n", "file_path": "src/cli/commands/new.rs", "rank": 9, "score": 240255.94819602752 }, { "content": "pub fn load_or_new_global_cfg(dir: &PathBuf) -> Result<FileCfg<GlobalCfg>> {\n\n let global_dir = global_cfg_directory(dir);\n\n let global_cfg_file = global_dir.join(GLOBAL_FILE_NAME.to_string());\n\n\n\n // TODO : Think about return error relative to syntax or log it.\n\n let global = get_global_cfg(&global_cfg_file).map_or(\n\n FileCfg::new(&global_cfg_file, GlobalCfg::new()).context(format!(\n\n \"fail to create new global cfg file {:?}\",\n\n global_cfg_file\n\n ))?,\n\n |v| v,\n\n );\n\n\n\n Ok(global)\n\n}\n\n\n", "file_path": "src/cfg/file/mod.rs", "rank": 10, "score": 238495.85602790245 }, { "content": "pub fn r#use(app: &ArgMatches) -> Result<()> {\n\n let mut cfg = get_cfg()?;\n\n cfg.sync_local_to_global()?;\n\n let cfg = cfg;\n\n\n\n let mut settings: Settings = (&cfg).into();\n\n if app.is_present(\"environment\") {\n\n if let Some(setup) = app.value_of_lossy(\"setup_or_environment\") {\n\n settings.set_setup(setup.to_string());\n\n }\n\n if let Some(env) = app.value_of_lossy(\"environment\") {\n\n settings.set_env(env.to_string());\n\n }\n\n } else {\n\n if let Some(setup_or_env) = app.value_of_lossy(\"setup_or_environment\") {\n\n if settings.env().is_ok() {\n\n settings.set_env(setup_or_env.to_string());\n\n } else {\n\n settings.set_setup(setup_or_env.to_string());\n\n }\n", "file_path": "src/cli/commands/use.rs", "rank": 
11, "score": 234257.49720288973 }, { "content": "pub fn path_from_env_name<P: AsRef<Path>>(dir: P, env_name: &String) -> PathBuf {\n\n dir.as_ref()\n\n .to_path_buf()\n\n .join(PathBuf::from(format!(\".{}\", env_name)))\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use std::io::Cursor;\n\n use std::path::PathBuf;\n\n\n\n use crate::env_file::Env;\n\n\n\n #[test]\n\n fn name_from() {\n\n let file = PathBuf::from(\"/test-env\");\n\n let env: Env = file.into();\n\n let file_name = env.file_name().unwrap();\n\n assert_eq!(file_name, \"test-env\");\n\n }\n", "file_path": "src/env_file/mod.rs", "rank": 12, "score": 232585.239183812 }, { "content": "pub fn var_name(array_var: &ArrayVar, var: &env_file::Var) -> String {\n\n match array_var.case() {\n\n VarCase::CamelCase => var.name().to_camel_case(),\n\n VarCase::KebabCase => var.name().to_kebab_case(),\n\n VarCase::SnakeCase => var.name().to_snake_case(),\n\n VarCase::ShoutySnakeCase => var.name().to_shouty_snake_case(),\n\n VarCase::MixedCase => var.name().to_mixed_case(),\n\n VarCase::TitleCase => var.name().to_title_case(),\n\n VarCase::None => var.name().to_owned(),\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct EnvVar(VarName, EnvValue);\n\n\n\npub const ENV_ENVIRONMENT_VAR: &'static str = \"short_env\";\n\npub const ENV_SETUP_VAR: &'static str = \"short_setup\";\n\n\n\nimpl EnvVar {\n\n pub fn var(&self) -> &VarName {\n", "file_path": "src/run_file/var.rs", "rank": 13, "score": 215625.8394208241 }, { "content": "pub fn load_local_cfg(dir: &PathBuf) -> Result<FileCfg<LocalCfg>> {\n\n let local_cfg_file = local_cfg_file(dir);\n\n\n\n let local_cfg = get_local_cfg(&local_cfg_file).context(format!(\n\n \"cfg file not found {}\",\n\n local_cfg_file.to_string_lossy()\n\n ))?;\n\n\n\n Ok(local_cfg)\n\n}\n\n\n", "file_path": "src/cfg/file/mod.rs", "rank": 14, "score": 211884.15940657374 }, { "content": "pub fn envs(app: &ArgMatches) -> Result<()> {\n\n let mut cfg = get_cfg()?;\n\n cfg.sync_local_to_global()?;\n\n let cfg 
= cfg;\n\n\n\n let settings = get_settings(app, &cfg);\n\n\n\n let setup_name = settings.setup()?;\n\n let setup = cfg.current_setup(setup_name)?;\n\n\n\n let envs = selected_envs(app, &setup, &settings)?;\n\n\n\n let is_current_env = |env: &Env| {\n\n if let Ok(current_env) = settings.env() {\n\n if let Ok(env_name) = env.name() {\n\n if *current_env == env_name {\n\n return true;\n\n }\n\n }\n\n }\n", "file_path": "src/cli/commands/envs.rs", "rank": 15, "score": 209988.01756970864 }, { "content": "pub fn generate_env_var(env: &Env, var: &VarName) -> EnvVar {\n\n env.iter()\n\n .find_map(|env_var| {\n\n if env_var.name() == &var.to_env_var() {\n\n Some((var.clone(), EnvValue::Var(env_var.clone())).into())\n\n } else {\n\n None\n\n }\n\n })\n\n .map_or(\n\n (\n\n var.clone(),\n\n EnvValue::Var(env_file::Var::new(var.to_string(), \"\")),\n\n )\n\n .into(),\n\n |e| e,\n\n )\n\n}\n\n\n", "file_path": "src/run_file/var.rs", "rank": 16, "score": 207838.2280188023 }, { "content": "pub fn unuse_workflow(cfg: &Cfg) -> Result<()> {\n\n let global_project = cfg.current_project()?;\n\n let mut global_project = global_project.borrow_mut();\n\n global_project.unset_current_setup();\n\n Ok(())\n\n}\n\n\n", "file_path": "src/cli/commands/use.rs", "rank": 17, "score": 200912.01076269784 }, { "content": "pub fn use_workflow(cfg: &Cfg, settings: &Settings) -> Result<()> {\n\n let setup_name = settings.setup()?;\n\n let setup = cfg.current_setup(setup_name)?;\n\n let global_project = cfg.current_project()?;\n\n let mut global_project = global_project.borrow_mut();\n\n let setup_name = settings.setup()?;\n\n global_project.set_current_setup_name(setup_name.to_owned());\n\n if let Ok(env_name) = settings.env() {\n\n setup\n\n .env_file(env_name)\n\n .context(format!(\"fail to found env {:?}\", env_name))?;\n\n global_project.set_current_env_name(env_name.to_owned());\n\n }\n\n\n\n Ok(())\n\n}\n", "file_path": "src/cli/commands/use.rs", "rank": 18, "score": 199257.53483457596 }, { 
"content": "pub fn env_new_workflow(\n\n cfg: &Cfg,\n\n setup_name: &String,\n\n env_name: &String,\n\n private: &bool,\n\n example: &bool,\n\n) -> Result<Env> {\n\n let setup = cfg.current_setup(setup_name)?;\n\n\n\n let retrieve_env_is_not_exists = |dir: PathBuf| -> Result<Env> {\n\n let env = path_from_env_name(dir, env_name);\n\n let mut env: Env = env.into();\n\n if *example {\n\n env.add(\"VAR1\", \"VALUE1\");\n\n env.add(\"VAR2\", \"VALUE2\");\n\n }\n\n if env.file().exists() {\n\n return Err(CliError::EnvFileAlreadyExists(env.file().clone(), env.clone()).into());\n\n } else {\n\n Ok(env)\n", "file_path": "src/cli/commands/new.rs", "rank": 19, "score": 198231.58691313537 }, { "content": "pub fn env_dir(app: &ArgMatches) -> Result<()> {\n\n let mut cfg = get_cfg()?;\n\n cfg.sync_local_to_global()?;\n\n let cfg = cfg;\n\n\n\n let settings = get_settings(app, &cfg);\n\n\n\n if let Some(env_dir) = app.value_of(\"env_dir\") {\n\n set(cfg, settings, env_dir.into())\n\n } else if app.is_present(\"unset\") {\n\n unset(cfg, settings)\n\n } else {\n\n unreachable!()\n\n }\n\n}\n\n\n", "file_path": "src/cli/commands/dir.rs", "rank": 20, "score": 197078.9371657289 }, { "content": "pub fn env_edit(app: &ArgMatches) -> Result<()> {\n\n let mut cfg = get_cfg()?;\n\n cfg.sync_local_to_global()?;\n\n let cfg = cfg;\n\n\n\n let settings = get_settings(app, &cfg);\n\n let env_name = settings.env()?;\n\n\n\n let editor = app.value_of(\"editor\");\n\n let sync_settings = SyncSettings::new(app);\n\n\n\n let setup = cfg.current_setup(settings.setup()?)?;\n\n let env_file = setup.env_file(env_name)?;\n\n\n\n let command = |editor: &str| Command::new(editor).arg(&env_file).status();\n\n let exist_code = if let Some(editor) = editor {\n\n command(editor)?\n\n } else if let Ok(editor) = env::var(\"EDITOR\") {\n\n command(editor.as_str())?\n\n } else {\n", "file_path": "src/cli/commands/edit.rs", "rank": 21, "score": 197078.9371657289 }, { "content": "pub fn env_pdir(app: &ArgMatches) 
-> Result<()> {\n\n let mut cfg = get_cfg()?;\n\n cfg.sync_local_to_global()?;\n\n let cfg = cfg;\n\n\n\n let settings = get_settings(app, &cfg);\n\n\n\n if let Some(env_dir) = app.value_of(\"env_dir\") {\n\n set(cfg, settings, env_dir.into())\n\n } else if app.is_present(\"unset\") {\n\n unset(cfg, settings)\n\n } else {\n\n unreachable!()\n\n }\n\n}\n\n\n", "file_path": "src/cli/commands/pdir.rs", "rank": 22, "score": 197078.9371657289 }, { "content": "pub fn env_sync(app: &ArgMatches) -> Result<()> {\n\n let mut cfg = get_cfg()?;\n\n cfg.sync_local_to_global()?;\n\n let cfg = cfg;\n\n\n\n let settings = get_settings(app, &cfg);\n\n let sync_settings = SyncSettings::new(app);\n\n\n\n let setup = cfg.current_setup(settings.setup()?)?;\n\n let envs = setup.envs();\n\n let envs: Vec<_> = envs.into_iter().filter_map(|r| r.ok()).collect();\n\n\n\n let recent_env = Env::recent(&envs)?;\n\n\n\n sync_workflow(recent_env, envs, sync_settings)?;\n\n\n\n success(\"files synchronized\");\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/cli/commands/sync.rs", "rank": 23, "score": 197078.9371657289 }, { "content": "#[derive(Debug, Serialize, Deserialize)]\n\nstruct PrivateEnvDir(#[serde(deserialize_with = \"deserialize_private_env_dir\")] PathBuf);\n\n\n\nimpl AsRef<PathBuf> for PrivateEnvDir {\n\n fn as_ref(&self) -> &PathBuf {\n\n &self.0\n\n }\n\n}\n\n\n\n#[derive(Debug, Serialize, Deserialize)]\n\npub struct GlobalProjectSetupCfg {\n\n #[serde(skip)]\n\n name: SetupName,\n\n\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n private_env_dir: Option<PrivateEnvDir>,\n\n}\n\n\n\nimpl GlobalProjectSetupCfg {\n\n pub fn new(name: SetupName) -> Self {\n\n Self {\n", "file_path": "src/cfg/global/setup.rs", "rank": 24, "score": 194866.57767665037 }, { "content": "fn deserialize_private_env_dir<'de, D>(deserializer: D) -> stdResult<PathBuf, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n{\n\n struct InnerVisitor;\n\n\n\n impl<'de> Visitor<'de> for InnerVisitor {\n\n type 
Value = PathBuf;\n\n\n\n fn expecting(&self, formatter: &mut Formatter) -> std::fmt::Result {\n\n formatter.write_str(\"incorrect private_env_dir\")\n\n }\n\n\n\n fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>\n\n where\n\n E: serde::de::Error,\n\n {\n\n let dir: PathBuf = v.into();\n\n if dir.is_relative() {\n\n Err(E::invalid_value(\n", "file_path": "src/cfg/global/setup.rs", "rank": 25, "score": 187364.2600274901 }, { "content": "pub fn confirm<W, E>(mut writer: W, question: &str, e: Vec<E>) -> Result<E>\n\nwhere\n\n W: io::Write,\n\n E: EnumConfirm + Clone + Debug,\n\n{\n\n let mut write_question_line = || writeln!(writer, \"{} : {}\", question, &e.to_string()).unwrap();\n\n\n\n write_question_line();\n\n enable_raw_mode()?;\n\n let e = loop {\n\n if let Event::Key(event) = read()? {\n\n if event == KeyEvent::new(KeyCode::Char('c'), KeyModifiers::CONTROL)\n\n || event == KeyEvent::new(KeyCode::Char('z'), KeyModifiers::CONTROL)\n\n {\n\n disable_raw_mode()?;\n\n bail!(UserStopSync);\n\n }\n\n\n\n if let Some(e) = e.iter().find_map(|e| {\n\n if event == KeyCode::Char(e.to_char()).into() {\n", "file_path": "src/cli/terminal/confirm.rs", "rank": 26, "score": 181446.18363735714 }, { "content": "pub fn generate_env_vars<AV>(\n\n env: &Env,\n\n array_vars: AV,\n\n vars: Option<Rc<RefCell<Vars>>>,\n\n) -> Result<Vec<EnvVar>>\n\nwhere\n\n AV: Deref<Target = ArrayVars>,\n\n{\n\n let mut env_vars: Vec<EnvVar> = vec![];\n\n\n\n for array_var in array_vars.as_ref().iter() {\n\n let env_var = generate_array_env_var(env, array_var)?;\n\n env_vars.append(&mut vec![env_var]);\n\n }\n\n\n\n if let Some(vars) = vars {\n\n let vars = vars.borrow();\n\n for var in vars.as_ref().iter() {\n\n let env_var = generate_env_var(env, var);\n\n env_vars.append(&mut vec![env_var]);\n", "file_path": "src/run_file/var.rs", "rank": 27, "score": 181418.41565493465 }, { "content": "fn env_file_time(env: &Env) -> ModificationTime {\n\n let file = env.file();\n\n 
modification_time(file)\n\n}\n\n\n\nimpl Env {\n\n pub fn recent(envs: &Vec<Env>) -> Result<Env> {\n\n envs.iter()\n\n .fold(None, |last_env, next_env| match (last_env, next_env) {\n\n (None, next_env) => Some(next_env.clone()),\n\n (Some(last_env), next_env) => {\n\n let last_env_modification_time = env_file_time(&last_env);\n\n let next_env_modification_time = env_file_time(next_env);\n\n if last_env_modification_time < next_env_modification_time {\n\n Some((*next_env).clone())\n\n } else {\n\n Some(last_env)\n\n }\n\n }\n\n })\n\n .context(\"fail to found the most recent env file\")\n\n }\n\n}\n", "file_path": "src/env_file/recent.rs", "rank": 29, "score": 177692.23422498914 }, { "content": "pub fn get_cfg() -> Result<Cfg> {\n\n let (local_dir, global_dir) = reach_directories()?;\n\n\n\n Cfg::load_local(global_dir, local_dir).context(\"fail to load cfg \\\"short.yaml\\\"\")\n\n}\n\n\n", "file_path": "src/cli/cfg.rs", "rank": 30, "score": 175379.11100785923 }, { "content": "pub fn create_cfg() -> Result<Cfg> {\n\n let (local_dir, global_dir) = reach_directories()?;\n\n\n\n Cfg::create_local(global_dir, local_dir).context(\"fail to create cfg\")\n\n}\n", "file_path": "src/cli/cfg.rs", "rank": 31, "score": 175379.11100785923 }, { "content": "pub fn modification_time(file: &Path) -> ModificationTime {\n\n match file_time(file) {\n\n (modification_time, _) => modification_time,\n\n }\n\n}\n", "file_path": "src/utils/file_time.rs", "rank": 32, "score": 175338.60040438082 }, { "content": "pub fn create_time(file: &Path) -> CreateTime {\n\n match file_time(file) {\n\n (_, Some(create_time)) => create_time,\n\n (modification_time, None) => modification_time,\n\n }\n\n}\n\n\n", "file_path": "src/utils/file_time.rs", "rank": 33, "score": 175338.60040438082 }, { "content": "type SetupName = String;\n\n\n\n#[derive(Error, Debug)]\n\npub enum CliError {\n\n #[error(\"`{0:?}` not found for `{1}`\")]\n\n EnvDirNotFound(PathBuf, SetupName, #[source] std::io::Error),\n\n 
#[error(\"open editor fail\")]\n\n OpenEditorFail,\n\n #[error(\"bad input `{0}` try again\")]\n\n #[deprecated]\n\n ConfirmBadInputTryAgain(String),\n\n #[error(\"you have not allowed to delete var `{0}`:`{1}` in {2}\")]\n\n DeleteVarNowAllowed(String, String, String),\n\n #[error(\"env must be sync, please change it manually or run \\\"short env sync\\\"\")]\n\n EnvFileMustBeSync,\n\n #[error(\"env file `{0:?}` already exists\")]\n\n EnvFileAlreadyExists(PathBuf, Env),\n\n #[error(\"user stop sync\")]\n\n UserStopSync,\n\n #[error(\"Unknown error\")]\n\n UnknownError(#[from] anyhow::Error),\n\n}\n", "file_path": "src/cli/error.rs", "rank": 34, "score": 175076.977035322 }, { "content": "type SheBang = String;\n\n\n\npub const SHEBANG_BASH: &'static str = \"#!/bin/bash\";\n\n\n\n#[derive(Debug)]\n\npub struct BashScript(SheBang);\n\nimpl Default for BashScript {\n\n fn default() -> Self {\n\n Self {\n\n 0: SHEBANG_BASH.into(),\n\n }\n\n }\n\n}\n\nimpl Generate for BashScript {\n\n fn generate<AV, V>(&self, array_vars: AV, vars: V) -> Result<String>\n\n where\n\n AV: Deref<Target = ArrayVars>,\n\n V: Deref<Target = Vars>,\n\n {\n\n let mut content = String::new();\n", "file_path": "src/run_file/kind/bash.rs", "rank": 36, "score": 171428.9542272063 }, { "content": "type SheBang = String;\n\n\n\npub const SHEBANG_SH: &'static str = \"#!/bin/sh\";\n\n#[derive(Debug)]\n\npub struct ShScript(SheBang);\n\nimpl Default for ShScript {\n\n fn default() -> Self {\n\n Self {\n\n 0: SHEBANG_SH.into(),\n\n }\n\n }\n\n}\n\nimpl Generate for ShScript {\n\n fn generate<AV, V>(&self, array_vars: AV, vars: V) -> Result<String>\n\n where\n\n AV: Deref<Target = ArrayVars>,\n\n V: Deref<Target = Vars>,\n\n {\n\n let mut content = String::new();\n\n writeln!(content, \"{}\", self.0)?;\n", "file_path": "src/run_file/kind/sh.rs", "rank": 37, "score": 171428.9542272063 }, { "content": "pub fn file_time(file: &Path) -> (ModificationTime, Option<CreateTime>) {\n\n let metadata = 
fs::metadata(file).unwrap();\n\n (\n\n FileTime::from_last_modification_time(&metadata),\n\n FileTime::from_creation_time(&metadata),\n\n )\n\n}\n\n\n", "file_path": "src/utils/file_time.rs", "rank": 38, "score": 168416.09937825863 }, { "content": "pub fn global_cfg_directory(dir: &PathBuf) -> PathBuf {\n\n let global_cfg_dir = var(\"SHORT_GLOBAL_CFG_DIR\").map_or(\".short/\".to_string(), |v| v);\n\n dir.join(global_cfg_dir)\n\n}\n\n\n", "file_path": "src/cfg/file/mod.rs", "rank": 39, "score": 168342.7677056774 }, { "content": "pub fn generate(app: &ArgMatches) -> Result<()> {\n\n if app.is_present(\"list\") {\n\n display_template_list()?;\n\n return Ok(());\n\n }\n\n\n\n let generate_settings = GenerateSettings::new(app);\n\n\n\n if app.is_present(\"template\") {\n\n generate_template_workflow(app, &generate_settings)\n\n } else {\n\n generate_empty_workflow(app, &generate_settings)\n\n }\n\n}\n\n\n", "file_path": "src/cli/commands/generate.rs", "rank": 40, "score": 167051.33744533296 }, { "content": "pub fn run(app: &ArgMatches) -> Result<()> {\n\n let mut cfg = get_cfg()?;\n\n cfg.sync_local_to_global()?;\n\n let cfg = cfg;\n\n\n\n let settings = get_settings(app, &cfg);\n\n\n\n let args = app.values_of_lossy(\"args\").unwrap_or(vec![]);\n\n\n\n let setup_name = settings.setup()?;\n\n let env = settings.env()?;\n\n let setup = cfg.current_setup(setup_name)?;\n\n\n\n let script_file = setup.local_cfg_run_file()?;\n\n let env = setup.env(&env)?;\n\n\n\n let local_setup = setup.local_setup().unwrap();\n\n let local_setup = local_setup.borrow();\n\n let array_vars = local_setup.array_vars().unwrap_or_default();\n\n let vars = local_setup.vars();\n", "file_path": "src/cli/commands/run.rs", "rank": 41, "score": 167051.33744533296 }, { "content": "pub fn ls(app: &ArgMatches) -> Result<()> {\n\n let mut cfg = get_cfg()?;\n\n cfg.sync_local_to_global()?;\n\n let cfg = cfg;\n\n\n\n let settings = get_settings(app, &cfg);\n\n\n\n let project = cfg.current_project()?;\n\n 
let project = project.borrow();\n\n let local_setups = cfg.current_setups()?;\n\n\n\n for local_setup in local_setups {\n\n let setup_name = local_setup.name()?;\n\n\n\n let local_setup_cfg = local_setup.local_setup().unwrap();\n\n let local_setup_cfg = local_setup_cfg.borrow();\n\n let run_file = local_setup_cfg.file();\n\n\n\n let mut selected = false;\n\n let mut colored = false;\n", "file_path": "src/cli/commands/ls.rs", "rank": 42, "score": 167051.33744533296 }, { "content": "pub fn rename(app: &ArgMatches) -> Result<()> {\n\n let mut cfg = get_cfg()?;\n\n cfg.sync_local_to_global()?;\n\n\n\n let mut settings = get_settings(&app, &cfg);\n\n\n\n let last_setup_name = app.value_of(\"last_setup_name\").unwrap();\n\n let new_setup_name = app.value_of(\"new_setup_name\").unwrap();\n\n\n\n let setup = cfg.current_setup(&last_setup_name.to_string())?;\n\n\n\n setup.rename(&new_setup_name.to_string())?;\n\n\n\n if let Ok(current_setup_name) = settings.setup() {\n\n if last_setup_name == current_setup_name {\n\n settings.set_setup(new_setup_name.to_string());\n\n use_workflow(&cfg, &settings)?;\n\n }\n\n }\n\n\n\n cfg.save()?;\n\n\n\n success(\"setup renamed\");\n\n\n\n Ok(())\n\n}\n", "file_path": "src/cli/commands/rename.rs", "rank": 43, "score": 167051.33744533296 }, { "content": "pub fn show(args: &ArgMatches) -> Result<()> {\n\n if args.is_present(\"display_setup\") {\n\n if let Ok(settings) = cfg() {\n\n if let Ok(env) = settings.setup() {\n\n print!(\"{}\", env);\n\n }\n\n }\n\n } else if args.is_present(\"display_env\") {\n\n if let Ok(settings) = cfg() {\n\n if let Ok(env) = settings.env() {\n\n print!(\"{}\", env);\n\n }\n\n }\n\n } else if args.is_present(\"format\") {\n\n if let Ok(settings) = cfg() {\n\n let format = args\n\n .value_of_lossy(\"format\")\n\n .map(|c| c.into_owned())\n\n .unwrap_or(DEFAULT_SHOW_FORMAT.into());\n\n\n", "file_path": "src/cli/commands/show.rs", "rank": 44, "score": 167051.33744533296 }, { "content": "pub fn init(_app: 
&ArgMatches) -> Result<()> {\n\n let cfg = create_cfg()?;\n\n cfg.save()?;\n\n success(\"project initialed\");\n\n Ok(())\n\n}\n", "file_path": "src/cli/commands/init.rs", "rank": 45, "score": 167051.33744533296 }, { "content": "pub fn vars(app: &ArgMatches) -> Result<()> {\n\n let mut cfg = get_cfg()?;\n\n cfg.sync_local_to_global()?;\n\n let cfg = cfg;\n\n\n\n let settings = get_settings(app, &cfg);\n\n\n\n let setup_name = settings.setup()?;\n\n let setup = cfg.current_setup(setup_name)?;\n\n\n\n let envs = selected_envs(app, &setup, &settings)?;\n\n\n\n let is_current_env = |env: &Env| {\n\n if let Ok(current_env) = settings.env() {\n\n if let Ok(env_name) = env.name() {\n\n if *current_env == env_name {\n\n return true;\n\n }\n\n }\n\n }\n", "file_path": "src/cli/commands/vars.rs", "rank": 46, "score": 167051.33744533296 }, { "content": "type VarFormat = String;\n", "file_path": "src/cfg/local/setup_array_vars.rs", "rank": 47, "score": 166711.82148108722 }, { "content": "type VarDelimiter = String;\n\n\n\n#[derive(EnumIter, EnumString, EnumProperty, Debug, Clone, Eq, PartialEq, AsRefStr)]\n\npub enum VarCase {\n\n #[strum(\n\n serialize = \"None\",\n\n serialize = \"none\",\n\n serialize = \"false\",\n\n serialize = \"\",\n\n props(deserialize = \"\")\n\n )]\n\n None,\n\n #[strum(\n\n serialize = \"camelcase\",\n\n serialize = \"CamelCase\",\n\n props(deserialize = \"CamelCase\")\n\n )]\n\n CamelCase,\n\n #[strum(\n\n serialize = \"snakecase\",\n", "file_path": "src/cfg/local/setup_array_vars.rs", "rank": 48, "score": 166711.82148108722 }, { "content": "type VarPattern = String;\n", "file_path": "src/cfg/local/setup_array_vars.rs", "rank": 49, "score": 166711.82148108722 }, { "content": "pub fn reach_directories() -> Result<(LocalDir, GlobalDir)> {\n\n let local_dir = current_dir().context(\"fail to found current directory\")?;\n\n let global_dir = home_dir().context(\"fail to found home directory\")?;\n\n\n\n Ok((local_dir, global_dir))\n\n}\n\n\n", 
"file_path": "src/cli/cfg.rs", "rank": 50, "score": 164692.02063563652 }, { "content": "fn save(file: &Path, crate_info: &CrateInfo) -> Result<()> {\n\n let content = to_string(crate_info)?;\n\n write_all_dir(&file, &content)?;\n\n Ok(())\n\n}\n\n\n\npub type UpdateMessage = String;\n\n\n", "file_path": "src/utils/check_update.rs", "rank": 51, "score": 161149.2351860276 }, { "content": "fn set(cfg: Cfg, settings: Settings, env_dir: PathBuf) -> Result<()> {\n\n let setup_name = settings.setup()?;\n\n let setup = cfg.current_setup(setup_name)?;\n\n\n\n let global_setup = setup.global_setup().unwrap();\n\n let mut global_setup = global_setup.borrow_mut();\n\n let private_env_dir = if env_dir.is_relative() {\n\n current_dir()?.join(&env_dir).canonicalize()\n\n } else {\n\n env_dir.canonicalize()\n\n };\n\n let private_env_dir = private_env_dir\n\n .map_err(|err| CliError::EnvDirNotFound(env_dir.clone(), setup_name.clone(), err))?;\n\n global_setup.set_private_env_dir(private_env_dir.clone())?;\n\n drop(global_setup);\n\n\n\n cfg.save()?;\n\n\n\n success(format!(\"private env directory set to `{:?}`\", private_env_dir).as_str());\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/cli/commands/pdir.rs", "rank": 52, "score": 151578.14914031714 }, { "content": "fn set(cfg: Cfg, settings: Settings, env_dir: PathBuf) -> Result<()> {\n\n let setup_name = settings.setup()?;\n\n let setup = cfg.current_setup(setup_name)?;\n\n\n\n let local_setup = setup.local_setup().unwrap();\n\n let mut local_setup = local_setup.borrow_mut();\n\n local_setup.set_public_env_dir(env_dir.clone());\n\n drop(local_setup);\n\n\n\n let public_env_dir = setup.envs_public_dir()?;\n\n public_env_dir\n\n .canonicalize()\n\n .map_err(|err| CliError::EnvDirNotFound(env_dir.clone(), setup_name.clone(), err))?;\n\n\n\n cfg.save()?;\n\n\n\n success(format!(\"env directory set to `{:?}`\", public_env_dir).as_str());\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/cli/commands/dir.rs", "rank": 53, "score": 
151578.14914031714 }, { "content": "pub fn write_all_dir<P, S>(path: P, content: S) -> Result<()>\n\nwhere\n\n P: AsRef<Path>,\n\n S: AsRef<str>,\n\n{\n\n let path = path.as_ref();\n\n let content = content.as_ref();\n\n if let Some(path) = path.parent() {\n\n if !path.exists() {\n\n create_all(path, false)?;\n\n }\n\n }\n\n write_all(path, content)?;\n\n Ok(())\n\n}\n", "file_path": "src/utils/write_all/mod.rs", "rank": 54, "score": 147768.4475495848 }, { "content": "#[test]\n\nfn cmd_use_one_shot_update_setup() {\n\n let mut e = init(\"cmd_use_one_shot_update_setup\");\n\n e.add_file(\n\n PROJECT_CFG_FILE,\n\n r#\"\n\nsetups:\n\n setup_1:\n\n file: run.sh\n\n array_vars: {}\n\n setup_2:\n\n file: run.sh\n\n array_vars: {}\n\n \"#,\n\n );\n\n\n\n e.add_file(\n\n HOME_CFG_FILE,\n\n format!(\n\n r#\"\n\nprojects:\n", "file_path": "tests/use.rs", "rank": 55, "score": 135696.4484466585 }, { "content": "#[test]\n\nfn cmd_use_one_shot_update_env() {\n\n let mut e = init(\"cmd_use_one_shot_update_env\");\n\n e.add_file(PROJECT_ENV_EXAMPLE_1_FILE, r#\"VAR1=VALUE1\"#);\n\n e.add_file(PROJECT_ENV_EXAMPLE_2_FILE, r#\"VAR1=VALUE1\"#);\n\n e.add_file(\n\n PROJECT_CFG_FILE,\n\n r#\"\n\nsetups:\n\n setup_1:\n\n file: run.sh\n\n array_vars: {}\n\n setup_2:\n\n file: run.sh\n\n array_vars: {}\n\n \"#,\n\n );\n\n\n\n e.add_file(\n\n HOME_CFG_FILE,\n\n format!(\n", "file_path": "tests/use.rs", "rank": 56, "score": 135491.30115935547 }, { "content": "#[derive(Serialize, Deserialize)]\n\nstruct DeserializeArrayVarTruncate {\n\n pattern: VarPattern,\n\n #[serde(skip_serializing_if = \"VarCase::is_none\", default = \"VarCase::default\")]\n\n case: VarCase,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n format: Option<VarFormat>,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n delimiter: Option<VarDelimiter>,\n\n}\n\n\n\nimpl From<ArrayVar> for DeserializeArrayVarTruncate {\n\n fn from(av: ArrayVar) -> Self {\n\n Self {\n\n pattern: av.pattern,\n\n case: 
av.case,\n\n format: av.format,\n\n delimiter: av.delimiter,\n\n }\n\n }\n\n}\n", "file_path": "src/cfg/local/setup_array_vars.rs", "rank": 57, "score": 134985.4413579866 }, { "content": "#[test]\n\nfn cmd_show_no_setup_no_env() {\n\n let mut e = init(\"cmd_show_no_setup_no_env\");\n\n\n\n e.add_file(\n\n PROJECT_CFG_FILE,\n\n r#\"\n\nsetups: {}\n\n \"#,\n\n );\n\n e.setup();\n\n\n\n let mut command = e.command(BIN_NAME).unwrap();\n\n let r = command\n\n .env(\"RUST_LOG\", \"debug\")\n\n .arg(\"show\")\n\n .assert()\n\n .to_string();\n\n\n\n assert!(contains(\"no setup is configured. You can use\").eval(&r));\n\n\n", "file_path": "tests/show.rs", "rank": 58, "score": 130915.66383909767 }, { "content": "type LocalCfgFile = PathBuf;\n\n\n\npub const GLOBAL_FILE_NAME: &'static str = \"cfg.yaml\";\n\n\n\n#[derive(Debug, Serialize, Deserialize)]\n\npub struct GlobalCfg {\n\n projects: Vec<Rc<RefCell<GlobalProjectCfg>>>,\n\n}\n\n\n\nimpl GlobalCfg {\n\n pub fn new() -> Self {\n\n Self { projects: vec![] }\n\n }\n\n\n\n pub fn add_project(&mut self, project: GlobalProjectCfg) -> Result<()> {\n\n if let Err(err) = self.get_project_by_file(&project.file()) {\n\n match err.downcast_ref::<CfgError>() {\n\n Some(CfgError::ProjectNotFound(_)) => {\n\n self.projects\n\n .append(&mut vec![Rc::new(RefCell::new(project))]);\n", "file_path": "src/cfg/global/mod.rs", "rank": 59, "score": 130810.25980352506 }, { "content": "fn main() -> Result<()> {\n\n env_logger::init();\n\n info!(\"BIN_NAME {}\", BIN_NAME);\n\n info!(\"VERSION v{}\", VERSION);\n\n\n\n Ok(run()?)\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 60, "score": 126978.59230066519 }, { "content": "fn run() -> Result<()> {\n\n let files_kind = Kind::iter()\n\n .map(|e| e.as_ref().to_string())\n\n .collect::<Vec<_>>();\n\n let files_kind = files_kind.iter().map(|e| e.as_str()).collect::<Vec<_>>();\n\n let files_kind = files_kind.as_slice();\n\n\n\n let setup_arg = Arg::with_name(\"setup\")\n\n .long(\"setup\")\n\n 
.short(\"s\")\n\n .takes_value(true)\n\n .help(\"Set up name\");\n\n let environment_arg = Arg::with_name(\"environment\")\n\n .long(\"env\")\n\n .short(\"e\")\n\n .takes_value(true)\n\n .help(\"Environment name\");\n\n let environments_arg = Arg::with_name(\"environments\")\n\n .help(\"Environments, you can provides several for compare to each other.\")\n\n .long(\"envs\")\n", "file_path": "src/main.rs", "rank": 61, "score": 126978.59230066519 }, { "content": "fn local_cfg_file(dir: &PathBuf) -> PathBuf {\n\n let local_cfg_file = var(\"SHORT_LOCAL_CFG_FILE\").map_or(\"short.yaml\".to_string(), |v| v);\n\n dir.join(local_cfg_file)\n\n}\n\n\n", "file_path": "src/cfg/file/mod.rs", "rank": 62, "score": 125568.95045779229 }, { "content": "#[test]\n\nfn cmd_generate_with_file_env_directory() {\n\n let mut e = init(\"cmd_generate_with_file_env_directory\");\n\n e.add_file(\n\n PROJECT_CFG_FILE,\n\n r#\"\n\nsetups: {}\n\n \"#,\n\n );\n\n e.setup();\n\n\n\n let mut command = e.command(BIN_NAME).unwrap();\n\n let r = command\n\n .env(\"RUST_LOG\", \"debug\")\n\n .arg(\"generate\")\n\n .arg(\"setup_1\")\n\n .arg(\"example1\")\n\n .arg(\"bash\")\n\n .args(&[\"-e\", \"public_env\"])\n\n .assert()\n\n .to_string();\n\n\n\n assert!(contains(\"generate setup\").eval(&r));\n\n\n\n assert!(e.file_exists(\"project/public_env/.example1\"));\n\n}\n\n\n", "file_path": "tests/generate_empty.rs", "rank": 63, "score": 124618.04986690474 }, { "content": "pub fn sync_workflow(\n\n source_env: Env,\n\n envs: Vec<Env>,\n\n sync_settings: SyncSettings,\n\n) -> Result<Vec<Env>> {\n\n let source_env = if let Some(file) = sync_settings.file.as_ref() {\n\n Env::from_file_reader(file)?\n\n } else {\n\n source_env\n\n };\n\n let envs: Vec<_> = envs.into_iter().map(|env| RefCell::new(env)).collect();\n\n\n\n let sync_settings = Rc::new(sync_settings);\n\n\n\n for env_cell in envs.iter() {\n\n let mut env = env_cell.borrow_mut();\n\n if env.file() == source_env.file() {\n\n continue;\n\n }\n\n let 
env_name = Rc::new(env.name()?);\n", "file_path": "src/cli/commands/sync.rs", "rank": 64, "score": 119905.2934502725 }, { "content": "fn display_template_list() -> Result<()> {\n\n let registry_tmp = TempDir::new(\"registry\")?;\n\n let registry = Registry::checkout(registry_tmp.path())?;\n\n let mut render_table = Table::new();\n\n render_table.set_format(*format::consts::FORMAT_CLEAN);\n\n\n\n for template in registry.index() {\n\n if template.name().eq(\"test\") {\n\n // Remove test repository\n\n continue;\n\n }\n\n render_table.add_row(Row::new(vec![\n\n Cell::new(template.name()),\n\n Cell::new(template.url()),\n\n ]));\n\n }\n\n\n\n println!(\"{}\", render_table.to_string());\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/cli/commands/generate.rs", "rank": 65, "score": 119078.17199717532 }, { "content": "fn cfg() -> Result<Settings> {\n\n let mut cfg = get_cfg()?;\n\n cfg.sync_local_to_global()?;\n\n let settings: Settings = (&cfg).into();\n\n Ok(settings)\n\n}\n\n\n", "file_path": "src/cli/commands/show.rs", "rank": 66, "score": 118392.67664445925 }, { "content": "pub fn check_update(dir: &Path, current_version: &CrateInfo, ttl: &i64) -> Option<UpdateMessage> {\n\n let filename = file_name(dir);\n\n\n\n if !filename.exists() {\n\n if let Ok(_) = save(&filename, &current_version) {\n\n return None;\n\n }\n\n }\n\n\n\n if !is_checked(&filename, ttl) {\n\n if let Ok(latest_version) = latest() {\n\n if latest_version > *current_version {\n\n if let Ok(_) = remove_file(&filename) {\n\n save(&filename, &latest_version).unwrap();\n\n }\n\n return Some(format!(\n\n r#\"\n\n {love} {t1} {version} {t2}\n\n For update you can use the this command :\n\n \n", "file_path": "src/utils/check_update.rs", "rank": 67, "score": 117928.67267205796 }, { "content": "pub fn is_cli_colorized() -> bool {\n\n if let Some(_) = option_env!(\"NO_COLOR\") {\n\n return false;\n\n }\n\n if let Some(clicolor_force) = option_env!(\"CLICOLOR_FORCE\") {\n\n if clicolor_force != \"0\" {\n\n 
return true;\n\n }\n\n }\n\n false\n\n}\n", "file_path": "src/utils/colorize.rs", "rank": 68, "score": 117442.95621938049 }, { "content": "use crate::env_file::entry::Entry;\n\nuse crate::env_file::{Env, Var};\n\n\n\n#[derive(Debug)]\n\npub struct EnvIterator<'a> {\n\n env: &'a Env,\n\n index: usize,\n\n}\n\n\n\nimpl<'a> EnvIterator<'a> {\n\n pub fn new(env: &'a Env) -> Self {\n\n Self { env, index: 0 }\n\n }\n\n}\n\n\n\nimpl<'a> Iterator for EnvIterator<'a> {\n\n type Item = &'a Var;\n\n\n\n fn next(&mut self) -> Option<Self::Item> {\n\n if let Some(var) = self.env.entries.get(self.index) {\n", "file_path": "src/env_file/iter.rs", "rank": 69, "score": 116954.05987092436 }, { "content": " self.index += 1;\n\n if let Entry::Var(var) = var {\n\n return Some(&var);\n\n } else {\n\n return self.next();\n\n }\n\n }\n\n None\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::env_file::Env;\n\n\n\n #[test]\n\n fn env_iterator() {\n\n let mut env = Env::new(\"\".into());\n\n env.add(\"name1\", \"value1\");\n\n env.add_empty_line();\n", "file_path": "src/env_file/iter.rs", "rank": 70, "score": 116943.33544166798 }, { "content": " env.add(\"name2\", \"value2\");\n\n\n\n let mut iter = env.iter();\n\n\n\n if let Some(var) = iter.next() {\n\n assert_eq!(var.name(), \"name1\");\n\n assert_eq!(var.value(), \"value1\");\n\n } else {\n\n assert!(false);\n\n }\n\n\n\n if let Some(var) = iter.next() {\n\n assert_eq!(var.name(), \"name2\");\n\n assert_eq!(var.value(), \"value2\");\n\n } else {\n\n assert!(false);\n\n }\n\n\n\n assert!(iter.next().is_none());\n\n }\n\n}\n", "file_path": "src/env_file/iter.rs", "rank": 71, "score": 116933.38030990763 }, { "content": "fn latest() -> Result<CrateInfo> {\n\n let client = Client::builder().timeout(Duration::from_secs(2)).build()?;\n\n let body = client.get(GITHUB_CRATES_INDEX_FILE_URL).send()?.text()?;\n\n let mut output: Option<CrateInfo> = None;\n\n for row in body.lines() {\n\n if let Ok(tmp_crate_info) = 
from_str::<CrateInfo>(row) {\n\n if let Some(crate_info) = output.as_ref() {\n\n if *crate_info < tmp_crate_info {\n\n output = Some(tmp_crate_info.clone());\n\n }\n\n } else {\n\n output = Some(tmp_crate_info.clone());\n\n }\n\n }\n\n }\n\n output.context(\"no crate info\")\n\n}\n\n\n", "file_path": "src/utils/check_update.rs", "rank": 72, "score": 116572.66978758163 }, { "content": "fn terminal(settings: Settings) -> Result<()> {\n\n match (settings.setup(), settings.env()) {\n\n (Ok(setup), Ok(env)) => {\n\n good_info(format!(\"your current setup is `{}`:`{}`\", setup.bold(), env.bold()).as_str())\n\n }\n\n (Err(_), Ok(env)) => bad_info(\n\n format!(\n\n \"no setup is configured with `{}` env . You can use \\\"short use {0:?} <env>\\\"\",\n\n env.bold()\n\n )\n\n .as_str(),\n\n ),\n\n (Ok(setup), Err(_)) => bad_info(\n\n format!(\n\n \"no env is configured for `{}` . You can use \\\"short use <setup> <env>\\\"\",\n\n setup.bold()\n\n )\n\n .as_str(),\n\n ),\n\n (Err(_), Err(_)) => {\n\n bad_info(\"no setup is configured. 
You can use \\\"short use <setup> <env>\\\"\")\n\n }\n\n }\n\n Ok(())\n\n}\n", "file_path": "src/cli/commands/show.rs", "rank": 73, "score": 114308.36655297271 }, { "content": "pub fn message(msg: &str) {\n\n println!(\"{}\", msg);\n\n}\n\n\n", "file_path": "src/cli/terminal/message.rs", "rank": 74, "score": 113401.20846571444 }, { "content": "pub fn success(msg: &str) {\n\n println!(\"{} {}\", emoji::CHECK, msg)\n\n}\n\n\n", "file_path": "src/cli/terminal/message.rs", "rank": 75, "score": 113401.20846571444 }, { "content": "pub fn info(msg: &str) {\n\n println!(\"{} {}\", emoji::RIGHT_POINTER, msg)\n\n}\n\n\n", "file_path": "src/cli/terminal/message.rs", "rank": 76, "score": 113401.20846571444 }, { "content": "pub trait SetupCfg {\n\n fn name(&self) -> &String;\n\n\n\n fn set_name(&mut self, name: String);\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct Setup {\n\n local_cfg_file: Option<PathBuf>,\n\n local_setup: Weak<RefCell<LocalSetupCfg>>,\n\n global_setup: Weak<RefCell<GlobalProjectSetupCfg>>,\n\n}\n\n\n\nimpl Setup {\n\n pub fn new() -> Self {\n\n Self {\n\n local_cfg_file: None,\n\n local_setup: Weak::default(),\n\n global_setup: Weak::default(),\n\n }\n", "file_path": "src/cfg/setup.rs", "rank": 77, "score": 111807.66636512495 }, { "content": "pub trait SetupsCfg {\n\n type Setup: SetupCfg;\n\n\n\n fn add_setup(&mut self, setup: Self::Setup) {\n\n if let None = self.get_setup(setup.name()) {\n\n self.get_setups()\n\n .borrow_mut()\n\n .append(&mut vec![Rc::new(RefCell::new(setup))])\n\n }\n\n }\n\n\n\n fn remove_by_name_setup(&mut self, name: &String) {\n\n self.get_setups().borrow_mut().retain(|setup| {\n\n let setup = setup.borrow();\n\n setup.name() != name\n\n });\n\n }\n\n\n\n fn get_setup(&self, name: &String) -> Option<Rc<RefCell<Self::Setup>>> {\n\n self.get_setups()\n\n .borrow()\n\n .iter()\n\n .find(|setup| setup.borrow().name() == name)\n\n .map(|setup| Rc::clone(setup))\n\n }\n\n\n\n fn get_setups(&self) -> 
Rc<RefCell<Vec<Rc<RefCell<Self::Setup>>>>>;\n\n}\n\n\n", "file_path": "src/cfg/setup.rs", "rank": 78, "score": 111807.66636512495 }, { "content": "pub fn bad_info(msg: &str) {\n\n println!(\"{} {}\", emoji::PERSON_POUTING, msg)\n\n}\n", "file_path": "src/cli/terminal/message.rs", "rank": 79, "score": 111675.117049483 }, { "content": "pub fn good_info(msg: &str) {\n\n println!(\"{} {}\", emoji::PERSON_TIPPING_HANG, msg)\n\n}\n\n\n", "file_path": "src/cli/terminal/message.rs", "rank": 80, "score": 111675.117049483 }, { "content": "fn file_name(dir: &Path) -> PathBuf {\n\n dir.join(LAST_CRATE_INFO_FILE)\n\n}\n\n\n", "file_path": "src/utils/check_update.rs", "rank": 81, "score": 111386.63865065883 }, { "content": "fn is_checked(file: &Path, ttl: &i64) -> bool {\n\n if file.exists() {\n\n let current_time = filetime::FileTime::now();\n\n let create_time = create_time(&file);\n\n if create_time.seconds() + ttl > current_time.seconds() {\n\n return true;\n\n }\n\n }\n\n false\n\n}\n\n\n", "file_path": "src/utils/check_update.rs", "rank": 82, "score": 107805.75848472546 }, { "content": "#[test]\n\nfn cmd_use() {\n\n let mut e = init(\"cmd_use\");\n\n\n\n e.add_file(PROJECT_ENV_EXAMPLE_1_FILE, r#\"VAR1=VALUE1\"#);\n\n e.add_file(\n\n PROJECT_CFG_FILE,\n\n r#\"\n\nsetups:\n\n setup_1:\n\n file: run.sh\n\n array_vars: {}\n\n \"#,\n\n );\n\n e.setup();\n\n\n\n let mut command = e.command(BIN_NAME).unwrap();\n\n let r = command\n\n .env(\"RUST_LOG\", \"debug\")\n\n .arg(\"use\")\n\n .arg(\"setup_1\")\n", "file_path": "tests/use.rs", "rank": 83, "score": 107658.99203184916 }, { "content": "#[test]\n\nfn cmd_envs_multiple_envs() {\n\n let mut e = init(\"cmd_var\");\n\n e.add_file(\n\n PROJECT_CFG_FILE,\n\n r#\"\n\nsetups:\n\n setup_1:\n\n file: run.sh\n\n \"#,\n\n );\n\n e.add_file(\n\n PROJECT_ENV_EXAMPLE_1_FILE,\n\n r#\"VAR_A=VALUE1\n\nVAR_B=VALUE1\n\n\"#,\n\n );\n\n e.add_file(\n\n PROJECT_ENV_EXAMPLE_2_FILE,\n\n r#\"VAR_A=VALUE2\n\nVAR_B=VALUE2\n", "file_path": 
"tests/envs.rs", "rank": 84, "score": 107146.33843446415 }, { "content": "fn unset(cfg: Cfg, settings: Settings) -> Result<()> {\n\n let setup_name = settings.setup()?;\n\n let setup = cfg.current_setup(setup_name)?;\n\n\n\n let local_setup = setup.local_setup().unwrap();\n\n let mut local_setup = local_setup.borrow_mut();\n\n local_setup.unset_public_env_dir()?;\n\n drop(local_setup);\n\n\n\n cfg.save()?;\n\n\n\n success(format!(\"env directory unset\").as_str());\n\n\n\n Ok(())\n\n}\n", "file_path": "src/cli/commands/dir.rs", "rank": 85, "score": 107001.28551247183 }, { "content": "fn unset(cfg: Cfg, settings: Settings) -> Result<()> {\n\n let setup_name = settings.setup()?;\n\n let setup = cfg.current_setup(setup_name)?;\n\n\n\n let global_setup = setup.global_setup().unwrap();\n\n let mut global_setup = global_setup.borrow_mut();\n\n global_setup.unset_private_env_dir()?;\n\n drop(global_setup);\n\n\n\n cfg.save()?;\n\n\n\n success(format!(\"private env directory unset\").as_str());\n\n\n\n Ok(())\n\n}\n", "file_path": "src/cli/commands/pdir.rs", "rank": 86, "score": 107001.28551247183 }, { "content": "#[test]\n\nfn cmd_new_public() {\n\n let mut e = init(\"cmd_env_new_public\");\n\n e.add_file(\n\n PROJECT_CFG_FILE,\n\n r#\"\n\nsetups:\n\n setup_1:\n\n file: run.sh\n\n public_env_dir: env\n\n \"#,\n\n );\n\n e.setup();\n\n\n\n let mut command = e.command(BIN_NAME).unwrap();\n\n let r = command\n\n .env(\"RUST_LOG\", \"debug\")\n\n .arg(\"new\")\n\n .arg(\"example1\")\n\n .args(vec![\"-s\", \"setup_1\"])\n\n .assert()\n\n .to_string();\n\n\n\n assert!(contains(\"env `example1` created\").eval(&r));\n\n\n\n e.file_exists(PathBuf::from(PROJECT_ENV_DIR).join(\".example1\"));\n\n}\n\n\n", "file_path": "tests/new.rs", "rank": 87, "score": 105810.698464107 }, { "content": "#[test]\n\nfn cmd_new_private() {\n\n let mut e = init(\"cmd_env_new_private\");\n\n e.add_file(\n\n PROJECT_CFG_FILE,\n\n r#\"\n\nsetups:\n\n setup_1:\n\n file: run.sh\n\n \"#,\n\n );\n\n\n\n 
e.add_file(\n\n HOME_CFG_FILE,\n\n format!(\n\n r#\"\n\nprojects:\n\n - file: {file}\n\n current:\n\n setup: setup_1\n\n setups:\n", "file_path": "tests/new.rs", "rank": 88, "score": 105810.698464107 }, { "content": "#[test]\n\nfn cmd_use_with_private() {\n\n let mut e = init(\"cmd_use_with_private\");\n\n e.add_file(PROJECT_ENV_EXAMPLE_1_FILE, r#\"VAR1=VALUE1\"#);\n\n e.add_file(PRIVATE_ENV_DEV_FILE, r#\"VAR1=VALUE1\"#);\n\n e.add_file(\n\n PROJECT_CFG_FILE,\n\n r#\"\n\nsetups:\n\n setup_1:\n\n file: run.sh\n\n array_vars: {}\n\n \"#,\n\n );\n\n\n\n e.add_file(\n\n HOME_CFG_FILE,\n\n format!(\n\n r#\"\n\nprojects:\n\n - file: {file}\n", "file_path": "tests/use.rs", "rank": 89, "score": 105779.9498277671 }, { "content": "#[test]\n\nfn cmd_new_public_with_sync() {\n\n let mut e = init(\"cmd_env_new_public_with_sync\");\n\n let initial_env_file = PathBuf::from(PROJECT_ENV_DIR).join(\".initial\");\n\n let initial_env_content = r#\"VAR1=VAR1\n\n\"#;\n\n e.add_file(&initial_env_file, initial_env_content);\n\n e.add_file(\n\n PROJECT_CFG_FILE,\n\n r#\"\n\nsetups:\n\n setup_1:\n\n file: run.sh\n\n public_env_dir: env\n\n \"#,\n\n );\n\n e.setup();\n\n\n\n let mut command = e.command(BIN_NAME).unwrap();\n\n let r = command\n\n .env(\"RUST_LOG\", \"debug\")\n", "file_path": "tests/new.rs", "rank": 90, "score": 104015.46230474088 }, { "content": "pub fn get_settings(app: &ArgMatches, cfg: &Cfg) -> Settings {\n\n let mut settings: Settings = cfg.into();\n\n\n\n if let Some(setup) = app.value_of_lossy(\"setup\") {\n\n settings.set_setup(setup.to_string());\n\n info!(\"setup {:?}\", setup);\n\n }\n\n if let Some(env) = app.value_of_lossy(\"environment\") {\n\n settings.set_env(env.to_string());\n\n info!(\"env {:?}\", env);\n\n }\n\n settings\n\n}\n\n\n\nimpl From<&Cfg> for Settings {\n\n fn from(cfg: &Cfg) -> Self {\n\n if let Ok(current_project_ref) = cfg.current_project() {\n\n let mut settings = Settings::new();\n\n\n\n // Remove env is not exists.\n", "file_path": 
"src/cli/settings.rs", "rank": 91, "score": 101423.54924461106 }, { "content": "#[test]\n\nfn cmd_new_duplicate_cross_public_private() {\n\n let mut e = init(\"cmd_env_new_public_with_sync\");\n\n e.add_file(PRIVATE_ENV_DEV_FILE, \"\");\n\n e.add_file(\n\n PROJECT_CFG_FILE,\n\n r#\"\n\nsetups:\n\n setup_1:\n\n file: run.sh\n\n \"#,\n\n );\n\n\n\n e.add_file(\n\n HOME_CFG_FILE,\n\n format!(\n\n r#\"\n\nprojects:\n\n - file: {file}\n\n current:\n\n setup: setup_1\n", "file_path": "tests/new.rs", "rank": 92, "score": 100655.27647211048 }, { "content": "pub trait Generate {\n\n fn generate<AV, V>(&self, array_vars: AV, vars: V) -> Result<String>\n\n where\n\n AV: Deref<Target = ArrayVars>,\n\n V: Deref<Target = Vars>;\n\n\n\n fn update_local_setup_cfg(&self, local_setup_cfg: &mut LocalSetupCfg) -> Result<()>;\n\n}\n\n\n\n#[derive(EnumString, AsRefStr, EnumIter, EnumCount, Debug)]\n\n#[strum(serialize_all = \"snake_case\")]\n\npub enum Kind {\n\n #[strum(serialize = \"sh\", props(deserialize = \"sh\"))]\n\n Sh(ShScript),\n\n #[strum(serialize = \"bash\", props(deserialize = \"bash\"))]\n\n Bash(BashScript),\n\n}\n\n\n\nimpl Generate for Kind {\n\n fn generate<AV, V>(&self, array_vars: AV, vars: V) -> Result<String>\n", "file_path": "src/run_file/kind/mod.rs", "rank": 93, "score": 100029.06550808737 }, { "content": "fn generate_empty_workflow(app: &ArgMatches, generate_settings: &GenerateSettings) -> Result<()> {\n\n let mut cfg = get_cfg()?;\n\n let setup_name: String = generate_settings.setup_name.clone();\n\n let target_directory: PathBuf = app\n\n .value_of(\"target_directory\")\n\n .unwrap_or(setup_name.as_str())\n\n .into();\n\n let kind_file = app.value_of(\"kind\").context(\"kind of file is required\")?;\n\n let public_env_directory = app.value_of(\"public_env_directory\");\n\n let env_name = app.value_of(\"env_name\").unwrap().to_string();\n\n let setup_file = {\n\n let mut setup_file: PathBuf = app.value_of(\"file\").unwrap_or(\"run.sh\").into();\n\n if 
app.is_present(\"target_directory\") {\n\n setup_file = target_directory.join(setup_file);\n\n }\n\n setup_file\n\n };\n\n let private = app.is_present(\"private\");\n\n\n\n let mut local_setup_cfg = LocalSetupCfg::new(setup_name.clone(), setup_file.clone());\n", "file_path": "src/cli/commands/generate.rs", "rank": 94, "score": 99842.12056544007 }, { "content": "pub trait ToStringEnumConfirm {\n\n fn to_string(&self) -> String;\n\n}\n\n\n\nimpl<E> ToStringEnumConfirm for Vec<E>\n\nwhere\n\n E: EnumConfirm,\n\n{\n\n fn to_string(&self) -> String {\n\n let mut buf = String::new();\n\n write!(&mut buf, \"[\").unwrap();\n\n for (i, e) in self.iter().enumerate() {\n\n if i > 0 {\n\n write!(&mut buf, \",\").unwrap();\n\n }\n\n write!(&mut buf, \"{}\", e.to_char().to_string().bold()).unwrap();\n\n }\n\n write!(&mut buf, \"]\").unwrap();\n\n buf\n\n }\n", "file_path": "src/cli/terminal/confirm.rs", "rank": 95, "score": 99480.48761026307 }, { "content": "type GlobalDir = PathBuf;\n\n\n", "file_path": "src/cli/cfg.rs", "rank": 96, "score": 99111.14226238294 }, { "content": "type LocalDir = PathBuf;\n", "file_path": "src/cli/cfg.rs", "rank": 97, "score": 99111.14226238294 }, { "content": "pub fn init<L: AsRef<str>>(label: L) -> IntegrationTestEnvironment {\n\n let mut e = IntegrationTestEnvironment::new(label);\n\n e.add_dir(HOME_DIR);\n\n e.add_dir(PROJECT_DIR);\n\n e.add_dir(PRIVATE_ENV_DIR);\n\n e.add_dir(TMP_DIR);\n\n e.set_cfg_command_callback(|root_path, mut command| {\n\n command.current_dir(root_path.join(PROJECT_DIR));\n\n command.env(\"NO_COLOR\", \"1\");\n\n command.env(\"HOME\", root_path.join(HOME_DIR));\n\n\n\n #[cfg(unix)]\n\n command.env(\"TMPDIR\", root_path.join(TMP_DIR));\n\n\n\n #[cfg(windows)]\n\n command.env(\"TMP\", &root_path.join(TMP_DIR));\n\n\n\n command\n\n });\n\n e\n\n}\n", "file_path": "tests/test_utils.rs", "rank": 98, "score": 98446.92322732153 }, { "content": "#[test]\n\nfn cmd_rename_with_use() {\n\n let mut e = 
init(\"cmd_rename_with_use\");\n\n\n\n e.add_file(\n\n PROJECT_CFG_FILE,\n\n r#\"\n\nsetups:\n\n setup_1:\n\n file: run.sh\n\n array_vars: {}\n\n \"#,\n\n );\n\n e.add_file(\n\n HOME_CFG_FILE,\n\n format!(\n\n r#\"---\n\nprojects:\n\n - file: {file}\n\n current:\n\n setup: setup_1\n", "file_path": "tests/rename.rs", "rank": 99, "score": 96755.04713489783 } ]
Rust
src/repl.rs
wylee/feint
2ad7c43ae575684859996da2ce560168c82140d3
use std::path::Path; use rustyline::error::ReadlineError; use crate::compiler::CompilationErrKind; use crate::parser::ParseErrKind; use crate::result::ExitResult; use crate::scanner::ScanErrKind; use crate::util::Location; use crate::vm::{execute, execute_text, Inst, RuntimeErrKind, VMState, VM}; pub fn run(history_path: Option<&Path>, dis: bool, debug: bool) -> ExitResult { let mut repl = Repl::new(history_path, VM::default(), dis, debug); repl.run() } struct Repl<'a> { reader: rustyline::Editor<()>, history_path: Option<&'a Path>, vm: VM, dis: bool, debug: bool, } impl<'a> Repl<'a> { fn new(history_path: Option<&'a Path>, vm: VM, dis: bool, debug: bool) -> Self { Repl { reader: rustyline::Editor::<()>::new(), history_path, vm, dis, debug } } fn run(&mut self) -> ExitResult { println!("Welcome to the FeInt REPL (read/eval/print loop)"); println!("Type a line of code, then hit Enter to evaluate it"); self.load_history(); println!("Type .exit or .quit to exit"); loop { match self.read_line("→ ", true) { Ok(None) => { () } Ok(Some(input)) => { match self.eval(input.as_str(), false) { Some(result) => { self.vm.halt(); break result; } None => (), } } Err(ReadlineError::Interrupted) | Err(ReadlineError::Eof) => { self.vm.halt(); break Ok(None); } Err(err) => { self.vm.halt(); break Err((1, format!("Could not read line: {}", err))); } } } } fn read_line( &mut self, prompt: &str, trim_blank: bool, ) -> Result<Option<String>, ReadlineError> { match self.reader.readline(prompt) { Ok(input) if trim_blank && input.trim().len() == 0 => Ok(None), Ok(input) => Ok(Some(input)), Err(err) => Err(err), } } fn eval(&mut self, text: &str, bail: bool) -> Option<ExitResult> { self.add_history_entry(text); let result = match text.trim() { "?" 
| ".help" => { eprintln!("{:=>72}", ""); eprintln!("FeInt Help"); eprintln!("{:->72}", ""); eprintln!(".help -> show help"); eprintln!(".exit -> exit"); eprintln!(".stack -> show VM stack (top first)"); eprintln!("{:=>72}", ""); return None; } ".exit" | ".quit" => return Some(Ok(None)), ".stack" => { self.vm.display_stack(); return None; } _ => execute_text(&mut self.vm, text, self.dis, self.debug), }; if let Ok(vm_state) = result { let var = "_"; let mut instructions = vec![Inst::AssignVar(var.to_owned())]; if let Some(&index) = self.vm.peek() { if index != 0 { instructions.push(Inst::Print); } } if let Err(err) = execute(&mut self.vm, instructions, false, false) { if let RuntimeErrKind::NotEnoughValuesOnStack(_) = err.kind { let instructions = vec![Inst::Push(0), Inst::AssignVar(var.to_owned())]; if let Err(err) = execute(&mut self.vm, instructions, false, false) { eprintln!( "ERROR: Could not assign _ to top of stack or to nil:\n{}", err ); } } } return self.vm_state_to_exit_result(vm_state); } let err = result.unwrap_err(); if self.handle_execution_err(err.kind, bail) { let mut input = text.to_owned(); let mut blank_line_count = 0; loop { match self.read_line("+ ", false) { Ok(None) => unreachable!(), Ok(Some(new_input)) if new_input == "" => { input.push('\n'); if blank_line_count > 0 { break self.eval(input.as_str(), true); } blank_line_count += 1; } Ok(Some(new_input)) => { input.push('\n'); input.push_str(new_input.as_str()); if blank_line_count > 0 { break self.eval(input.as_str(), true); } blank_line_count = 0; } Err(err) => break Some(Err((2, format!("{}", err)))), } } } else { None } } fn vm_state_to_exit_result(&self, vm_state: VMState) -> Option<ExitResult> { match vm_state { VMState::Idle => None, VMState::Halted(0) => None, VMState::Halted(code) => { Some(Err((code, format!("Halted abnormally: {}", code)))) } } } fn handle_execution_err(&mut self, kind: RuntimeErrKind, bail: bool) -> bool { let message = match kind { 
RuntimeErrKind::CompilationError(err) => { return self.handle_compilation_err(err.kind, bail); } RuntimeErrKind::ParseError(err) => { return self.handle_parse_err(err.kind, bail); } RuntimeErrKind::TypeError(message) => { format!("{}", message) } err => { format!("Unhandled execution error: {:?}", err) } }; eprintln!("{}", message); false } fn handle_compilation_err(&mut self, kind: CompilationErrKind, bail: bool) -> bool { let message = match kind { err => format!("Unhandled compilation error: {:?}", err), }; eprintln!("{}", message); false } fn handle_parse_err(&mut self, kind: ParseErrKind, bail: bool) -> bool { match kind { ParseErrKind::ScanErr(err) => { return self.handle_scan_err(err.kind, err.location, bail); } ParseErrKind::UnexpectedToken(token) => { let loc = token.start; eprintln!("{: >width$}^", "", width = loc.col + 1); eprintln!("Parse error: unhandled token at {}: {:?}", loc, token.token); } ParseErrKind::ExpectedBlock(loc) => { if bail { eprintln!("{: >width$}^", "", width = loc.col + 1); eprintln!("Parse error: expected indented block at {}", loc); } else { return true; } } err => { eprintln!("Unhandled parse error: {:?}", err); } } false } fn handle_scan_err( &mut self, kind: ScanErrKind, loc: Location, bail: bool, ) -> bool { match kind { ScanErrKind::UnexpectedCharacter(c) => { let col = loc.col; eprintln!("{: >width$}^", "", width = col + 1); eprintln!( "Syntax error: unexpected character at column {}: '{}'", col, c ); } ScanErrKind::UnmatchedOpeningBracket(_) | ScanErrKind::UnterminatedString(_) => { return true; } ScanErrKind::InvalidIndent(num_spaces) => { let col = loc.col; eprintln!("{: >width$}^", "", width = col + 1); eprintln!("Syntax error: invalid indent with {} spaces (should be a multiple of 4)", num_spaces); } ScanErrKind::UnexpectedIndent(_) => { let col = loc.col; eprintln!("{: >width$}^", "", width = col + 1); eprintln!("Syntax error: unexpected indent"); } ScanErrKind::WhitespaceAfterIndent | ScanErrKind::UnexpectedWhitespace 
=> { let col = loc.col; eprintln!("{: >width$}^", "", width = col + 1); eprintln!("Syntax error: unexpected whitespace"); } err => { eprintln!("Unhandled scan error at {}: {:?}", loc, err); } } false } fn load_history(&mut self) { match self.history_path { Some(path) => { println!("REPL history will be saved to {}", path.to_string_lossy()); match self.reader.load_history(path) { Ok(_) => (), Err(err) => eprintln!("Could not load REPL history: {}", err), } } None => (), } } fn add_history_entry(&mut self, input: &str) { match self.history_path { Some(path) => { self.reader.add_history_entry(input); match self.reader.save_history(path) { Ok(_) => (), Err(err) => eprintln!("Could not save REPL history: {}", err), } } None => (), } } } #[cfg(test)] mod tests { use super::*; #[test] fn eval_empty() { eval(""); } #[test] fn eval_arithmetic() { eval("2 * (3 + 4)"); } #[test] fn eval_string() { eval("\"abc\""); } #[test] fn eval_multiline_string() { eval("\"a \nb c\""); } fn new<'a>() -> Repl<'a> { let vm = VM::default(); Repl::new(None, vm, false, false) } fn eval(input: &str) { let mut runner = new(); match runner.eval(input, true) { Some(Ok(string)) => assert!(false), Some(Err((code, string))) => assert!(false), None => assert!(true), } } }
use std::path::Path; use rustyline::error::ReadlineError; use crate::compiler::CompilationErrKind; use crate::parser::ParseErrKind; use crate::result::ExitResult; use crate::scanner::ScanErrKind; use crate::util::Location; use crate::vm::{execute, execute_text, Inst, RuntimeErrKind, VMState, VM}; pub fn run(history_path: Option<&Path>, dis: bool, debug: bool) -> ExitResult { let mut repl = Repl::new(history_path, VM::default(), dis, debug); repl.run() } struct Repl<'a> { reader: rustyline::Editor<()>, history_path: Option<&'a Path>, vm: VM, dis: bool, debug: bool, } impl<'a> Repl<'a> { fn new(history_path: Option<&'a Path>, vm: VM, dis: bool, debug: bool) -> Self { Repl { reader: rustyline::Editor::<()>::new(), history_path, vm, dis, debug } } fn run(&mut self) -> ExitResult { println!("Welcome to the FeInt REPL (read/eval/print loop)"); println!("Type a line of code, then hit Enter to evaluate it"); self.load_history(); println!("Type .exit or .quit to exit"); loop { match self.read_line("→ ", true) { Ok(None) => { () } Ok(Some(input)) => { match self.eval(input.as_str(), false) { Some(result) => { self.vm.halt(); break result; } None => (), } } Err(ReadlineError::Interrupted) | Err(ReadlineError::Eof) => { self.vm.halt(); break Ok(None); } Err(err) => { self.vm.halt(); break Err((1, format!("Could not read line: {}", err))); } } } } fn read_line( &mut self, prompt: &str, trim_blank: bool, ) -> Result<Option<String>, ReadlineError> { match self.reader.readline(prompt) { Ok(input) if trim_blank && input.trim().len() == 0 => Ok(None), Ok(input) => Ok(Some(input)), Err(err) => Err(err), } } fn eval(&mut self, text: &str, bail: bool) -> Option<ExitResult> { self.add_history_entry(text); let result = match text.trim() { "?" 
| ".help" => { eprintln!("{:=>72}", ""); eprintln!("FeInt Help"); eprintln!("{:->72}", ""); eprintln!(".help -> show help"); eprintln!(".exit -> exit"); eprintln!(".stack -> show VM stack (top first)"); eprintln!("{:=>72}", ""); return None; } ".exit" | ".quit" => return Some(Ok(None)), ".stack" => { self.vm.display_stack(); return None; } _ => execute_text(&mut self.vm, text, self.dis, self.debug), }; if let Ok(vm_state) = result { let var = "_"; let mut instructions = vec![Inst::AssignVar(var.to_owned())]; if let Some(&index) = self.vm.peek() { if index != 0 { instructions.push(Inst::Print); } } if let Err(err) = execute(&mut self.vm, instructions, false, false) { if let RuntimeErrKind::NotEnoughValuesOnStack(_) = err.kind { let instructions = vec![Inst::Push(0), Inst::AssignVar(var.to_owned())]; if let Err(err) = execute(&mut self.vm, instructions, false, false) { eprintln!( "ERROR: Could not assign _ to top of stack or to nil:\n{}", err ); } } } return self.vm_state_to_exit_result(vm_state); } let err = result.unwrap_err(); if self.handle_execution_err(err.kind, bail) { let mut input = text.to_owned(); let mut blank_line_count = 0; loop { match self.read_line("+ ", false) { Ok(None) => unreachable!(), Ok(Some(new_input)) if new_input == "" => { input.push('\n'); if blank_line_count > 0 { break self.eval(input.as_str(), true); } blank_line_count += 1; } Ok(Some(new_input)) => { input.push('\n'); input.push_str(new_input.as_str()); if blank_line_count > 0 { break self.eval(input.as_str(), true); } blank_line_count = 0; } Err(err) => break Some(Err((2, format!("{}", err)))), } } } else { None } } fn vm_state_to_exit_result(&self, vm_state: VMState) -> Option<ExitResult> { match vm_state { VMState::Idle => None, VMState::Halted(0) => None, VMState::Halted(code) => { Some(Err((code, format!("Halted abnormally: {}", code)))) } } } fn handle_execution_err(&mut self, kind: RuntimeErrKind, bail: bool) -> bool { let message = match kind { 
RuntimeErrKind::CompilationError(err) => { return self.handle_compilation_err(err.kind, bail); } RuntimeErrKind::ParseError(err) => { return self.handle_parse_err(err.kind, bail); } RuntimeErrKind::TypeError(message) => { format!("{}", message) } err => { format!("Unhandled execution error: {:?}", err) } }; eprintln!("{}", message); false } fn handle_compilation_err(&mut self, kind: Compilatio
fn handle_parse_err(&mut self, kind: ParseErrKind, bail: bool) -> bool { match kind { ParseErrKind::ScanErr(err) => { return self.handle_scan_err(err.kind, err.location, bail); } ParseErrKind::UnexpectedToken(token) => { let loc = token.start; eprintln!("{: >width$}^", "", width = loc.col + 1); eprintln!("Parse error: unhandled token at {}: {:?}", loc, token.token); } ParseErrKind::ExpectedBlock(loc) => { if bail { eprintln!("{: >width$}^", "", width = loc.col + 1); eprintln!("Parse error: expected indented block at {}", loc); } else { return true; } } err => { eprintln!("Unhandled parse error: {:?}", err); } } false } fn handle_scan_err( &mut self, kind: ScanErrKind, loc: Location, bail: bool, ) -> bool { match kind { ScanErrKind::UnexpectedCharacter(c) => { let col = loc.col; eprintln!("{: >width$}^", "", width = col + 1); eprintln!( "Syntax error: unexpected character at column {}: '{}'", col, c ); } ScanErrKind::UnmatchedOpeningBracket(_) | ScanErrKind::UnterminatedString(_) => { return true; } ScanErrKind::InvalidIndent(num_spaces) => { let col = loc.col; eprintln!("{: >width$}^", "", width = col + 1); eprintln!("Syntax error: invalid indent with {} spaces (should be a multiple of 4)", num_spaces); } ScanErrKind::UnexpectedIndent(_) => { let col = loc.col; eprintln!("{: >width$}^", "", width = col + 1); eprintln!("Syntax error: unexpected indent"); } ScanErrKind::WhitespaceAfterIndent | ScanErrKind::UnexpectedWhitespace => { let col = loc.col; eprintln!("{: >width$}^", "", width = col + 1); eprintln!("Syntax error: unexpected whitespace"); } err => { eprintln!("Unhandled scan error at {}: {:?}", loc, err); } } false } fn load_history(&mut self) { match self.history_path { Some(path) => { println!("REPL history will be saved to {}", path.to_string_lossy()); match self.reader.load_history(path) { Ok(_) => (), Err(err) => eprintln!("Could not load REPL history: {}", err), } } None => (), } } fn add_history_entry(&mut self, input: &str) { match self.history_path { 
Some(path) => { self.reader.add_history_entry(input); match self.reader.save_history(path) { Ok(_) => (), Err(err) => eprintln!("Could not save REPL history: {}", err), } } None => (), } } } #[cfg(test)] mod tests { use super::*; #[test] fn eval_empty() { eval(""); } #[test] fn eval_arithmetic() { eval("2 * (3 + 4)"); } #[test] fn eval_string() { eval("\"abc\""); } #[test] fn eval_multiline_string() { eval("\"a \nb c\""); } fn new<'a>() -> Repl<'a> { let vm = VM::default(); Repl::new(None, vm, false, false) } fn eval(input: &str) { let mut runner = new(); match runner.eval(input, true) { Some(Ok(string)) => assert!(false), Some(Err((code, string))) => assert!(false), None => assert!(true), } } }
nErrKind, bail: bool) -> bool { let message = match kind { err => format!("Unhandled compilation error: {:?}", err), }; eprintln!("{}", message); false }
function_block-function_prefixed
[ { "content": "/// Execute source text.\n\npub fn execute_text(vm: &mut VM, text: &str, dis: bool, debug: bool) -> ExeResult {\n\n execute_parse_result(vm, parser::parse_text(text, debug), dis, debug)\n\n}\n\n\n", "file_path": "src/vm/vm.rs", "rank": 0, "score": 421688.4762652945 }, { "content": "/// Execute source from file.\n\npub fn execute_file(vm: &mut VM, path: &str, dis: bool, debug: bool) -> ExeResult {\n\n execute_parse_result(vm, parser::parse_file(path, debug), dis, debug)\n\n}\n\n\n", "file_path": "src/vm/vm.rs", "rank": 1, "score": 396211.1683872788 }, { "content": "/// Run text source.\n\npub fn run_text(text: &str, dis: bool, debug: bool) -> ExitResult {\n\n let mut vm = VM::default();\n\n let mut runner = Runner::new(debug);\n\n runner.exit(vm::execute_text(&mut vm, text, dis, debug))\n\n}\n\n\n", "file_path": "src/run.rs", "rank": 2, "score": 352029.9792518962 }, { "content": "pub fn execute(vm: &mut VM, instructions: Chunk, dis: bool, debug: bool) -> ExeResult {\n\n let result = if cfg!(debug_assertions) {\n\n if dis {\n\n eprintln!(\"{:=<72}\", \"INSTRUCTIONS \");\n\n } else if debug {\n\n eprintln!(\"{:=<72}\", \"OUTPUT \");\n\n }\n\n vm.execute(instructions, dis)\n\n } else if dis {\n\n eprintln!(\"{:=<72}\", \"INSTRUCTIONS \");\n\n let result = vm.dis_list(&instructions);\n\n eprintln!(\"NOTE: Full disassembly is only available in debug builds\");\n\n result\n\n } else {\n\n if debug {\n\n eprintln!(\"{:=<72}\", \"OUTPUT \");\n\n }\n\n vm.execute(instructions, false)\n\n };\n\n if debug {\n", "file_path": "src/vm/vm.rs", "rank": 3, "score": 347083.6944975556 }, { "content": "/// Execute source from stdin.\n\npub fn execute_stdin(vm: &mut VM, dis: bool, debug: bool) -> ExeResult {\n\n execute_parse_result(vm, parser::parse_stdin(debug), dis, debug)\n\n}\n\n\n", "file_path": "src/vm/vm.rs", "rank": 5, "score": 334925.0324108298 }, { "content": "/// Run source from file.\n\npub fn run_file(file_path: &str, dis: bool, debug: bool) -> ExitResult 
{\n\n let mut vm = VM::default();\n\n let mut runner = Runner::new(debug);\n\n runner.exit(vm::execute_file(&mut vm, file_path, dis, debug))\n\n}\n\n\n", "file_path": "src/run.rs", "rank": 6, "score": 318866.65218092536 }, { "content": "/// Scan the text into tokens, parse the tokens, and return the\n\n/// resulting AST or error.\n\npub fn parse_text(text: &str, debug: bool) -> ParseResult {\n\n let scanner = Scanner::<Cursor<&str>>::from_text(text);\n\n let mut parser = Parser::new(scanner.into_iter());\n\n handle_result(parser.parse(), debug)\n\n}\n\n\n", "file_path": "src/parser/parser.rs", "rank": 7, "score": 305968.3243440103 }, { "content": "/// Create a scanner from the specified text, scan the text, and return\n\n/// the resulting tokens or error.\n\npub fn scan_text(text: &str, debug: bool) -> ScanTokensResult {\n\n let scanner = Scanner::<Cursor<&str>>::from_text(text);\n\n handle_result(scanner.collect(), debug)\n\n}\n\n\n", "file_path": "src/scanner/scanner.rs", "rank": 8, "score": 300782.6822665151 }, { "content": "/// Scan the file into tokens, parse the tokens, and return the\n\n/// resulting AST or error.\n\npub fn parse_file(file_path: &str, debug: bool) -> ParseResult {\n\n let result = Scanner::<BufReader<File>>::from_file(file_path);\n\n let scanner = match result {\n\n Ok(scanner) => scanner,\n\n Err(err) => {\n\n return Err(ParseErr::new(ParseErrKind::CouldNotOpenSourceFile(\n\n file_path.to_string(),\n\n err.to_string(),\n\n )));\n\n }\n\n };\n\n let mut parser = Parser::new(scanner.into_iter());\n\n handle_result(parser.parse(), debug)\n\n}\n\n\n", "file_path": "src/parser/parser.rs", "rank": 9, "score": 271436.997828218 }, { "content": "/// Read and run source from stdin.\n\npub fn run_stdin(dis: bool, debug: bool) -> ExitResult {\n\n let mut vm = VM::default();\n\n let mut runner = Runner::new(debug);\n\n runner.exit(vm::execute_stdin(&mut vm, dis, debug))\n\n}\n\n\n", "file_path": "src/run.rs", "rank": 10, "score": 271076.4340278512 }, { 
"content": "/// Create a scanner from the specified file, scan its text, and return\n\n/// the resulting tokens or error.\n\npub fn scan_file(file_path: &str, debug: bool) -> ScanTokensResult {\n\n let result = Scanner::<BufReader<File>>::from_file(file_path);\n\n let scanner = match result {\n\n Ok(scanner) => scanner,\n\n Err(err) => {\n\n return Err(ScanErr::new(\n\n ScanErrKind::CouldNotOpenSourceFile(\n\n file_path.to_string(),\n\n err.to_string(),\n\n ),\n\n Location::new(0, 0),\n\n ));\n\n }\n\n };\n\n handle_result(scanner.collect(), debug)\n\n}\n\n\n", "file_path": "src/scanner/scanner.rs", "rank": 11, "score": 266488.0589334557 }, { "content": "/// Compile AST to VM instructions.\n\npub fn compile(vm: &mut VM, program: ast::Program, _debug: bool) -> CompilationResult {\n\n let mut visitor = Visitor::new(&mut vm.ctx);\n\n visitor.visit_program(program)?;\n\n Ok(visitor.instructions)\n\n}\n\n\n\n// Visitor -------------------------------------------------------------\n\n\n", "file_path": "src/compiler/compiler.rs", "rank": 12, "score": 264753.0561235745 }, { "content": "/// Scan text and assume success, returning tokens in unwrapped form.\n\n/// Panic on error. 
Mainly useful for testing.\n\npub fn scan_optimistic(text: &str, debug: bool) -> Vec<TokenWithLocation> {\n\n match scan_text(text, debug) {\n\n Ok(tokens) => tokens,\n\n Err(err) => panic!(\"Scan failed unexpectedly: {:?}\", err),\n\n }\n\n}\n\n\n", "file_path": "src/scanner/scanner.rs", "rank": 13, "score": 259034.56015926518 }, { "content": "pub fn scan(string: &str) -> Result<Vec<Token>, FormatStringErr> {\n\n let mut tokens: Vec<Token> = Vec::new();\n\n let mut chars = string.chars();\n\n let mut peek_chars = string.chars();\n\n\n\n // Current position in format string\n\n let mut pos = 0usize;\n\n\n\n // Current non-group part\n\n let mut part = String::with_capacity(32);\n\n\n\n // Current expression inside group\n\n let mut expr = String::with_capacity(32);\n\n\n\n peek_chars.next();\n\n\n\n while let Some(c) = chars.next() {\n\n let d = peek_chars.next();\n\n\n\n match (c, d) {\n", "file_path": "src/format.rs", "rank": 14, "score": 238990.56224700797 }, { "content": "/// Scan text from stdin into tokens, parse the tokens, and return the\n\n/// resulting AST or error.\n\npub fn parse_stdin(debug: bool) -> ParseResult {\n\n let scanner = Scanner::<BufReader<io::Stdin>>::from_stdin();\n\n let mut parser = Parser::new(scanner.into_iter());\n\n handle_result(parser.parse(), debug)\n\n}\n\n\n", "file_path": "src/parser/parser.rs", "rank": 15, "score": 218631.76353118633 }, { "content": "/// Create a scanner from stdin, scan the text into tokens, and return\n\n/// the resulting tokens or error.\n\npub fn scan_stdin(debug: bool) -> ScanTokensResult {\n\n let scanner = Scanner::<BufReader<io::Stdin>>::from_stdin();\n\n handle_result(scanner.collect(), debug)\n\n}\n\n\n", "file_path": "src/scanner/scanner.rs", "rank": 16, "score": 214456.28596376476 }, { "content": "/// Execute parse result.\n\npub fn execute_parse_result(\n\n vm: &mut VM,\n\n result: ParseResult,\n\n dis: bool,\n\n debug: bool,\n\n) -> ExeResult {\n\n match result {\n\n Ok(program) => 
execute_program(vm, program, dis, debug),\n\n Err(err) => Err(RuntimeErr::new(RuntimeErrKind::ParseError(err))),\n\n }\n\n}\n\n\n", "file_path": "src/vm/vm.rs", "rank": 17, "score": 208366.648227803 }, { "content": "fn handle_result(result: ParseResult, debug: bool) -> ParseResult {\n\n result.map(|program| {\n\n if debug {\n\n eprintln!(\"{:=<72}\", \"AST \");\n\n eprintln!(\"{:?}\", program);\n\n };\n\n program\n\n })\n\n}\n\n\n", "file_path": "src/parser/parser.rs", "rank": 18, "score": 199091.54033668627 }, { "content": "fn handle_result(result: ScanTokensResult, debug: bool) -> ScanTokensResult {\n\n result.map(|tokens| {\n\n if debug {\n\n for token in tokens.iter() {\n\n eprintln!(\"{:?}\", token);\n\n }\n\n }\n\n tokens\n\n })\n\n}\n\n\n\npub struct Scanner<T: BufRead> {\n\n /// This is the source code that's being scanned. T can be anything\n\n /// that implements the BufRead trait (e.g., a Cursor wrapping some\n\n /// text or a BufReader wrapping an open file).\n\n source: Source<T>,\n\n /// Temporary storage for tokens. 
This is mainly needed to handle\n\n /// the complexity of indents, because there are cases where\n\n /// multiple tokens will need to be emitted.\n\n queue: VecDeque<TokenWithLocation>,\n", "file_path": "src/scanner/scanner.rs", "rank": 19, "score": 193704.83754752084 }, { "content": "/// Parse tokens and return the resulting AST or error.\n\npub fn parse_tokens(tokens: Vec<TokenWithLocation>, debug: bool) -> ParseResult {\n\n let scanner: Vec<ScanResult> = vec![];\n\n let mut parser = Parser::new(scanner.into_iter());\n\n parser.lookahead_queue.extend(tokens);\n\n handle_result(parser.parse(), debug)\n\n}\n\n\n", "file_path": "src/parser/parser.rs", "rank": 20, "score": 192171.7962325702 }, { "content": "/// Exit with non-zero and error message.\n\nfn error_exit(code: i32, message: String) {\n\n eprintln!(\"{}\", message);\n\n process::exit(code);\n\n}\n", "file_path": "src/main.rs", "rank": 21, "score": 190455.70292998664 }, { "content": "/// Create a new VM and execute AST program.\n\npub fn execute_program(\n\n vm: &mut VM,\n\n program: ast::Program,\n\n dis: bool,\n\n debug: bool,\n\n) -> ExeResult {\n\n match compile(vm, program, debug) {\n\n Ok(instructions) => execute(vm, instructions, dis, debug),\n\n Err(err) => Err(RuntimeErr::new(RuntimeErrKind::CompilationError(err))),\n\n }\n\n}\n\n\n", "file_path": "src/vm/vm.rs", "rank": 22, "score": 180804.98131867248 }, { "content": "/// Return true if the token represents a right-associate operator.\n\npub fn is_right_associative(token: &Token) -> bool {\n\n match token {\n\n Token::Caret => true, // a ^ b (exponentiation)\n\n Token::Equal => true, // a = b = c (assignment)\n\n _ => false,\n\n }\n\n}\n\n\n\n#[rustfmt::skip]\n", "file_path": "src/parser/precedence.rs", "rank": 23, "score": 142950.9525179202 }, { "content": "/// Exit 0 with optional message.\n\nfn exit(message: Option<String>) {\n\n if message.is_some() {\n\n println!(\"{}\", message.unwrap());\n\n }\n\n process::exit(0);\n\n}\n\n\n", "file_path": 
"src/main.rs", "rank": 24, "score": 136725.0458885074 }, { "content": "/// Compare Int and Float for equality.\n\npub fn eq_int_float(int: &Int, float: &Float) -> bool {\n\n let float_val = float.value();\n\n if float_val.fract() == 0.0 {\n\n let int_val = int.value();\n\n let float_as_int = BigInt::from_f64(*float_val).unwrap();\n\n float_as_int == *int_val\n\n } else {\n\n false\n\n }\n\n}\n", "file_path": "src/types/util.rs", "rank": 26, "score": 129791.27003812292 }, { "content": "#[test]\n\nfn scan_unexpected_indent_on_first_line() {\n\n let source = \" abc = 1\";\n\n let result = scan_text(source, true);\n\n assert!(result.is_err());\n\n match result.unwrap_err() {\n\n ScanErr { kind: ScanErrKind::UnexpectedIndent(1), location } => {\n\n assert_eq!(location.line, 1);\n\n assert_eq!(location.col, 1);\n\n }\n\n err => assert!(false, \"Unexpected error: {:?}\", err),\n\n }\n\n}\n\n\n", "file_path": "src/scanner/tests.rs", "rank": 27, "score": 110878.89683645661 }, { "content": "type VisitResult = Result<(), CompilationErr>;\n\n\n", "file_path": "src/compiler/compiler.rs", "rank": 28, "score": 95692.43069233844 }, { "content": "/// Get binary precedence of token.\n\npub fn get_binary_precedence(token: &Token) -> u8 {\n\n get_operator_precedence(token).1\n\n}\n\n\n", "file_path": "src/parser/precedence.rs", "rank": 29, "score": 91134.42488299799 }, { "content": "/// Get unary precedence of token.\n\npub fn get_unary_precedence(token: &Token) -> u8 {\n\n get_operator_precedence(token).0\n\n}\n\n\n", "file_path": "src/parser/precedence.rs", "rank": 30, "score": 91134.42488299799 }, { "content": "/// Get the default history path, which is either ~/.feint_history or,\n\n/// if the user's home directory can't be located, ./.feint_history.\n\nfn default_history_path() -> PathBuf {\n\n let home = dirs::home_dir();\n\n let base_path = home.unwrap_or_default();\n\n let history_path_buf = base_path.join(\".feint_history\");\n\n history_path_buf\n\n}\n\n\n", "file_path": 
"src/main.rs", "rank": 31, "score": 89927.22161858415 }, { "content": "/// Return the unary *and* binary precedence of the specified token,\n\n/// which may be 0 for either or both. 0 indicates that the token is\n\n/// not an operator of the respective type.\n\n///\n\n/// TODO: I'm not sure this is the best way to define this mapping.\n\n/// Would a static hash map be better? One issue with that is\n\n/// that Token can't be used as a hash map key, since it's not\n\n/// hashable. That could probably be \"fixed\", but it would be\n\n/// more complicated than this.\n\npub fn get_operator_precedence(token: &Token) -> (u8, u8) {\n\n match token {\n\n Token::Comma // a, b\n\n | Token::Equal => (0, 1), // a = b\n\n \n\n // TODO: This was added for use in ternary expressions, but\n\n // it doesn't work with if/else blocks. Maybe just use\n\n // `cond ? yes : no`?\n\n // | Token::If => (0, 2), // if ...\n\n \n\n | Token::Or => (0, 3), // a || b\n\n | Token::And => (0, 4), // a && b\n\n\n\n | Token::Is // a == b\n\n | Token::EqualEqual // a == b\n\n | Token::NotEqual => (0, 5), // a != b\n\n \n\n | Token::Plus // +a, a + b\n\n | Token::Minus => (9, 6), // -a, a - b\n\n \n", "file_path": "src/parser/precedence.rs", "rank": 32, "score": 87617.07148513604 }, { "content": "#[test]\n\nfn parse_simple_assignment() {\n\n // R\n\n // |\n\n // n=\n\n // |\n\n // 1\n\n let result = parse_text(\"n = 1\", true);\n\n if let Ok(program) = result {\n\n assert_eq!(program.statements.len(), 1);\n\n // TODO: More checks\n\n } else {\n\n assert!(false, \"Program failed to parse: {:?}\", result);\n\n }\n\n}\n\n\n", "file_path": "src/parser/tests.rs", "rank": 33, "score": 83914.59172762648 }, { "content": "#[test]\n\nfn parse_assign_to_addition() {\n\n let result = parse_text(\"n = 1 + 2\", true);\n\n if let Ok(program) = result {\n\n assert_eq!(program.statements.len(), 1);\n\n eprintln!(\"{:?}\", program);\n\n // TODO: More checks\n\n } else {\n\n assert!(false, \"Program failed to parse: 
{:?}\", result);\n\n }\n\n}\n\n\n", "file_path": "src/parser/tests.rs", "rank": 34, "score": 83914.59172762648 }, { "content": "struct Parser<I: Iterator<Item = ScanResult>> {\n\n current_token: Option<TokenWithLocation>,\n\n token_stream: Peekable<I>,\n\n lookahead_queue: VecDeque<TokenWithLocation>,\n\n}\n\n\n\nimpl<I: Iterator<Item = ScanResult>> Parser<I> {\n\n fn new(token_iter: I) -> Self {\n\n Self {\n\n current_token: None,\n\n token_stream: token_iter.peekable(),\n\n lookahead_queue: VecDeque::new(),\n\n }\n\n }\n\n\n\n // Parse entry point -----------------------------------------------\n\n\n\n /// A program is a list of statements.\n\n fn parse(&mut self) -> ParseResult {\n\n let statements = self.statements()?;\n", "file_path": "src/parser/parser.rs", "rank": 35, "score": 83271.9516345869 }, { "content": " DeclareVar(String),\n\n AssignVar(String),\n\n LoadVar(String),\n\n ScopeStart,\n\n ScopeEnd(usize),\n\n Print, // Print value at top of stack\n\n Return,\n\n Halt(i32),\n\n\n\n InternalError(String),\n\n}\n", "file_path": "src/vm/inst.rs", "rank": 36, "score": 73108.84585764782 }, { "content": "use crate::util::{BinaryOperator, UnaryOperator};\n\n\n\npub type Chunk = Vec<Inst>;\n\n\n\n#[derive(Debug, PartialEq)]\n\npub enum Inst {\n\n NoOp,\n\n Push(usize),\n\n Pop,\n\n\n\n // Jump unconditionally\n\n Jump(usize),\n\n\n\n // If top of stack is true, jump to first address\n\n // Otherwise, jump to second address\n\n JumpIfElse(usize, usize),\n\n\n\n UnaryOp(UnaryOperator),\n\n BinaryOp(BinaryOperator),\n\n LoadConst(usize),\n", "file_path": "src/vm/inst.rs", "rank": 37, "score": 73107.94001677148 }, { "content": "pub struct RuntimeErr {\n\n pub kind: RuntimeErrKind,\n\n}\n\n\n\nimpl RuntimeErr {\n\n pub fn new(kind: RuntimeErrKind) -> Self {\n\n Self { kind }\n\n }\n\n\n\n pub fn new_type_error<S: Into<String>>(message: S) -> Self {\n\n Self::new(RuntimeErrKind::TypeError(message.into()))\n\n }\n\n}\n\n\n\nimpl fmt::Display for RuntimeErr {\n\n fn 
fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {\n\n write!(f, \"{:?}\", self.kind)\n\n }\n\n}\n\n\n", "file_path": "src/vm/result.rs", "rank": 38, "score": 72609.76644296787 }, { "content": "use std::fmt;\n\nuse std::fmt::Formatter;\n\n\n\nuse crate::compiler::CompilationErr;\n\nuse crate::parser::ParseErr;\n\nuse crate::types::ObjectRef;\n\n\n\npub type ExeResult = Result<VMState, RuntimeErr>;\n\npub type RuntimeResult = Result<ObjectRef, RuntimeErr>;\n\npub type RuntimeBoolResult = Result<bool, RuntimeErr>;\n\n\n\n#[derive(Debug, PartialEq)]\n\npub enum VMState {\n\n Idle,\n\n Halted(i32),\n\n}\n\n\n\n// Runtime errors ------------------------------------------------------\n\n\n\n#[derive(Debug)]\n", "file_path": "src/vm/result.rs", "rank": 39, "score": 72609.53903985002 }, { "content": "#[derive(Debug)]\n\npub enum RuntimeErrKind {\n\n EmptyStack,\n\n ObjectNotFound(usize),\n\n NotEnoughValuesOnStack(String),\n\n ParseError(ParseErr),\n\n CompilationError(CompilationErr),\n\n UnhandledInstruction(String),\n\n AttributeDoesNotExist(String),\n\n AttributeCannotBeSet(String),\n\n TypeError(String),\n\n NameError(String),\n\n SyntaxError(String),\n\n}\n\n\n\nimpl fmt::Display for RuntimeErrKind {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {\n\n write!(f, \"{:?}\", self)\n\n }\n\n}\n", "file_path": "src/vm/result.rs", "rank": 40, "score": 72607.76228061684 }, { "content": "type ExitData = (i32, String);\n\n\n\n/// Result type used by top level runners.\n\n///\n\n/// On success, Ok(None) or OK(Some(message: String)) should be\n\n/// returned. In both cases, the program will exit with error code 0. In\n\n/// the latter case, the specified message will be printed to stdout\n\n/// just before exiting.\n\n///\n\n/// On error, Err((code: i32, message: String)) should be returned. Note\n\n/// that on error, a message is *always* required. 
The program will\n\n/// print the specified message to stderr and then exit with the\n\n/// specified error code.\n\npub(crate) type ExitResult = Result<Option<String>, ExitData>;\n", "file_path": "src/result.rs", "rank": 41, "score": 65858.52131795438 }, { "content": "struct Runner {\n\n debug: bool,\n\n}\n\n\n\nimpl Runner {\n\n fn new(debug: bool) -> Self {\n\n Runner { debug }\n\n }\n\n\n\n /// Take result from VM execution and return an appropriate exit\n\n /// result.\n\n fn exit(&mut self, result: ExeResult) -> ExitResult {\n\n match result {\n\n Ok(vm_state) => self.vm_state_to_exit_result(vm_state),\n\n Err(err) => self.handle_execution_err(err.kind),\n\n }\n\n }\n\n\n\n /// Convert VM state to exit result.\n\n fn vm_state_to_exit_result(&self, vm_state: VMState) -> ExitResult {\n", "file_path": "src/run.rs", "rank": 42, "score": 65015.02280085105 }, { "content": "struct Visitor<'a> {\n\n ctx: &'a mut RuntimeContext,\n\n instructions: Chunk,\n\n scope_tree: ScopeTree,\n\n}\n\n\n\nimpl<'a> Visitor<'a> {\n\n fn new(ctx: &'a mut RuntimeContext) -> Self {\n\n Self { ctx, instructions: Chunk::new(), scope_tree: ScopeTree::new() }\n\n }\n\n\n\n // Utilities -------------------------------------------------------\n\n\n\n fn err(&self, message: String) -> VisitResult {\n\n Err(CompilationErr::new(CompilationErrKind::VisitError(message)))\n\n }\n\n\n\n fn push(&mut self, inst: Inst) {\n\n self.instructions.push(inst);\n\n }\n", "file_path": "src/compiler/compiler.rs", "rank": 43, "score": 61690.71038352857 }, { "content": "a = \"b\"\n", "file_path": "python.py", "rank": 44, "score": 60178.59942772999 }, { "content": "/// Represents an instance of some type (AKA \"class\").\n\npub trait Object {\n\n fn class(&self) -> &TypeRef;\n\n fn as_any(&self) -> &dyn Any;\n\n\n\n fn id(&self) -> usize {\n\n let p = self as *const Self;\n\n let p = p as *const () as usize;\n\n p\n\n }\n\n\n\n fn name(&self) -> RustString {\n\n self.class().name().to_owned()\n\n }\n\n\n\n // 
Unary operations ------------------------------------------------\n\n\n\n make_unary_op!(negate, \"-\", RuntimeResult);\n\n make_unary_op!(as_bool, \"!!\", RuntimeBoolResult);\n\n\n\n fn not(&self, ctx: &RuntimeContext) -> RuntimeBoolResult {\n", "file_path": "src/types/object.rs", "rank": 45, "score": 57804.56848072939 }, { "content": "/// Interpret a file if one is specified. Otherwise, run the REPL.\n\nfn main() {\n\n let app = Command::new(\"FeInt\")\n\n .version(\"0.0.0\")\n\n .arg(\n\n Arg::new(\"FILE_NAME\")\n\n .index(1)\n\n .required(false)\n\n .conflicts_with(\"code\")\n\n .help(\"Script file to run (use - to read from stdin)\"),\n\n )\n\n .arg(\n\n Arg::new(\"code\")\n\n .short('c')\n\n .long(\"code\")\n\n .required(false)\n\n .conflicts_with(\"FILE_NAME\")\n\n .takes_value(true)\n\n .help(\"Code to run\"),\n\n )\n\n .arg(\n", "file_path": "src/main.rs", "rank": 46, "score": 54372.407503401504 }, { "content": "/// Methods that aren't \"object safe\"\n\npub trait ObjectExt: Object {\n\n fn is(&self, other: &Self) -> bool {\n\n self.class().is(&other.class()) && self.id() == other.id()\n\n }\n\n}\n\n\n\nimpl<T: Object + ?Sized> ObjectExt for T {}\n\n\n\n// Display -------------------------------------------------------------\n\n\n\n/// Downcast Object to concrete type/object and display that.\n\nmacro_rules! write_instance {\n\n ( $f:ident, $a:ident, $($A:ty),+ ) => { $(\n\n if let Some(a) = $a.as_any().downcast_ref::<$A>() {\n\n return write!($f, \"{}\", a);\n\n }\n\n )+ };\n\n}\n\n\n\nmacro_rules! 
debug_instance {\n", "file_path": "src/types/object.rs", "rank": 47, "score": 54006.12969798061 }, { "content": "# Template Package Docs\n\n\n\nTODO!\n", "file_path": "template/docs/index.md", "rank": 48, "score": 52189.07372188714 }, { "content": "/// Check token returned by scanner against expected token.\n\nfn check_token(\n\n actual: Option<&TokenWithLocation>,\n\n expected: Token,\n\n start_line: usize,\n\n start_col: usize,\n\n end_line: usize,\n\n end_col: usize,\n\n) {\n\n let start = Location::new(start_line, start_col);\n\n let end = Location::new(end_line, end_col);\n\n assert_eq!(actual, Some(&TokenWithLocation::new(expected, start, end)));\n\n}\n\n\n", "file_path": "src/scanner/tests.rs", "rank": 49, "score": 51853.67793696339 }, { "content": "/// Compare attributes for equality. The attribute maps are first\n\n/// checked to see if they have the same number of entries. Then, the\n\n/// keys are checked to see if they're all the same. If they are, only\n\n/// then are the values checked for equality.\n\nfn attributes_equal(\n\n lhs: &Attributes,\n\n rhs: &Attributes,\n\n ctx: &RuntimeContext,\n\n) -> RuntimeBoolResult {\n\n if !(lhs.len() == rhs.len() && lhs.keys().all(|k| rhs.contains_key(k))) {\n\n return Ok(false);\n\n }\n\n for (k, v) in lhs.iter() {\n\n if !v.is_equal(rhs[k].clone(), ctx)? 
{\n\n return Ok(false);\n\n }\n\n }\n\n Ok(true)\n\n}\n\n\n\n// Display -------------------------------------------------------------\n\n\n\nimpl fmt::Display for ComplexObject {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n", "file_path": "src/types/complex.rs", "rank": 50, "score": 51853.24103888826 }, { "content": "#[test]\n\nfn test_custom() {\n\n let ctx = RuntimeContext::default();\n\n\n\n let type_1 = Rc::new(Type::new(\"test\", \"Custom1\"));\n\n let mut obj_1 = ComplexObject::new(type_1);\n\n let value_1 = ctx.builtins.new_int(1);\n\n obj_1.set_attribute(\"value\", value_1).expect(\"Could not set attribute\");\n\n\n\n let type_2 = Rc::new(Type::new(\"test\", \"Custom2\"));\n\n let mut obj_2 = ComplexObject::new(type_2);\n\n let value_2 = ctx.builtins.new_int(1);\n\n obj_2.set_attribute(\"value\", value_2).expect(\"Could not set attribute\");\n\n\n\n // FIXME: ???\n\n assert!(obj_1.is_equal(Rc::new(obj_2), &ctx).expect(\"Could not compare objects\"))\n\n}\n", "file_path": "src/types/tests.rs", "rank": 51, "score": 51849.54652822869 }, { "content": "#[test]\n\nfn scan_brackets() {\n\n let source = \"\n\n\n\na = [\n\n 1,\n\n# comment\n\n 2,\n\n] # another comment\n\n\n\nb = 3\n\n\";\n\n let tokens = scan_optimistic(source, true);\n\n let mut tokens = tokens.iter();\n\n\n\n check_token(tokens.next(), Token::Ident(\"a\".to_string()), 3, 1, 3, 1);\n\n check_token(tokens.next(), Token::Equal, 3, 3, 3, 3);\n\n check_token(tokens.next(), Token::LBracket, 3, 5, 3, 5);\n\n check_token(tokens.next(), Token::Int(BigInt::from(1)), 4, 4, 4, 4);\n\n check_token(tokens.next(), Token::Comma, 4, 5, 4, 5);\n\n check_token(tokens.next(), Token::Int(BigInt::from(2)), 6, 3, 6, 3);\n\n check_token(tokens.next(), Token::Comma, 6, 4, 6, 4);\n\n check_token(tokens.next(), Token::RBracket, 7, 1, 7, 1);\n\n check_token(tokens.next(), Token::EndOfStatement, 7, 21, 7, 21);\n\n check_token(tokens.next(), Token::Ident(\"b\".to_string()), 9, 1, 9, 1);\n\n 
check_token(tokens.next(), Token::Equal, 9, 3, 9, 3);\n\n check_token(tokens.next(), Token::Int(BigInt::from(3)), 9, 5, 9, 5);\n\n check_token(tokens.next(), Token::EndOfStatement, 9, 6, 9, 6);\n\n assert!(tokens.next().is_none());\n\n}\n\n\n", "file_path": "src/scanner/tests.rs", "rank": 52, "score": 51849.54652822869 }, { "content": "#[test]\n\nfn scan_unknown() {\n\n let source = \"{\";\n\n match scan_text(source, true) {\n\n Ok(tokens) => assert!(false),\n\n Err(err) => match err {\n\n ScanErr { kind: ScanErrKind::UnexpectedCharacter(c), location } => {\n\n assert_eq!(c, '{');\n\n assert_eq!(location.line, 1);\n\n assert_eq!(location.col, 1);\n\n }\n\n _ => assert!(false),\n\n },\n\n }\n\n}\n\n\n\n// Utilities -------------------------------------------------------\n\n\n", "file_path": "src/scanner/tests.rs", "rank": 53, "score": 51849.54652822869 }, { "content": "#[test]\n\nfn parse_func() {\n\n let source = \"\\\n\nadd (x, y, z) -> \n\n x + y + z\n\n\n\nadd(1, 2, 3)\n\n\";\n\n let result = parse_text(source, true);\n\n if let Ok(program) = result {\n\n } else {\n\n assert!(false, \"Function def failed to parse: {:?}\", result.unwrap_err());\n\n }\n\n}\n", "file_path": "src/parser/tests.rs", "rank": 54, "score": 51849.54652822869 }, { "content": "#[test]\n\nfn parse_empty() {\n\n let result = parse_text(\"\", true);\n\n if let Ok(program) = result {\n\n assert_eq!(program.statements.len(), 0);\n\n } else {\n\n assert!(false, \"Program failed to parse: {:?}\", result);\n\n }\n\n}\n\n\n\n#[test]\n\n #[rustfmt::skip]\n\n fn parse_int() {\n\n let result = parse_text(\"1\", true);\n\n assert!(result.is_ok());\n\n let program = result.unwrap();\n\n let statements = program.statements;\n\n assert_eq!(statements.len(), 1);\n\n let statement = statements.first().unwrap();\n\n assert_eq!(\n\n *statement,\n", "file_path": "src/parser/tests.rs", "rank": 55, "score": 51849.54652822869 }, { "content": "#[test]\n\nfn parse_precedence() {\n\n let result = parse_text(\"1 + 2 + 
3\", true);\n\n if let Ok(program) = result {\n\n assert_eq!(program.statements.len(), 1);\n\n } else {\n\n assert!(false, \"Program failed to parse\");\n\n }\n\n}\n\n\n", "file_path": "src/parser/tests.rs", "rank": 56, "score": 51849.54652822869 }, { "content": "#[test]\n\n#[rustfmt::skip]\n\nfn create_ast() {\n\n let program = Program::new(vec![\n\n // 1 + 2\n\n Statement::new_expr(\n\n Expr::new_binary_op(\n\n Expr::new_literal(\n\n Literal::new_int(BigInt::from(1))\n\n ),\n\n \"+\",\n\n Expr::new_literal(\n\n Literal::new_int(BigInt::from(2))\n\n ),\n\n )\n\n ),\n\n // 1 - 1\n\n Statement::new_expr(\n\n Expr::new_binary_op(\n\n Expr::new_literal(\n\n Literal::new_int(BigInt::from(1))\n\n ),\n\n \"-\",\n\n Expr::new_literal(\n\n Literal::new_int(BigInt::from(1))\n\n ),\n\n )\n\n )\n\n ]);\n\n println!(\"{:?}\", program);\n\n}\n", "file_path": "src/ast/tests.rs", "rank": 57, "score": 51849.54652822869 }, { "content": "#[test]\n\nfn parse_add() {\n\n // R\n\n // |\n\n // +\n\n // / \\\n\n // 1 2\n\n let result = parse_text(\"1 + 2\", true);\n\n assert!(result.is_ok(), \"{:?}\", result);\n\n let program = result.unwrap();\n\n let statements = program.statements;\n\n assert_eq!(statements.len(), 1);\n\n let statement = statements.first().unwrap();\n\n\n\n assert_eq!(\n\n *statement,\n\n ast::Statement {\n\n kind: ast::StatementKind::Expr(\n\n // 1 + 2\n\n ast::Expr {\n\n kind: ast::ExprKind::BinaryOp(\n", "file_path": "src/parser/tests.rs", "rank": 58, "score": 51849.54652822869 }, { "content": "#[test]\n\nfn scan_indents() {\n\n let source = \"\\\n\nf (x) -> # 1\n\n x # 2\n\n 1 # 3\n\n # 4\n\n # 5\n\ng (y) -> # 6\n\n y # 7\\\n\n\";\n\n let tokens = scan_optimistic(source, true);\n\n let mut tokens = tokens.iter();\n\n\n\n // f\n\n check_token(tokens.next(), Token::Ident(\"f\".to_string()), 1, 1, 1, 1);\n\n check_token(tokens.next(), Token::LParen, 1, 3, 1, 3);\n\n check_token(tokens.next(), Token::Ident(\"x\".to_string()), 1, 4, 1, 4);\n\n 
check_token(tokens.next(), Token::RParen, 1, 5, 1, 5);\n\n check_token(tokens.next(), Token::FuncStart, 1, 7, 1, 8);\n\n check_token(tokens.next(), Token::EndOfStatement, 1, 14, 1, 14);\n", "file_path": "src/scanner/tests.rs", "rank": 59, "score": 51849.54652822869 }, { "content": "#[test]\n\nfn scan_float() {\n\n let tokens = scan_optimistic(\"123.1\", true);\n\n assert_eq!(tokens.len(), 2);\n\n check_token(tokens.get(0), Token::Float(123.1 as f64), 1, 1, 1, 5);\n\n check_token(tokens.get(1), Token::EndOfStatement, 1, 6, 1, 6);\n\n}\n\n\n", "file_path": "src/scanner/tests.rs", "rank": 60, "score": 51849.54652822869 }, { "content": "#[test]\n\nfn scan_int() {\n\n let tokens = scan_optimistic(\"123\", true);\n\n assert_eq!(tokens.len(), 2);\n\n check_token(tokens.get(0), Token::Int(BigInt::from(123)), 1, 1, 1, 3);\n\n check_token(tokens.get(1), Token::EndOfStatement, 1, 4, 1, 4);\n\n}\n\n\n", "file_path": "src/scanner/tests.rs", "rank": 61, "score": 51849.54652822869 }, { "content": "#[test]\n\nfn scan_empty() {\n\n let tokens = scan_optimistic(\"\", true);\n\n assert_eq!(tokens.len(), 0);\n\n}\n\n\n", "file_path": "src/scanner/tests.rs", "rank": 62, "score": 51849.54652822869 }, { "content": "#[test]\n\nfn test_float() {\n\n let ctx = RuntimeContext::default();\n\n\n\n let float1 = ctx.builtins.new_float(0.0);\n\n let float2 = ctx.builtins.new_float(0.0);\n\n let float3 = ctx.builtins.new_float(1.0);\n\n\n\n assert!(float1.class().is(&float2.class()));\n\n assert!(float2.class().is(&float3.class()));\n\n\n\n assert_ne!(float1.id(), float2.id());\n\n assert_ne!(float2.id(), float3.id());\n\n\n\n assert!(float1.is_equal(float2, &ctx).unwrap());\n\n assert!(!float1.is_equal(float3, &ctx).unwrap());\n\n}\n\n\n", "file_path": "src/types/tests.rs", "rank": 63, "score": 51849.54652822869 }, { "content": "fn check_string_token(\n\n actual: Option<&TokenWithLocation>,\n\n expected_string: &str,\n\n expected_start_line: usize,\n\n expected_start_col: usize,\n\n 
expected_end_line: usize,\n\n expected_end_col: usize,\n\n) {\n\n assert!(actual.is_some());\n\n match actual {\n\n Some(TokenWithLocation {\n\n token: Token::String(actual_string),\n\n start: Location { line: actual_start_line, col: actual_start_col },\n\n end: Location { line: actual_end_line, col: actual_end_col },\n\n }) => {\n\n assert_eq!(actual_string, expected_string);\n\n assert_eq!(actual_start_line, &expected_start_line);\n\n assert_eq!(actual_start_col, &expected_start_col);\n\n assert_eq!(actual_end_line, &expected_end_line);\n\n assert_eq!(actual_end_col, &expected_end_col);\n\n }\n\n _ => assert!(false),\n\n }\n\n}\n", "file_path": "src/scanner/tests.rs", "rank": 64, "score": 50718.294220708456 }, { "content": "#[test]\n\nfn scan_string_unclosed() {\n\n let source = \"\\\"abc\";\n\n match scan_text(source, true) {\n\n Err(err) => match err {\n\n ScanErr { kind: ScanErrKind::UnterminatedString(string), location } => {\n\n assert_eq!(string, source.to_string());\n\n assert_eq!(location, Location::new(1, 1));\n\n let new_source = source.to_string() + \"\\\"\";\n\n match scan_text(new_source.as_str(), true) {\n\n Ok(tokens) => {\n\n assert_eq!(tokens.len(), 2);\n\n check_string_token(tokens.get(0), \"abc\", 1, 1, 1, 5);\n\n }\n\n _ => assert!(false),\n\n }\n\n }\n\n _ => assert!(false),\n\n },\n\n _ => assert!(false),\n\n };\n\n}\n\n\n", "file_path": "src/scanner/tests.rs", "rank": 65, "score": 50718.294220708456 }, { "content": "#[test]\n\nfn scan_binary_number() {\n\n let tokens = scan_optimistic(\"0b11\", true); // = 3\n\n assert_eq!(tokens.len(), 2);\n\n check_token(tokens.get(0), Token::Int(BigInt::from(3)), 1, 1, 1, 4);\n\n check_token(tokens.get(1), Token::EndOfStatement, 1, 5, 1, 5);\n\n}\n\n\n", "file_path": "src/scanner/tests.rs", "rank": 66, "score": 50718.294220708456 }, { "content": "#[test]\n\nfn scan_float_with_e_and_no_sign() {\n\n let tokens = scan_optimistic(\"123.1e1\", true);\n\n assert_eq!(tokens.len(), 2);\n\n let expected = 
Token::Float(123.1E+1);\n\n check_token(tokens.get(0), expected, 1, 1, 1, 7);\n\n check_token(tokens.get(1), Token::EndOfStatement, 1, 8, 1, 8);\n\n}\n\n\n", "file_path": "src/scanner/tests.rs", "rank": 67, "score": 50718.294220708456 }, { "content": "#[test]\n\nfn scan_float_with_e_and_sign() {\n\n let tokens = scan_optimistic(\"123.1e+1\", true);\n\n assert_eq!(tokens.len(), 2);\n\n let expected = Token::Float(123.1E+1);\n\n check_token(tokens.get(0), expected, 1, 1, 1, 8);\n\n check_token(tokens.get(1), Token::EndOfStatement, 1, 9, 1, 9);\n\n}\n\n\n", "file_path": "src/scanner/tests.rs", "rank": 68, "score": 50718.294220708456 }, { "content": "#[test]\n\nfn scan_string_with_newline() {\n\n // \"abc\n\n // \"\n\n let source = \"\\\"abc\\n\\\"\";\n\n let tokens = scan_optimistic(source, true);\n\n assert_eq!(tokens.len(), 2);\n\n check_string_token(tokens.get(0), \"abc\\n\", 1, 1, 2, 1);\n\n check_token(tokens.get(1), Token::EndOfStatement, 2, 2, 2, 2);\n\n}\n\n\n", "file_path": "src/scanner/tests.rs", "rank": 69, "score": 50718.294220708456 }, { "content": "#[test]\n\nfn parse_simple_program() {\n\n // ROOT\n\n // / \\\n\n // a= b=\n\n // | |\n\n // 1 +\n\n // / \\\n\n // a 1\n\n let result = parse_text(\"a = 1\\nb = a + 2\\n\", true);\n\n if let Ok(program) = result {\n\n assert_eq!(program.statements.len(), 2);\n\n // TODO: More checks\n\n } else {\n\n assert!(false, \"Program failed to parse\");\n\n }\n\n}\n\n\n", "file_path": "src/parser/tests.rs", "rank": 70, "score": 50718.294220708456 }, { "content": "#[test]\n\nfn scan_string_with_embedded_quote() {\n\n // \"\\\"abc\"\n\n let source = \"\\\"\\\\\\\"abc\\\"\";\n\n let tokens = scan_optimistic(source, true);\n\n assert_eq!(tokens.len(), 2);\n\n check_string_token(tokens.get(0), \"\\\"abc\", 1, 1, 1, 7);\n\n check_token(tokens.get(1), Token::EndOfStatement, 1, 8, 1, 8);\n\n}\n\n\n", "file_path": "src/scanner/tests.rs", "rank": 71, "score": 49661.249136837454 }, { "content": "#[test]\n\nfn 
scan_string_with_escaped_chars() {\n\n let tokens = scan_optimistic(\"\\\"\\\\0\\\\a\\\\b\\\\n\\\\'\\\\\\\"\\\"\", true);\n\n assert_eq!(tokens.len(), 2);\n\n // NOTE: We could put a backslash before the single quote in\n\n // the expected string, but Rust seems to treat \\' and '\n\n // as the same.\n\n check_string_token(tokens.get(0), \"\\0\\x07\\x08\\n'\\\"\", 1, 1, 1, 14);\n\n check_token(tokens.get(1), Token::EndOfStatement, 1, 15, 1, 15);\n\n}\n\n\n", "file_path": "src/scanner/tests.rs", "rank": 72, "score": 49661.249136837454 }, { "content": "#[test]\n\nfn scan_string_with_many_newlines() {\n\n // \" a\n\n // b\n\n //\n\n // c\n\n //\n\n //\n\n // \"\n\n let source = \"\\\" a\\nb\\n\\nc\\n\\n\\n \\\"\";\n\n let tokens = scan_optimistic(source, true);\n\n assert_eq!(tokens.len(), 2);\n\n check_string_token(tokens.get(0), \" a\\nb\\n\\nc\\n\\n\\n \", 1, 1, 7, 3);\n\n check_token(tokens.get(1), Token::EndOfStatement, 7, 4, 7, 4);\n\n}\n\n\n", "file_path": "src/scanner/tests.rs", "rank": 73, "score": 49661.249136837454 }, { "content": "#[test]\n\nfn test_compare_float_to_int() {\n\n let ctx = RuntimeContext::default();\n\n let float = ctx.builtins.new_float(1.0);\n\n let int = ctx.builtins.new_int(1u8);\n\n assert!(float.is_equal(int.clone(), &ctx).unwrap());\n\n assert!(int.is_equal(float.clone(), &ctx).unwrap());\n\n}\n\n\n", "file_path": "src/types/tests.rs", "rank": 74, "score": 49661.249136837454 }, { "content": "#[test]\n\nfn scan_string_with_escaped_regular_char() {\n\n let tokens = scan_optimistic(\"\\\"ab\\\\c\\\"\", true);\n\n assert_eq!(tokens.len(), 2);\n\n check_string_token(tokens.get(0), \"ab\\\\c\", 1, 1, 1, 6);\n\n check_token(tokens.get(1), Token::EndOfStatement, 1, 7, 1, 7);\n\n}\n\n\n", "file_path": "src/scanner/tests.rs", "rank": 75, "score": 48671.341454889436 }, { "content": " None => {\n\n self.err(RuntimeErrKind::EmptyStack)?;\n\n }\n\n },\n\n Inst::Return => {\n\n // TODO: Implement actual return\n\n match self.pop() {\n\n Some(v) => 
println!(\"{}\", v),\n\n None => eprintln!(\"Stack is empty!\"),\n\n }\n\n }\n\n Inst::Halt(code) => {\n\n self.halt();\n\n #[cfg(debug_assertions)]\n\n self.dis(dis, ip, &instructions);\n\n break Ok(VMState::Halted(*code));\n\n }\n\n Inst::InternalError(message) => {\n\n self.halt();\n\n eprintln!(\"INTERNAL ERROR: {}\", message);\n", "file_path": "src/vm/vm.rs", "rank": 76, "score": 44545.77374089634 }, { "content": " for (i, index) in self.stack.iter().enumerate() {\n\n let obj = self.ctx.get_obj(*index);\n\n match obj {\n\n Some(obj) => {\n\n eprintln!(\"{:0>4} ({}) -> {:?}\", i, index, obj)\n\n }\n\n None => eprintln!(\"{:0>4} ({}) -> [NOT AN OBJECT]\", i, index),\n\n }\n\n }\n\n }\n\n\n\n // Disassembler ----------------------------------------------------\n\n //\n\n // This is done here because we need the VM context in order to show\n\n // more useful info like jump targets, values, etc.\n\n\n\n /// Disassemble a list of instructions.\n\n pub fn dis_list(&mut self, instructions: &Chunk) -> ExeResult {\n\n for (ip, _) in instructions.iter().enumerate() {\n\n self.dis(true, ip, instructions);\n", "file_path": "src/vm/vm.rs", "rank": 77, "score": 44534.37880818743 }, { "content": "impl VM {\n\n pub fn new(ctx: RuntimeContext) -> Self {\n\n VM { ctx, stack: Stack::new(), call_stack: Stack::new() }\n\n }\n\n\n\n pub fn halt(&mut self) {\n\n // TODO: Not sure what this should do or if it's even needed\n\n }\n\n\n\n /// Execute the specified instructions and return the VM's state. 
If\n\n /// a HALT instruction isn't encountered, the VM will go \"idle\"; it\n\n /// will maintain its internal state and await further instructions.\n\n /// When a HALT instruction is encountered, the VM's state will be\n\n /// cleared; it can be \"restarted\" by passing more instructions to\n\n /// execute.\n\n fn execute(&mut self, instructions: Chunk, dis: bool) -> ExeResult {\n\n let mut ip: usize = 0;\n\n\n\n loop {\n\n match &instructions[ip] {\n", "file_path": "src/vm/vm.rs", "rank": 78, "score": 44531.34895720225 }, { "content": " }\n\n Inst::AssignVar(name) => {\n\n if let Some(i) = self.pop() {\n\n self.ctx.assign_var(name, i);\n\n self.push(i);\n\n } else {\n\n let message = format!(\"Assignment\");\n\n self.err(RuntimeErrKind::NotEnoughValuesOnStack(message))?;\n\n };\n\n }\n\n Inst::LoadVar(name) => {\n\n if let Some(&index) = self.ctx.get_obj_index(name) {\n\n self.format_strings(index)?;\n\n self.push(index);\n\n } else {\n\n self.err(RuntimeErrKind::NameError(format!(\n\n \"Name not found: {}\",\n\n name\n\n )))?;\n\n }\n", "file_path": "src/vm/vm.rs", "rank": 79, "score": 44530.01240680671 }, { "content": " };\n\n let index = self.ctx.constants.add(value);\n\n self.push(index);\n\n } else {\n\n let message = format!(\"Binary op: {}\", op);\n\n self.err(RuntimeErrKind::NotEnoughValuesOnStack(message))?;\n\n };\n\n }\n\n Inst::ScopeStart => {\n\n self.ctx.enter_scope();\n\n }\n\n Inst::ScopeEnd(count) => {\n\n for _ in 0..*count {\n\n self.ctx.exit_scope();\n\n }\n\n }\n\n Inst::Print => match self.stack.peek() {\n\n Some(index) => {\n\n let val = self.ctx.constants.get(*index).unwrap();\n\n let mut print;\n", "file_path": "src/vm/vm.rs", "rank": 80, "score": 44528.97980577023 }, { "content": " let obj = self.ctx.constants.get(i).unwrap();\n\n if obj.as_bool(&self.ctx)? 
{\n\n ip = *if_addr;\n\n } else {\n\n ip = *else_addr;\n\n }\n\n } else {\n\n self.err(RuntimeErrKind::EmptyStack)?;\n\n };\n\n continue;\n\n }\n\n Inst::LoadConst(index) => {\n\n self.format_strings(*index)?;\n\n self.push(*index);\n\n }\n\n Inst::DeclareVar(name) => {\n\n // NOTE: Currently, declaration and assignment are\n\n // the same thing, so declaration doesn't\n\n // do anything particularly useful ATM.\n\n self.ctx.declare_var(name.as_str());\n", "file_path": "src/vm/vm.rs", "rank": 81, "score": 44527.28922541548 }, { "content": " break Ok(VMState::Halted(-1));\n\n }\n\n inst => {\n\n let message = format!(\"{:?}\", inst);\n\n self.err(RuntimeErrKind::UnhandledInstruction(message))?;\n\n }\n\n }\n\n\n\n #[cfg(debug_assertions)]\n\n if instructions[ip] != Inst::Print {\n\n self.dis(dis, ip, &instructions);\n\n }\n\n\n\n ip += 1;\n\n\n\n if ip == instructions.len() {\n\n break Ok(VMState::Idle);\n\n }\n\n }\n\n }\n", "file_path": "src/vm/vm.rs", "rank": 82, "score": 44526.70294720013 }, { "content": " }\n\n Ok(VMState::Halted(0))\n\n }\n\n\n\n /// Disassemble a single instruction. The `flag` arg is so that\n\n /// we don't have to wrap every call in `if dis { self.dis(...) }`.\n\n pub fn dis(&self, flag: bool, ip: usize, instructions: &Chunk) {\n\n if flag {\n\n let inst = &instructions[ip];\n\n let formatted = self.format_instruction(instructions, inst);\n\n eprintln!(\"{:0>4} {}\", ip, formatted);\n\n }\n\n }\n\n\n\n fn format_instruction(&self, instructions: &Chunk, inst: &Inst) -> RustString {\n\n use Inst::*;\n\n match inst {\n\n NoOp => format!(\"NOOP\"),\n\n Push(index) => {\n\n let obj = self.ctx.get_obj(*index).unwrap();\n", "file_path": "src/vm/vm.rs", "rank": 83, "score": 44524.19078748042 }, { "content": "//! The FeInt virtual machine. When it's created, it's initialized and\n\n//! then, implicitly, goes idle until it's passed some instructions to\n\n//! execute. After instructions are executed, it goes back into idle\n\n//! 
mode.\n\nuse std::fmt;\n\nuse std::rc::Rc;\n\n\n\nuse crate::ast;\n\nuse crate::compiler::compile;\n\nuse crate::parser::{self, ParseResult};\n\nuse crate::util::{BinaryOperator, Stack, UnaryOperator};\n\n\n\nuse super::context::RuntimeContext;\n\nuse super::frame::Frame;\n\nuse super::inst::{Chunk, Inst};\n\nuse super::result::{ExeResult, RuntimeErr, RuntimeErrKind, VMState};\n\nuse crate::types::{ObjectRef, String, Tuple};\n\n\n", "file_path": "src/vm/vm.rs", "rank": 84, "score": 44523.587258513166 }, { "content": " eprintln!(\"{:=<72}\", \"STACK \");\n\n vm.display_stack();\n\n eprintln!(\"{:=<72}\", \"VM STATE \");\n\n eprintln!(\"{:?}\", result);\n\n }\n\n result\n\n}\n\n\n\npub struct VM {\n\n pub ctx: RuntimeContext,\n\n stack: Stack<usize>,\n\n call_stack: Stack<Frame>,\n\n}\n\n\n\nimpl Default for VM {\n\n fn default() -> Self {\n\n VM::new(RuntimeContext::default())\n\n }\n\n}\n\n\n", "file_path": "src/vm/vm.rs", "rank": 85, "score": 44520.982432678065 }, { "content": "}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::types::Builtins;\n\n\n\n use super::*;\n\n\n\n #[test]\n\n fn execute_simple_program() {\n\n let builtins = Builtins::new();\n\n let mut vm = VM::default();\n\n let i = vm.ctx.constants.add(vm.ctx.builtins.new_int(1));\n\n let j = vm.ctx.constants.add(vm.ctx.builtins.new_int(2));\n\n let instructions: Chunk = vec![\n\n Inst::LoadConst(i),\n\n Inst::LoadConst(j),\n\n Inst::BinaryOp(BinaryOperator::Add),\n\n Inst::Print,\n\n Inst::Halt(0),\n\n ];\n\n if let Ok(result) = vm.execute(instructions, false) {\n\n assert_eq!(result, VMState::Halted(0));\n\n }\n\n }\n\n}\n", "file_path": "src/vm/vm.rs", "rank": 86, "score": 44520.52373067159 }, { "content": " /// puts the items in \"logical\" order instead of having to\n\n /// remember to swap them in them in the calling code.\n\n fn pop_top_two(&mut self) -> Option<(usize, usize)> {\n\n let stack = &mut self.stack;\n\n match (stack.pop(), stack.pop()) {\n\n (Some(top), Some(next)) => Some((next, 
top)),\n\n (Some(top), None) => {\n\n stack.push(top);\n\n None\n\n }\n\n _ => None,\n\n }\n\n }\n\n\n\n // Call stack ------------------------------------------------------\n\n\n\n fn push_frame(&mut self, frame: Frame) {\n\n self.call_stack.push(frame);\n\n }\n\n\n", "file_path": "src/vm/vm.rs", "rank": 87, "score": 44519.42262076863 }, { "content": " }\n\n Inst::UnaryOp(op) => {\n\n if let Some(i) = self.pop() {\n\n let a = self.ctx.constants.get(i).unwrap();\n\n let value = match op {\n\n UnaryOperator::Plus => a.clone(), // no-op\n\n UnaryOperator::Negate => a.negate(&self.ctx)?,\n\n op => {\n\n // Operators that return bool\n\n let result = match op {\n\n UnaryOperator::AsBool => a.as_bool(&self.ctx)?,\n\n UnaryOperator::Not => a.not(&self.ctx)?,\n\n _ => unreachable!(),\n\n };\n\n self.push(if result { 1 } else { 2 });\n\n #[cfg(debug_assertions)]\n\n self.dis(dis, ip, &instructions);\n\n ip += 1;\n\n continue;\n\n }\n", "file_path": "src/vm/vm.rs", "rank": 88, "score": 44518.081812554134 }, { "content": " new_items.push(item.clone());\n\n }\n\n } else {\n\n new_items.push(item.clone());\n\n }\n\n }\n\n if num_formatted > 0 {\n\n let new_tuple = self.ctx.builtins.new_tuple(new_items);\n\n self.ctx.constants.replace(const_index, new_tuple);\n\n }\n\n }\n\n }\n\n Ok(())\n\n }\n\n\n\n /// Show the contents of the stack (top first).\n\n pub fn display_stack(&self) {\n\n if self.stack.is_empty() {\n\n return eprintln!(\"[EMPTY]\");\n\n }\n", "file_path": "src/vm/vm.rs", "rank": 89, "score": 44517.97638063902 }, { "content": " Inst::NoOp => {\n\n // do nothing\n\n }\n\n Inst::Push(value) => {\n\n self.push(*value);\n\n }\n\n Inst::Pop => {\n\n if self.stack.is_empty() {\n\n self.err(RuntimeErrKind::EmptyStack)?;\n\n }\n\n self.pop();\n\n }\n\n Inst::Jump(address) => {\n\n #[cfg(debug_assertions)]\n\n self.dis(dis, ip, &instructions);\n\n ip = *address;\n\n continue;\n\n }\n\n Inst::JumpIfElse(if_addr, else_addr) => {\n\n if let Some(i) = self.pop() {\n", 
"file_path": "src/vm/vm.rs", "rank": 90, "score": 44517.204269720714 }, { "content": " };\n\n let index = self.ctx.constants.add(value);\n\n self.push(index);\n\n } else {\n\n let message = format!(\"Unary op: {}\", op);\n\n self.err(RuntimeErrKind::NotEnoughValuesOnStack(message))?;\n\n };\n\n }\n\n Inst::BinaryOp(op) => {\n\n if let Some((i, j)) = self.pop_top_two() {\n\n let a = self.ctx.constants.get(i).unwrap();\n\n let b = self.ctx.constants.get(j).unwrap();\n\n let b = b.clone();\n\n let value = match op {\n\n BinaryOperator::Pow => a.pow(b, &self.ctx)?,\n\n BinaryOperator::Mul => a.mul(b, &self.ctx)?,\n\n BinaryOperator::Div => a.div(b, &self.ctx)?,\n\n BinaryOperator::FloorDiv => a.floor_div(b, &self.ctx)?,\n\n BinaryOperator::Mod => a.modulo(b, &self.ctx)?,\n\n BinaryOperator::Add => a.add(b, &self.ctx)?,\n", "file_path": "src/vm/vm.rs", "rank": 91, "score": 44517.05992643788 }, { "content": " BinaryOp(operator) => self.format_aligned(\"BINARY_OP\", operator),\n\n ScopeStart => format!(\"SCOPE_START\"),\n\n ScopeEnd(count) => self.format_aligned(\"SCOPE_END\", count),\n\n Print => match self.peek() {\n\n Some(index) => {\n\n let obj = self.ctx.get_obj(*index).unwrap();\n\n self.format_aligned(\"PRINT\", format!(\"{} ({:?})\", index, obj))\n\n }\n\n None => self.format_aligned(\"PRINT\", \"[EMPTY]\"),\n\n },\n\n Return => format!(\"RETURN\"),\n\n Halt(code) => self.format_aligned(\"HALT\", code),\n\n other => format!(\"{:?}\", other),\n\n }\n\n }\n\n\n\n /// Return a nicely formatted string of instructions.\n\n fn format_aligned<T: fmt::Display>(&self, name: &str, value: T) -> RustString {\n\n format!(\"{: <w$}{: <x$}\", name, value, w = 16, x = 4)\n\n }\n", "file_path": "src/vm/vm.rs", "rank": 92, "score": 44516.084053789054 }, { "content": " fn pop_frame(&mut self) -> Option<Frame> {\n\n self.call_stack.pop()\n\n }\n\n\n\n // Utilities -------------------------------------------------------\n\n\n\n fn err(&self, kind: RuntimeErrKind) -> ExeResult {\n\n 
Err(RuntimeErr::new(kind))\n\n }\n\n\n\n /// Format strings.\n\n ///\n\n /// This is called whenever an object is loaded (constant or var).\n\n /// If the object isn't a $ string or a tuple, this does nothing.\n\n ///\n\n /// If the object a $ string, it will be formatted and the formatted\n\n /// value will *replace* the original constant value.\n\n ///\n\n /// If the object a tuple, any $ string items will be formatted. If\n\n /// any $ strings are present, the original tuple will be *replaced*\n", "file_path": "src/vm/vm.rs", "rank": 93, "score": 44514.27134247972 }, { "content": " BinaryOperator::Sub => a.sub(b, &self.ctx)?,\n\n op => {\n\n // Operators that return bool\n\n let result = match op {\n\n BinaryOperator::IsEqual => {\n\n a.is_equal(b, &self.ctx)?\n\n }\n\n BinaryOperator::NotEqual => {\n\n a.not_equal(b, &self.ctx)?\n\n }\n\n BinaryOperator::And => a.and(b, &self.ctx)?,\n\n BinaryOperator::Or => a.or(b, &self.ctx)?,\n\n _ => unreachable!(),\n\n };\n\n self.push(if result { 1 } else { 2 });\n\n ip += 1;\n\n #[cfg(debug_assertions)]\n\n self.dis(dis, ip, &instructions);\n\n continue;\n\n }\n", "file_path": "src/vm/vm.rs", "rank": 94, "score": 44514.1771168961 }, { "content": " /// with a new tuple containing the formatted values.\n\n fn format_strings(&mut self, const_index: usize) -> Result<(), RuntimeErr> {\n\n if let Some(obj) = self.ctx.get_obj(const_index) {\n\n if let Some(string) = obj.as_any().downcast_ref::<String>() {\n\n if string.is_format_string() {\n\n let formatted = string.format(self)?;\n\n let formatted = Rc::new(formatted);\n\n self.ctx.constants.replace(const_index, formatted);\n\n }\n\n }\n\n if let Some(tuple) = obj.as_any().downcast_ref::<Tuple>() {\n\n let mut new_items: Vec<ObjectRef> = Vec::new();\n\n let mut num_formatted = 0;\n\n for item in tuple.items() {\n\n if let Some(string) = item.as_any().downcast_ref::<String>() {\n\n if string.is_format_string() {\n\n let formatted = string.format(self)?;\n\n 
new_items.push(Rc::new(formatted));\n\n num_formatted += 1;\n\n } else {\n", "file_path": "src/vm/vm.rs", "rank": 95, "score": 44511.61975507657 }, { "content": "\n\n // Stack -----------------------------------------------------------\n\n\n\n fn push(&mut self, item: usize) {\n\n self.stack.push(item);\n\n }\n\n\n\n pub fn pop(&mut self) -> Option<usize> {\n\n self.stack.pop()\n\n }\n\n\n\n pub fn peek(&self) -> Option<&usize> {\n\n self.stack.peek()\n\n }\n\n\n\n /// Pop top two items from stack *if* the stack has at least two\n\n /// items. If it doesn't, the stack remains unmodified.\n\n ///\n\n /// NOTE: The second item down the stack will be *first* and the\n\n /// first item will be *second* in the returned tuple. This\n", "file_path": "src/vm/vm.rs", "rank": 96, "score": 44510.89406601852 }, { "content": " self.format_aligned(\"PUSH\", format!(\"{} ({:?})\", index, obj))\n\n }\n\n Pop => format!(\"POP\"),\n\n Jump(address) => self.format_aligned(\n\n \"JUMP\",\n\n format!(\n\n \"{} ({})\",\n\n address,\n\n self.format_instruction(instructions, &instructions[*address])\n\n ),\n\n ),\n\n JumpIfElse(if_addr, else_addr) => match self.peek() {\n\n Some(index) => {\n\n let obj = self.ctx.get_obj(*index).unwrap();\n\n self.format_aligned(\n\n \"JUMP_IF_ELSE\",\n\n format!(\"{} ({}) ? {} : {:?}\", obj, index, if_addr, else_addr),\n\n )\n\n }\n\n None => self.format_aligned(\n", "file_path": "src/vm/vm.rs", "rank": 97, "score": 44510.185354889174 }, { "content": " \"JUMP_IF_ELSE\",\n\n format!(\"[EMPTY] ? 
{} : {}\", if_addr, else_addr),\n\n ),\n\n },\n\n LoadConst(index) => {\n\n let obj = self.ctx.get_obj(*index).unwrap();\n\n self.format_aligned(\"LOAD_CONST\", format!(\"{} ({:?})\", index, obj))\n\n }\n\n DeclareVar(name) => self.format_aligned(\"DECLARE_VAR\", name),\n\n AssignVar(name) => {\n\n let index = self.peek().unwrap_or(&0);\n\n let obj = self.ctx.get_obj(*index).unwrap();\n\n self.format_aligned(\"ASSIGN_VAR\", format!(\"{} ({:?})\", name, obj))\n\n }\n\n LoadVar(name) => {\n\n let index = self.peek().unwrap_or(&0);\n\n let obj = self.ctx.get_obj(*index).unwrap();\n\n self.format_aligned(\"LOAD_VAR\", format!(\"{} ({:?})\", name, obj))\n\n }\n\n UnaryOp(operator) => self.format_aligned(\"UNARY_OP\", operator),\n", "file_path": "src/vm/vm.rs", "rank": 98, "score": 44508.52577462964 }, { "content": " if cfg!(debug_assertions) {\n\n self.dis(dis, ip, &instructions);\n\n print = !dis;\n\n } else {\n\n print = true;\n\n }\n\n if print {\n\n if let Some(tuple) = val.as_any().downcast_ref::<Tuple>() {\n\n let items: Vec<RustString> = tuple\n\n .items()\n\n .into_iter()\n\n .map(|i| format!(\"{}\", i))\n\n .collect();\n\n println!(\"{}\", items.join(\" \"));\n\n } else {\n\n println!(\"{}\", val);\n\n }\n\n }\n\n self.pop();\n\n }\n", "file_path": "src/vm/vm.rs", "rank": 99, "score": 44505.228981756234 } ]
Rust
pkmnapi-db/src/sav/player_name.rs
kevinselwyn/pkmnapi
170e6b6c9a74e85c377137bc086793898c415125
use crate::error::{self, Result}; use crate::patch::*; use crate::sav::Sav; use crate::string::*; impl Sav { pub fn get_player_name(&self) -> Result<SavePlayerName> { let offset = 0x2598; let save_player_name = SavePlayerName::from(&self.sav[offset..(offset + 0x0B)]); Ok(save_player_name) } pub fn set_player_name(&self, save_player_name: &SavePlayerName) -> Result<Patch> { let offset = 0x2598; let save_player_name_raw = save_player_name.to_raw(); let save_player_name_raw_len = save_player_name_raw.len(); let max_len = 0x0A; if save_player_name_raw_len > max_len { return Err(error::Error::SavPlayerNameWrongSize( max_len, save_player_name_raw_len, )); } let padding = vec![ 0x50; { if save_player_name_raw_len != max_len { 0x01 } else { 0x00 } } ]; let save_player_name_raw = [save_player_name_raw, padding].concat(); Ok(Patch::new(&offset, &save_player_name_raw)) } } #[derive(Debug, PartialEq)] pub struct SavePlayerName { pub name: ROMString, } impl From<&[u8]> for SavePlayerName { fn from(sav: &[u8]) -> Self { let name_end_index = sav.iter().position(|&r| r == 0x50).unwrap_or(sav.len()); let name = ROMString::new(&sav[..name_end_index]); SavePlayerName { name } } } impl SavePlayerName { pub fn to_raw(&self) -> Vec<u8> { self.name.value[..].to_vec() } }
use crate::error::{self, Result}; use crate::patch::*; use crate::sav::Sav; use crate::string::*; impl Sav { pub fn get_player_name(&self) -> Result<SavePlayerName> { let offset = 0x2598; let save_player_name = SavePlayerName::from(&self.sav[offset..(offset + 0x0B)]); Ok(save_player_name) } pub fn set_player_name(&self, save_player_name: &SavePlayerName) -> Result<Patch> { let offset = 0x2598; let save_player_name_raw = save_player_name.to_raw(); let save_player_name_raw_len = save_player_name_raw.len(); let max_len = 0x0A; if save_player_name_raw_len > max_len { return Err(error::Error::SavPlayerNameWrongSize( max_len, save_player_name_raw_len, )); } let padding = vec![
_index]); SavePlayerName { name } } } impl SavePlayerName { pub fn to_raw(&self) -> Vec<u8> { self.name.value[..].to_vec() } }
0x50; { if save_player_name_raw_len != max_len { 0x01 } else { 0x00 } } ]; let save_player_name_raw = [save_player_name_raw, padding].concat(); Ok(Patch::new(&offset, &save_player_name_raw)) } } #[derive(Debug, PartialEq)] pub struct SavePlayerName { pub name: ROMString, } impl From<&[u8]> for SavePlayerName { fn from(sav: &[u8]) -> Self { let name_end_index = sav.iter().position(|&r| r == 0x50).unwrap_or(sav.len()); let name = ROMString::new(&sav[..name_end
random
[ { "content": "#[allow(dead_code)]\n\n#[allow(non_snake_case)]\n\npub fn load_sav() -> Vec<u8> {\n\n let PKMN_SAV = env::var(\"PKMN_SAV\")\n\n .expect(\"Set the PKMN_SAV environment variable to point to the SAV location\");\n\n\n\n fs::read(PKMN_SAV).unwrap()\n\n}\n\n\n", "file_path": "pkmnapi-api/tests/common.rs", "rank": 0, "score": 251374.7091068284 }, { "content": "#[allow(dead_code)]\n\npub fn assert_headers(headers: &HeaderMap<'_>, expected: Vec<(&str, &str)>) -> Result<(), String> {\n\n let mut headers = headers.clone();\n\n\n\n for (key, val) in &expected {\n\n let header = match headers.get_one(key) {\n\n Some(header) => header,\n\n None => return Err(format!(\"Could not find header: {}\", key)),\n\n };\n\n\n\n if val.len() != 0 {\n\n assert_eq!(header, *val);\n\n }\n\n\n\n headers.remove(key);\n\n }\n\n\n\n for header in headers.iter() {\n\n panic!(\"Extra header found: {}\", header);\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "pkmnapi-api/tests/common.rs", "rank": 1, "score": 214637.4988996217 }, { "content": "#[openapi]\n\n#[delete(\"/savs\")]\n\npub fn delete_sav(\n\n sql: State<PkmnapiSQL>,\n\n _rate_limit: RateLimit,\n\n access_token: Result<AccessToken, AccessTokenError>,\n\n if_match: Result<IfMatch, IfMatchError>,\n\n) -> Result<status::NoContent, ResponseError> {\n\n let access_token = utils::get_access_token(access_token)?;\n\n let etag = utils::get_etag(if_match)?;\n\n let connection = sql.get_connection().unwrap();\n\n\n\n match sql.delete_user_sav_by_access_token(&connection, &access_token, &etag) {\n\n Ok(_) => {}\n\n Err(pkmnapi_sql::error::Error::ETagError) => return Err(ETagErrorMismatch::new()),\n\n Err(_) => return Err(SavErrorNoSav::new()),\n\n }\n\n\n\n Ok(status::NoContent)\n\n}\n", "file_path": "pkmnapi-api/src/routes/savs.rs", "rank": 2, "score": 205337.7199725315 }, { "content": "#[allow(dead_code)]\n\n#[allow(non_snake_case)]\n\npub fn load_rom() -> Vec<u8> {\n\n let PKMN_ROM = env::var(\"PKMN_ROM\")\n\n .expect(\"Set the 
PKMN_ROM environment variable to point to the ROM location\");\n\n\n\n fs::read(PKMN_ROM).unwrap()\n\n}\n\n\n", "file_path": "pkmnapi-api/tests/common.rs", "rank": 3, "score": 203729.58762690812 }, { "content": "#[get(\"/savs\")]\n\npub fn get_sav<'a>(\n\n sql: State<PkmnapiSQL>,\n\n _rate_limit: RateLimit,\n\n access_token: Result<AccessToken, AccessTokenError>,\n\n) -> Result<Response<'a>, ResponseError> {\n\n let access_token = utils::get_access_token(access_token)?;\n\n\n\n let connection = sql.get_connection().unwrap();\n\n let sav = match sql.select_user_sav_by_access_token(&connection, &access_token) {\n\n Ok(Some(sav)) => sav,\n\n _ => return Err(SavErrorNoSav::new()),\n\n };\n\n\n\n let response = SavResponse::new(&sav);\n\n let body = serde_json::to_string(&response).unwrap();\n\n\n\n let response = Response::build()\n\n .header(ContentType::JSON)\n\n .header(Header::new(\"ETag\", sav.etag))\n\n .sized_body(Cursor::new(body))\n\n .finalize();\n\n\n\n Ok(response)\n\n}\n\n\n", "file_path": "pkmnapi-api/src/routes/savs.rs", "rank": 4, "score": 200772.53976673915 }, { "content": "#[post(\"/savs\", data = \"<data>\")]\n\npub fn post_sav<'a>(\n\n sql: State<PkmnapiSQL>,\n\n _rate_limit: RateLimit,\n\n data: Data,\n\n access_token: Result<AccessToken, AccessTokenError>,\n\n) -> Result<Response<'a>, ResponseError> {\n\n let access_token = utils::get_access_token(access_token)?;\n\n let sav = utils::get_data_raw(data);\n\n\n\n if let Err(_) = Sav::new(&sav) {\n\n return Err(SavErrorInvalidSav::new());\n\n }\n\n\n\n let connection = sql.get_connection().unwrap();\n\n let sav = match sql.update_user_sav_by_access_token(&connection, &access_token, &sav) {\n\n Ok(sav) => sav,\n\n Err(_) => return Err(SavErrorSavExists::new()),\n\n };\n\n\n\n let response = SavResponse::new(&sav);\n", "file_path": "pkmnapi-api/src/routes/savs.rs", "rank": 5, "score": 200772.37008493696 }, { "content": "#[openapi]\n\n#[get(\"/savs/player_names\")]\n\npub fn get_sav_player_name(\n\n 
sql: State<PkmnapiSQL>,\n\n _rate_limit: RateLimit,\n\n access_token: Result<AccessToken, AccessTokenError>,\n\n) -> Result<Json<SavPlayerNameResponse>, ResponseError> {\n\n let access_token = utils::get_access_token(access_token)?;\n\n let (db, _) = utils::get_db_with_applied_patches(&sql, &access_token)?;\n\n\n\n let sav = match db.sav {\n\n Some(sav) => sav,\n\n None => return Err(SavErrorNoSav::new()),\n\n };\n\n\n\n let player_id = sav.get_player_id()?;\n\n let player_name = sav.get_player_name()?;\n\n\n\n let response = SavPlayerNameResponse::new(&player_id, &player_name);\n\n\n\n Ok(Json(response))\n\n}\n\n\n", "file_path": "pkmnapi-api/src/routes/sav_player_names.rs", "rank": 6, "score": 192956.72098635035 }, { "content": "#[openapi]\n\n#[post(\"/savs/player_names\", format = \"application/json\", data = \"<data>\")]\n\npub fn post_sav_player_name(\n\n sql: State<PkmnapiSQL>,\n\n _rate_limit: RateLimit,\n\n access_token: Result<AccessToken, AccessTokenError>,\n\n patch_description: Result<PatchDescription, PatchDescriptionError>,\n\n data: Result<Json<SavPlayerNameRequest>, JsonError>,\n\n) -> Result<status::Accepted<JsonValue>, ResponseError> {\n\n let access_token = utils::get_access_token(access_token)?;\n\n let data = utils::get_data(data, BaseErrorResponseId::error_sav_player_names_invalid)?;\n\n let (db, connection) = utils::get_db(&sql, &access_token)?;\n\n\n\n let sav = match db.sav {\n\n Some(sav) => sav,\n\n None => return Err(SavErrorNoSav::new()),\n\n };\n\n\n\n let player_name = SavePlayerName {\n\n name: ROMString::from(data.get_name()),\n\n };\n\n\n", "file_path": "pkmnapi-api/src/routes/sav_player_names.rs", "rank": 7, "score": 192956.34426177625 }, { "content": "/// Generate eTag\n\n///\n\n/// # Example\n\n///\n\n/// ```\n\n/// use pkmnapi_sql::utils;\n\n///\n\n/// let etag = utils::etag(&\"bar\".to_owned().into_bytes());\n\n///\n\n/// assert_eq!(etag, \"w/\\\"37b51d194a7513e45b56f6524f2d51f2\\\"\");\n\n/// ```\n\npub fn etag(body: 
&Vec<u8>) -> String {\n\n let content = hash(body);\n\n\n\n format!(\"w/\\\"{}\\\"\", content)\n\n}\n", "file_path": "pkmnapi-sql/src/utils.rs", "rank": 8, "score": 191095.8319928003 }, { "content": "/// Generate hash string\n\n///\n\n/// # Example\n\n///\n\n/// ```\n\n/// use pkmnapi_sql::utils;\n\n///\n\n/// let hash = utils::hash(&\"bar\".to_owned().into_bytes());\n\n///\n\n/// assert_eq!(hash, \"37b51d194a7513e45b56f6524f2d51f2\");\n\n/// ```\n\npub fn hash(value: &Vec<u8>) -> String {\n\n format!(\"{:02x}\", md5::compute(value))\n\n}\n\n\n", "file_path": "pkmnapi-sql/src/utils.rs", "rank": 9, "score": 191095.71694048867 }, { "content": "pub fn get_etag(if_match: Result<IfMatch, IfMatchError>) -> Result<String, ResponseError> {\n\n match if_match {\n\n Ok(if_match) => Ok(if_match.into_inner()),\n\n Err(_) => Err(ETagErrorMissing::new()),\n\n }\n\n}\n\n\n", "file_path": "pkmnapi-api/src/utils.rs", "rank": 10, "score": 187147.47973784257 }, { "content": "pub fn get_data_raw(data: Data) -> Vec<u8> {\n\n let mut raw = Vec::new();\n\n\n\n data.stream_to(&mut raw).unwrap();\n\n\n\n raw\n\n}\n\n\n", "file_path": "pkmnapi-api/src/utils.rs", "rank": 11, "score": 184819.57367290964 }, { "content": "#[allow(dead_code)]\n\npub fn assert_unauthorized(response: &mut Response) -> Result<(), String> {\n\n let response_body = response.body_string().unwrap();\n\n let headers = response.headers();\n\n\n\n let body = json!({\n\n \"data\": {\n\n \"id\": \"error_access_tokens_unauthorized\",\n\n \"type\": \"errors\",\n\n \"attributes\": {\n\n \"message\": \"Authorization header must be set\"\n\n }\n\n }\n\n });\n\n\n\n assert_eq!(response_body, body.to_string());\n\n assert_eq!(response.status(), Status::Unauthorized);\n\n\n\n assert_headers(\n\n headers,\n\n vec![\n\n (\"Content-Type\", \"application/json\"),\n\n (\"Server\", \"pkmnapi/0.1.0\"),\n\n ],\n\n )\n\n}\n\n\n", "file_path": "pkmnapi-api/tests/common.rs", "rank": 12, "score": 181137.76371289205 }, { "content": 
"#[catch(404)]\n\npub fn not_found(_req: &Request) -> Result<ResponseError, ResponseError> {\n\n Ok(NotFoundError::new(\n\n BaseErrorResponseId::error_not_found,\n\n None,\n\n ))\n\n}\n\n\n", "file_path": "pkmnapi-api/src/routes/errors.rs", "rank": 13, "score": 175409.46576456228 }, { "content": "#[catch(429)]\n\npub fn too_many_requests<'a>(req: &Request) -> Result<Response<'a>, ResponseError> {\n\n let wait_time = match req.guard::<RateLimit>() {\n\n Outcome::Failure((_, RateLimitError::TooManyRequests(wait_time))) => wait_time,\n\n _ => 0,\n\n };\n\n\n\n let response = TooManyRequestsError::new(wait_time);\n\n let body = serde_json::to_string(&response).unwrap();\n\n\n\n let response = Response::build()\n\n .status(Status::TooManyRequests)\n\n .header(ContentType::JSON)\n\n .sized_body(Cursor::new(body))\n\n .finalize();\n\n\n\n Ok(response)\n\n}\n\n\n", "file_path": "pkmnapi-api/src/routes/errors.rs", "rank": 14, "score": 169475.90695515898 }, { "content": "#[catch(500)]\n\npub fn internal_server_error<'a>(_req: &Request) -> Result<Response<'a>, ResponseError> {\n\n let response = InternalServerError::new();\n\n let body = serde_json::to_string(&response).unwrap();\n\n\n\n let response = Response::build()\n\n .status(Status::InternalServerError)\n\n .header(ContentType::JSON)\n\n .sized_body(Cursor::new(body))\n\n .finalize();\n\n\n\n Ok(response)\n\n}\n", "file_path": "pkmnapi-api/src/routes/errors.rs", "rank": 15, "score": 166792.19339796045 }, { "content": "#[allow(dead_code)]\n\npub fn post_sav(client: &Client, access_token: &String) {\n\n let sav = load_sav();\n\n\n\n client\n\n .post(\"/v1/savs\")\n\n .body(&sav)\n\n .header(auth_header(&access_token))\n\n .dispatch();\n\n}\n\n\n", "file_path": "pkmnapi-api/tests/common.rs", "rank": 16, "score": 165155.7152893328 }, { "content": "pub fn from_numeric_str<'de, T, D>(deserializer: D) -> Result<T, D::Error>\n\nwhere\n\n T: FromStr,\n\n T::Err: Display,\n\n D: Deserializer<'de>,\n\n{\n\n let s = 
String::deserialize(deserializer)?;\n\n\n\n T::from_str(&s).map_err(de::Error::custom)\n\n}\n", "file_path": "pkmnapi-api/src/utils.rs", "rank": 17, "score": 159600.41422436095 }, { "content": "pub fn get_db(\n\n sql: &State<PkmnapiSQL>,\n\n access_token: &String,\n\n) -> Result<(PkmnapiDB, PgPooledConnection), ResponseError> {\n\n let connection = sql.get_connection().unwrap();\n\n let rom_data = match sql.select_user_rom_data_by_access_token(&connection, &access_token) {\n\n Ok(Some(rom_data)) => rom_data,\n\n Ok(None) => return Err(RomErrorNoRom::new()),\n\n _ => {\n\n return Err(AccessTokenErrorInvalid::new(\n\n &\"Invalid access token\".to_owned(),\n\n ))\n\n }\n\n };\n\n\n\n let mut db = PkmnapiDB::new(&rom_data.data);\n\n\n\n match sql.select_user_sav_by_access_token(&connection, &access_token) {\n\n Ok(Some(sav)) => {\n\n db.sav(sav.data);\n", "file_path": "pkmnapi-api/src/utils.rs", "rank": 18, "score": 145996.01461366832 }, { "content": "#[openapi]\n\n#[get(\"/trades\")]\n\npub fn get_trade_all(\n\n sql: State<PkmnapiSQL>,\n\n _rate_limit: RateLimit,\n\n access_token: Result<AccessToken, AccessTokenError>,\n\n) -> Result<Json<TradeResponseAll>, ResponseError> {\n\n let access_token = utils::get_access_token(access_token)?;\n\n let (db, _) = utils::get_db_with_applied_patches(&sql, &access_token)?;\n\n\n\n let (min_trade_id, max_trade_id) = db.trade_id_bounds();\n\n let trade_ids: Vec<u8> = (min_trade_id..=max_trade_id)\n\n .map(|trade_id| trade_id as u8)\n\n .collect();\n\n let trades = db.get_trade_all(&trade_ids)?;\n\n let pokedex_ids = trades\n\n .iter()\n\n .map(|(_, trade)| vec![trade.give_pokedex_id, trade.get_pokedex_id])\n\n .flatten()\n\n .collect();\n\n let pokemon_names = db.get_pokemon_name_all(&pokedex_ids)?;\n\n\n\n let response = TradeResponseAll::new(&trade_ids, &trades, &pokemon_names);\n\n\n\n Ok(Json(response))\n\n}\n\n\n", "file_path": "pkmnapi-api/src/routes/trades.rs", "rank": 19, "score": 143531.09523317707 }, { "content": 
"#[openapi]\n\n#[post(\"/trades/<trade_id>\", format = \"application/json\", data = \"<data>\")]\n\npub fn post_trade(\n\n sql: State<PkmnapiSQL>,\n\n _rate_limit: RateLimit,\n\n access_token: Result<AccessToken, AccessTokenError>,\n\n patch_description: Result<PatchDescription, PatchDescriptionError>,\n\n data: Result<Json<TradeRequest>, JsonError>,\n\n trade_id: u8,\n\n) -> Result<status::Accepted<JsonValue>, ResponseError> {\n\n let access_token = utils::get_access_token(access_token)?;\n\n let data = utils::get_data(data, BaseErrorResponseId::error_trades_invalid)?;\n\n let (db, connection) = utils::get_db(&sql, &access_token)?;\n\n\n\n let trade = Trade::new(\n\n data.get_give_pokedex_id(),\n\n data.get_get_pokedex_id(),\n\n data.get_nickname(),\n\n );\n\n\n\n let patch = db.set_trade(&trade_id, &trade)?;\n\n\n", "file_path": "pkmnapi-api/src/routes/trades.rs", "rank": 20, "score": 143531.09523317707 }, { "content": "pub fn insert_rom_patch(\n\n sql: State<PkmnapiSQL>,\n\n connection: PgPooledConnection,\n\n access_token: String,\n\n patch: Patch,\n\n patch_description: Result<PatchDescription, PatchDescriptionError>,\n\n error_id: BaseErrorResponseId,\n\n) -> Result<(), ResponseError> {\n\n let patch_description = get_patch_description(patch_description);\n\n\n\n match sql.insert_rom_patch(\n\n &connection,\n\n &access_token,\n\n &patch.to_raw(),\n\n patch_description,\n\n ) {\n\n Ok(_) => Ok(()),\n\n Err(e) => return Err(NotFoundError::new(error_id, Some(e.to_string()))),\n\n }\n\n}\n\n\n", "file_path": "pkmnapi-api/src/utils.rs", "rank": 21, "score": 143531.09523317707 }, { "content": "pub fn get_access_token(\n\n access_token: Result<AccessToken, AccessTokenError>,\n\n) -> Result<String, ResponseError> {\n\n match access_token {\n\n Ok(access_token) => Ok(access_token.into_inner()),\n\n Err(_) => Err(AccessTokenErrorUnauthorized::new()),\n\n }\n\n}\n\n\n", "file_path": "pkmnapi-api/src/utils.rs", "rank": 22, "score": 143531.09523317707 }, { "content": 
"#[openapi]\n\n#[delete(\"/roms\")]\n\npub fn delete_rom(\n\n sql: State<PkmnapiSQL>,\n\n _rate_limit: RateLimit,\n\n access_token: Result<AccessToken, AccessTokenError>,\n\n if_match: Result<IfMatch, IfMatchError>,\n\n) -> Result<status::NoContent, ResponseError> {\n\n let access_token = utils::get_access_token(access_token)?;\n\n let etag = utils::get_etag(if_match)?;\n\n let connection = sql.get_connection().unwrap();\n\n\n\n match sql.delete_user_rom_by_access_token(&connection, &access_token, &etag) {\n\n Ok(_) => {}\n\n Err(pkmnapi_sql::error::Error::ETagError) => return Err(ETagErrorMismatch::new()),\n\n Err(_) => return Err(RomErrorNoRom::new()),\n\n }\n\n\n\n Ok(status::NoContent)\n\n}\n", "file_path": "pkmnapi-api/src/routes/roms.rs", "rank": 23, "score": 143531.09523317707 }, { "content": "#[openapi]\n\n#[get(\"/trades/<trade_id>\")]\n\npub fn get_trade(\n\n sql: State<PkmnapiSQL>,\n\n _rate_limit: RateLimit,\n\n access_token: Result<AccessToken, AccessTokenError>,\n\n trade_id: u8,\n\n) -> Result<Json<TradeResponse>, ResponseError> {\n\n let access_token = utils::get_access_token(access_token)?;\n\n let (db, _) = utils::get_db_with_applied_patches(&sql, &access_token)?;\n\n\n\n let trade = db.get_trade(&trade_id)?;\n\n let pokedex_ids = vec![trade.give_pokedex_id, trade.get_pokedex_id];\n\n let pokemon_names = db.get_pokemon_name_all(&pokedex_ids)?;\n\n\n\n let response = TradeResponse::new(&trade_id, &trade, &pokemon_names);\n\n\n\n Ok(Json(response))\n\n}\n\n\n", "file_path": "pkmnapi-api/src/routes/trades.rs", "rank": 24, "score": 143531.09523317707 }, { "content": "pub fn get_patch_description(\n\n patch_description: Result<PatchDescription, PatchDescriptionError>,\n\n) -> Option<String> {\n\n match patch_description {\n\n Ok(patch_description) => patch_description.into_inner(),\n\n Err(_) => None,\n\n }\n\n}\n\n\n", "file_path": "pkmnapi-api/src/utils.rs", "rank": 25, "score": 143531.09523317707 }, { "content": "pub fn setup() -> Client {\n\n let 
api = Pkmnapi::init();\n\n let client = Client::new(api).unwrap();\n\n\n\n client\n\n}\n\n\n", "file_path": "pkmnapi-api/tests/common.rs", "rank": 26, "score": 141430.748405605 }, { "content": "pub fn get_db_with_applied_patches(\n\n sql: &State<PkmnapiSQL>,\n\n access_token: &String,\n\n) -> Result<(PkmnapiDB, PgPooledConnection), ResponseError> {\n\n let (mut db, connection) = get_db(sql, access_token)?;\n\n\n\n let rom_patches = match sql.select_rom_patches_by_access_token(&connection, &access_token) {\n\n Ok(patches) => patches,\n\n Err(_) => vec![],\n\n };\n\n\n\n let sav_patches = match sql.select_sav_patches_by_access_token(&connection, &access_token) {\n\n Ok(patches) => patches,\n\n Err(_) => vec![],\n\n };\n\n\n\n for patch in rom_patches {\n\n db.apply_patch(patch.data);\n\n }\n\n\n\n if let Some(ref mut sav) = db.sav {\n\n for patch in sav_patches {\n\n sav.apply_patch(patch.data);\n\n }\n\n }\n\n\n\n Ok((db, connection))\n\n}\n\n\n", "file_path": "pkmnapi-api/src/utils.rs", "rank": 27, "score": 141184.622873297 }, { "content": "#[get(\"/roms\")]\n\npub fn get_rom<'a>(\n\n sql: State<PkmnapiSQL>,\n\n _rate_limit: RateLimit,\n\n access_token: Result<AccessToken, AccessTokenError>,\n\n) -> Result<Response<'a>, ResponseError> {\n\n let access_token = utils::get_access_token(access_token)?;\n\n\n\n let connection = sql.get_connection().unwrap();\n\n let rom_sql = match sql.select_user_rom_by_access_token(&connection, &access_token) {\n\n Ok(Some(rom_sql)) => rom_sql,\n\n _ => return Err(RomErrorNoRom::new()),\n\n };\n\n\n\n let response = RomResponse::new(&rom_sql);\n\n let body = serde_json::to_string(&response).unwrap();\n\n\n\n let response = Response::build()\n\n .header(ContentType::JSON)\n\n .header(Header::new(\"ETag\", rom_sql.etag))\n\n .sized_body(Cursor::new(body))\n\n .finalize();\n\n\n\n Ok(response)\n\n}\n\n\n", "file_path": "pkmnapi-api/src/routes/roms.rs", "rank": 28, "score": 138965.82902511375 }, { "content": "pub fn get_data<T>(\n\n data: 
Result<Json<T>, JsonError>,\n\n error_id: BaseErrorResponseId,\n\n) -> Result<T, ResponseError> {\n\n match data {\n\n Ok(data) => Ok(data.into_inner()),\n\n Err(JsonError::Parse(_, e)) => {\n\n return Err(BadRequestError::new(error_id, Some(e.to_string())));\n\n }\n\n _ => {\n\n return Err(BadRequestError::new(\n\n error_id,\n\n Some(\"An unknown error occurred\".to_owned()),\n\n ));\n\n }\n\n }\n\n}\n\n\n", "file_path": "pkmnapi-api/src/utils.rs", "rank": 29, "score": 138965.82902511375 }, { "content": "#[get(\"/icons/<icon_id>\", format = \"image/gif\")]\n\npub fn get_icon<'a>(\n\n sql: State<PkmnapiSQL>,\n\n _rate_limit: RateLimit,\n\n access_token: Result<AccessToken, AccessTokenError>,\n\n icon_id: u8,\n\n) -> Result<Response<'a>, ResponseError> {\n\n let access_token = utils::get_access_token(access_token)?;\n\n let (db, _) = utils::get_db_with_applied_patches(&sql, &access_token)?;\n\n\n\n let icon = db.get_icon(&icon_id)?;\n\n let gif = icon.to_gif(26)?;\n\n\n\n let response = Response::build()\n\n .header(ContentType::GIF)\n\n .header(Header::new(\n\n \"Content-Disposition\",\n\n format!(r#\"attachment; filename=\"icon-{}.gif\"\"#, icon_id),\n\n ))\n\n .sized_body(Cursor::new(gif))\n\n .finalize();\n\n\n\n Ok(response)\n\n}\n", "file_path": "pkmnapi-api/src/routes/icons.rs", "rank": 30, "score": 138965.82902511375 }, { "content": "#[post(\"/roms\", data = \"<data>\")]\n\npub fn post_rom<'a>(\n\n sql: State<PkmnapiSQL>,\n\n _rate_limit: RateLimit,\n\n data: Data,\n\n access_token: Result<AccessToken, AccessTokenError>,\n\n) -> Result<Response<'a>, ResponseError> {\n\n let access_token = utils::get_access_token(access_token)?;\n\n let rom_data = utils::get_data_raw(data);\n\n\n\n let db = match PkmnapiDB::new(&rom_data).build() {\n\n Ok(db) => db,\n\n Err(_) => return Err(RomErrorInvalidRom::new()),\n\n };\n\n\n\n let connection = sql.get_connection().unwrap();\n\n let rom = match sql.update_user_rom_by_access_token(\n\n &connection,\n\n &access_token,\n\n 
&db.header.title,\n\n &rom_data,\n", "file_path": "pkmnapi-api/src/routes/roms.rs", "rank": 31, "score": 138965.82902511375 }, { "content": "pub fn post_pokemon_learnset(\n\n sql: State<PkmnapiSQL>,\n\n _rate_limit: RateLimit,\n\n access_token: Result<AccessToken, AccessTokenError>,\n\n patch_description: Result<PatchDescription, PatchDescriptionError>,\n\n data: Result<Json<PokemonLearnsetRequest>, JsonError>,\n\n pokedex_id: u8,\n\n) -> Result<status::Accepted<JsonValue>, ResponseError> {\n\n let access_token = utils::get_access_token(access_token)?;\n\n let data = utils::get_data(data, BaseErrorResponseId::error_pokemon_learnsets_invalid)?;\n\n let (db, connection) = utils::get_db(&sql, &access_token)?;\n\n\n\n let pokemon_learnset = data.get_learnset();\n\n\n\n let patch = db.set_pokemon_learnset(&pokedex_id, &pokemon_learnset)?;\n\n\n\n utils::insert_rom_patch(\n\n sql,\n\n connection,\n\n access_token,\n\n patch,\n\n patch_description,\n\n BaseErrorResponseId::error_pokemon_learnsets,\n\n )?;\n\n\n\n Ok(status::Accepted(Some(json!({}))))\n\n}\n", "file_path": "pkmnapi-api/src/routes/pokemon_learnsets.rs", "rank": 32, "score": 138948.260230655 }, { "content": "#[openapi]\n\n#[post(\"/hms/moves/<hm_id>\", format = \"application/json\", data = \"<data>\")]\n\npub fn post_hm_move(\n\n sql: State<PkmnapiSQL>,\n\n _rate_limit: RateLimit,\n\n access_token: Result<AccessToken, AccessTokenError>,\n\n patch_description: Result<PatchDescription, PatchDescriptionError>,\n\n data: Result<Json<HMMoveRequest>, JsonError>,\n\n hm_id: u8,\n\n) -> Result<status::Accepted<JsonValue>, ResponseError> {\n\n let access_token = utils::get_access_token(access_token)?;\n\n let data = utils::get_data(data, BaseErrorResponseId::error_hms_invalid)?;\n\n let (db, connection) = utils::get_db(&sql, &access_token)?;\n\n\n\n let hm_move = HMMove {\n\n move_id: data.get_move_id(),\n\n };\n\n\n\n let patch = db.set_hm_move(&hm_id, &hm_move)?;\n\n\n\n utils::insert_rom_patch(\n\n sql,\n\n 
connection,\n\n access_token,\n\n patch,\n\n patch_description,\n\n BaseErrorResponseId::error_hms,\n\n )?;\n\n\n\n Ok(status::Accepted(Some(json!({}))))\n\n}\n", "file_path": "pkmnapi-api/src/routes/hm_moves.rs", "rank": 33, "score": 138948.260230655 }, { "content": "#[openapi]\n\n#[get(\"/maps/pokemon/<map_id>\")]\n\npub fn get_map_pokemon(\n\n sql: State<PkmnapiSQL>,\n\n _rate_limit: RateLimit,\n\n access_token: Result<AccessToken, AccessTokenError>,\n\n map_id: u8,\n\n) -> Result<Json<MapPokemonResponse>, ResponseError> {\n\n let access_token = utils::get_access_token(access_token)?;\n\n let (db, _) = utils::get_db_with_applied_patches(&sql, &access_token)?;\n\n\n\n let map_pokemon = db.get_map_pokemon(&map_id)?;\n\n let pokedex_ids = vec![\n\n map_pokemon\n\n .grass\n\n .pokemon\n\n .iter()\n\n .map(|pokemon| pokemon.pokedex_id)\n\n .collect::<Vec<u8>>(),\n\n map_pokemon\n\n .water\n\n .pokemon\n", "file_path": "pkmnapi-api/src/routes/map_pokemon.rs", "rank": 34, "score": 138948.260230655 }, { "content": "#[openapi]\n\n#[get(\"/pokedex/entries\")]\n\npub fn get_pokedex_entry_all(\n\n sql: State<PkmnapiSQL>,\n\n _rate_limit: RateLimit,\n\n access_token: Result<AccessToken, AccessTokenError>,\n\n) -> Result<Json<PokedexEntryResponseAll>, ResponseError> {\n\n let access_token = utils::get_access_token(access_token)?;\n\n let (db, _) = utils::get_db_with_applied_patches(&sql, &access_token)?;\n\n\n\n let (min_pokedex_id, max_pokedex_id) = db.pokedex_id_bounds();\n\n let pokedex_ids: Vec<u8> = (min_pokedex_id..=max_pokedex_id)\n\n .map(|pokedex_id| pokedex_id as u8)\n\n .collect();\n\n let pokedex_entries = db.get_pokedex_entry_all(&pokedex_ids)?;\n\n\n\n let response = PokedexEntryResponseAll::new(&pokedex_ids, &pokedex_entries);\n\n\n\n Ok(Json(response))\n\n}\n\n\n", "file_path": "pkmnapi-api/src/routes/pokedex_entries.rs", "rank": 35, "score": 138948.260230655 }, { "content": "pub fn post_pokemon_icon(\n\n sql: State<PkmnapiSQL>,\n\n _rate_limit: RateLimit,\n\n 
access_token: Result<AccessToken, AccessTokenError>,\n\n patch_description: Result<PatchDescription, PatchDescriptionError>,\n\n data: Result<Json<PokemonIconRequest>, JsonError>,\n\n pokedex_id: u8,\n\n) -> Result<status::Accepted<JsonValue>, ResponseError> {\n\n let access_token = utils::get_access_token(access_token)?;\n\n let data = utils::get_data(data, BaseErrorResponseId::error_pokemon_icons_invalid)?;\n\n let (db, connection) = utils::get_db(&sql, &access_token)?;\n\n\n\n let pokemon_icon = PokemonIcon::from(&data.get_icon_id());\n\n\n\n let patch = db.set_pokemon_icon(&pokedex_id, &pokemon_icon)?;\n\n\n\n utils::insert_rom_patch(\n\n sql,\n\n connection,\n\n access_token,\n\n patch,\n\n patch_description,\n\n BaseErrorResponseId::error_pokemon_icons,\n\n )?;\n\n\n\n Ok(status::Accepted(Some(json!({}))))\n\n}\n", "file_path": "pkmnapi-api/src/routes/pokemon_icons.rs", "rank": 36, "score": 138948.260230655 }, { "content": "#[openapi]\n\n#[get(\"/pokemon/names/<pokedex_id>\")]\n\npub fn get_pokemon_name(\n\n sql: State<PkmnapiSQL>,\n\n _rate_limit: RateLimit,\n\n access_token: Result<AccessToken, AccessTokenError>,\n\n pokedex_id: u8,\n\n) -> Result<Json<PokemonNameResponse>, ResponseError> {\n\n let access_token = utils::get_access_token(access_token)?;\n\n let (db, _) = utils::get_db_with_applied_patches(&sql, &access_token)?;\n\n\n\n let pokemon_name = db.get_pokemon_name(&pokedex_id)?;\n\n\n\n let response = PokemonNameResponse::new(&pokedex_id, &pokemon_name);\n\n\n\n Ok(Json(response))\n\n}\n\n\n\n#[openapi]\n\n#[post(\n\n \"/pokemon/names/<pokedex_id>\",\n\n format = \"application/json\",\n\n data = \"<data>\"\n\n)]\n", "file_path": "pkmnapi-api/src/routes/pokemon_names.rs", "rank": 37, "score": 138948.260230655 }, { "content": "#[openapi]\n\n#[get(\"/tms/moves/<tm_id>\")]\n\npub fn get_tm_move(\n\n sql: State<PkmnapiSQL>,\n\n _rate_limit: RateLimit,\n\n access_token: Result<AccessToken, AccessTokenError>,\n\n tm_id: u8,\n\n) -> 
Result<Json<TMMoveResponse>, ResponseError> {\n\n let access_token = utils::get_access_token(access_token)?;\n\n let (db, _) = utils::get_db_with_applied_patches(&sql, &access_token)?;\n\n\n\n let tm_move = db.get_tm_move(&tm_id)?;\n\n let move_name = db.get_move_name(&tm_move.move_id)?;\n\n\n\n let response = TMMoveResponse::new(&tm_id, &tm_move, &move_name);\n\n\n\n Ok(Json(response))\n\n}\n\n\n", "file_path": "pkmnapi-api/src/routes/tm_moves.rs", "rank": 38, "score": 138948.260230655 }, { "content": "#[openapi]\n\n#[post(\"/tms/prices/<tm_id>\", format = \"application/json\", data = \"<data>\")]\n\npub fn post_tm_price(\n\n sql: State<PkmnapiSQL>,\n\n _rate_limit: RateLimit,\n\n access_token: Result<AccessToken, AccessTokenError>,\n\n patch_description: Result<PatchDescription, PatchDescriptionError>,\n\n data: Result<Json<TMPriceRequest>, JsonError>,\n\n tm_id: u8,\n\n) -> Result<status::Accepted<JsonValue>, ResponseError> {\n\n let access_token = utils::get_access_token(access_token)?;\n\n let data = utils::get_data(data, BaseErrorResponseId::error_tm_prices_invalid)?;\n\n let (db, connection) = utils::get_db(&sql, &access_token)?;\n\n\n\n let tm_price = TMPrice {\n\n value: data.get_price(),\n\n };\n\n\n\n let patch = db.set_tm_price(&tm_id, &tm_price)?;\n\n\n\n utils::insert_rom_patch(\n\n sql,\n\n connection,\n\n access_token,\n\n patch,\n\n patch_description,\n\n BaseErrorResponseId::error_tm_prices,\n\n )?;\n\n\n\n Ok(status::Accepted(Some(json!({}))))\n\n}\n", "file_path": "pkmnapi-api/src/routes/tm_prices.rs", "rank": 39, "score": 138948.260230655 }, { "content": "#[openapi]\n\n#[get(\"/pokemon/machines/<pokedex_id>\")]\n\npub fn get_pokemon_machines(\n\n sql: State<PkmnapiSQL>,\n\n _rate_limit: RateLimit,\n\n access_token: Result<AccessToken, AccessTokenError>,\n\n pokedex_id: u8,\n\n) -> Result<Json<PokemonMachinesResponse>, ResponseError> {\n\n let access_token = utils::get_access_token(access_token)?;\n\n let (db, _) = 
utils::get_db_with_applied_patches(&sql, &access_token)?;\n\n\n\n let pokemon_machines = db.get_pokemon_machines(&pokedex_id)?;\n\n let tm_ids = pokemon_machines\n\n .iter()\n\n .filter_map(|machine| match machine {\n\n PokemonMachine::TM(tm_id) => Some(*tm_id),\n\n _ => None,\n\n })\n\n .collect();\n\n let tm_moves = db.get_tm_move_all(&tm_ids)?;\n\n let hm_ids = pokemon_machines\n\n .iter()\n", "file_path": "pkmnapi-api/src/routes/pokemon_machines.rs", "rank": 40, "score": 138948.260230655 }, { "content": "#[openapi]\n\n#[get(\"/pokemon/machines\")]\n\npub fn get_pokemon_machines_all(\n\n sql: State<PkmnapiSQL>,\n\n _rate_limit: RateLimit,\n\n access_token: Result<AccessToken, AccessTokenError>,\n\n) -> Result<Json<PokemonMachinesResponseAll>, ResponseError> {\n\n let access_token = utils::get_access_token(access_token)?;\n\n let (db, _) = utils::get_db_with_applied_patches(&sql, &access_token)?;\n\n\n\n let (min_pokedex_id, max_pokedex_id) = db.pokedex_id_bounds();\n\n let pokedex_ids: Vec<u8> = (min_pokedex_id..=max_pokedex_id)\n\n .map(|pokedex_id| pokedex_id as u8)\n\n .collect();\n\n let pokemon_machines = db.get_pokemon_machines_all(&pokedex_ids)?;\n\n let tm_ids = pokemon_machines\n\n .iter()\n\n .map(|(_, machine)| machine)\n\n .flatten()\n\n .filter_map(|machine| match machine {\n\n PokemonMachine::TM(tm_id) => Some(*tm_id),\n\n _ => None,\n", "file_path": "pkmnapi-api/src/routes/pokemon_machines.rs", "rank": 41, "score": 138948.260230655 }, { "content": "#[openapi]\n\n#[get(\"/trainers/names/<trainer_id>\")]\n\npub fn get_trainer_name(\n\n sql: State<PkmnapiSQL>,\n\n _rate_limit: RateLimit,\n\n access_token: Result<AccessToken, AccessTokenError>,\n\n trainer_id: u8,\n\n) -> Result<Json<TrainerNameResponse>, ResponseError> {\n\n let access_token = utils::get_access_token(access_token)?;\n\n let (db, _) = utils::get_db_with_applied_patches(&sql, &access_token)?;\n\n\n\n let trainer_name = db.get_trainer_name(&trainer_id)?;\n\n\n\n let response = 
TrainerNameResponse::new(&trainer_id, &trainer_name);\n\n\n\n Ok(Json(response))\n\n}\n\n\n\n#[openapi]\n\n#[post(\n\n \"/trainers/names/<trainer_id>\",\n\n format = \"application/json\",\n\n data = \"<data>\"\n\n)]\n", "file_path": "pkmnapi-api/src/routes/trainer_names.rs", "rank": 42, "score": 138948.260230655 }, { "content": "#[openapi]\n\n#[post(\"/moves/names/<move_id>\", format = \"application/json\", data = \"<data>\")]\n\npub fn post_move_name(\n\n sql: State<PkmnapiSQL>,\n\n _rate_limit: RateLimit,\n\n access_token: Result<AccessToken, AccessTokenError>,\n\n patch_description: Result<PatchDescription, PatchDescriptionError>,\n\n data: Result<Json<MoveNameRequest>, JsonError>,\n\n move_id: u8,\n\n) -> Result<status::Accepted<JsonValue>, ResponseError> {\n\n let access_token = utils::get_access_token(access_token)?;\n\n let data = utils::get_data(data, BaseErrorResponseId::error_move_names_invalid)?;\n\n let (db, connection) = utils::get_db(&sql, &access_token)?;\n\n\n\n let move_name = MoveName {\n\n name: ROMString::from(data.get_name()),\n\n };\n\n\n\n let patch = db.set_move_name(&move_id, &move_name)?;\n\n\n\n utils::insert_rom_patch(\n\n sql,\n\n connection,\n\n access_token,\n\n patch,\n\n patch_description,\n\n BaseErrorResponseId::error_move_names,\n\n )?;\n\n\n\n Ok(status::Accepted(Some(json!({}))))\n\n}\n", "file_path": "pkmnapi-api/src/routes/move_names.rs", "rank": 43, "score": 138948.260230655 }, { "content": "#[openapi]\n\n#[get(\"/hms/moves/<hm_id>\")]\n\npub fn get_hm_move(\n\n sql: State<PkmnapiSQL>,\n\n _rate_limit: RateLimit,\n\n access_token: Result<AccessToken, AccessTokenError>,\n\n hm_id: u8,\n\n) -> Result<Json<HMMoveResponse>, ResponseError> {\n\n let access_token = utils::get_access_token(access_token)?;\n\n let (db, _) = utils::get_db_with_applied_patches(&sql, &access_token)?;\n\n\n\n let hm_move = db.get_hm_move(&hm_id)?;\n\n let move_name = db.get_move_name(&hm_move.move_id)?;\n\n\n\n let response = HMMoveResponse::new(&hm_id, 
&hm_move, &move_name);\n\n\n\n Ok(Json(response))\n\n}\n\n\n", "file_path": "pkmnapi-api/src/routes/hm_moves.rs", "rank": 44, "score": 138948.260230655 }, { "content": "#[openapi]\n\n#[get(\"/tms/prices/<tm_id>\")]\n\npub fn get_tm_price(\n\n sql: State<PkmnapiSQL>,\n\n _rate_limit: RateLimit,\n\n access_token: Result<AccessToken, AccessTokenError>,\n\n tm_id: u8,\n\n) -> Result<Json<TMPriceResponse>, ResponseError> {\n\n let access_token = utils::get_access_token(access_token)?;\n\n let (db, _) = utils::get_db_with_applied_patches(&sql, &access_token)?;\n\n\n\n let tm_price = db.get_tm_price(&tm_id)?;\n\n\n\n let response = TMPriceResponse::new(&tm_id, &tm_price);\n\n\n\n Ok(Json(response))\n\n}\n\n\n", "file_path": "pkmnapi-api/src/routes/tm_prices.rs", "rank": 45, "score": 138948.260230655 }, { "content": "#[openapi]\n\n#[get(\"/moves/names/<move_id>\")]\n\npub fn get_move_name(\n\n sql: State<PkmnapiSQL>,\n\n _rate_limit: RateLimit,\n\n access_token: Result<AccessToken, AccessTokenError>,\n\n move_id: u8,\n\n) -> Result<Json<MoveNameResponse>, ResponseError> {\n\n let access_token = utils::get_access_token(access_token)?;\n\n let (db, _) = utils::get_db_with_applied_patches(&sql, &access_token)?;\n\n\n\n let move_name = db.get_move_name(&move_id)?;\n\n\n\n let response = MoveNameResponse::new(&move_id, &move_name);\n\n\n\n Ok(Json(response))\n\n}\n\n\n", "file_path": "pkmnapi-api/src/routes/move_names.rs", "rank": 46, "score": 138948.260230655 }, { "content": "pub fn post_pokemon_cry(\n\n sql: State<PkmnapiSQL>,\n\n _rate_limit: RateLimit,\n\n access_token: Result<AccessToken, AccessTokenError>,\n\n patch_description: Result<PatchDescription, PatchDescriptionError>,\n\n data: Result<Json<PokemonCryRequest>, JsonError>,\n\n pokedex_id: u8,\n\n) -> Result<status::Accepted<JsonValue>, ResponseError> {\n\n let access_token = utils::get_access_token(access_token)?;\n\n let data = utils::get_data(data, BaseErrorResponseId::error_pokemon_cries_invalid)?;\n\n let (db, 
connection) = utils::get_db(&sql, &access_token)?;\n\n\n\n let pokemon_cry = Cry {\n\n base: data.get_base(),\n\n pitch: data.get_pitch(),\n\n length: data.get_length(),\n\n ..Default::default()\n\n };\n\n\n\n let patch = db.set_pokemon_cry(&pokedex_id, &pokemon_cry)?;\n", "file_path": "pkmnapi-api/src/routes/pokemon_cries.rs", "rank": 47, "score": 138948.260230655 }, { "content": "pub fn post_pokemon_machines(\n\n sql: State<PkmnapiSQL>,\n\n _rate_limit: RateLimit,\n\n access_token: Result<AccessToken, AccessTokenError>,\n\n patch_description: Result<PatchDescription, PatchDescriptionError>,\n\n data: Result<Json<PokemonMachinesRequest>, JsonError>,\n\n pokedex_id: u8,\n\n) -> Result<status::Accepted<JsonValue>, ResponseError> {\n\n let access_token = utils::get_access_token(access_token)?;\n\n let data = utils::get_data(data, BaseErrorResponseId::error_pokemon_machines_invalid)?;\n\n let (db, connection) = utils::get_db(&sql, &access_token)?;\n\n\n\n let pokemon_machines = data.get_machines();\n\n\n\n let patch = db.set_pokemon_machines(&pokedex_id, &pokemon_machines)?;\n\n\n\n utils::insert_rom_patch(\n\n sql,\n\n connection,\n\n access_token,\n\n patch,\n\n patch_description,\n\n BaseErrorResponseId::error_pokemon_machines,\n\n )?;\n\n\n\n Ok(status::Accepted(Some(json!({}))))\n\n}\n", "file_path": "pkmnapi-api/src/routes/pokemon_machines.rs", "rank": 48, "score": 138948.260230655 }, { "content": "#[openapi]\n\n#[post(\"/maps/pokemon/<map_id>\", format = \"application/json\", data = \"<data>\")]\n\npub fn post_map_pokemon(\n\n sql: State<PkmnapiSQL>,\n\n _rate_limit: RateLimit,\n\n access_token: Result<AccessToken, AccessTokenError>,\n\n patch_description: Result<PatchDescription, PatchDescriptionError>,\n\n data: Result<Json<MapPokemonRequest>, JsonError>,\n\n map_id: u8,\n\n) -> Result<status::Accepted<JsonValue>, ResponseError> {\n\n let access_token = utils::get_access_token(access_token)?;\n\n let data = utils::get_data(data, 
BaseErrorResponseId::error_map_pokemon_invalid)?;\n\n let (db, connection) = utils::get_db(&sql, &access_token)?;\n\n\n\n let map_pokemon = MapPokemon {\n\n grass: data.get_grass(),\n\n water: data.get_water(),\n\n };\n\n\n\n let patch = db.set_map_pokemon(&map_id, &map_pokemon)?;\n\n\n\n utils::insert_rom_patch(\n", "file_path": "pkmnapi-api/src/routes/map_pokemon.rs", "rank": 49, "score": 138948.260230655 }, { "content": "#[openapi]\n\n#[post(\"/access_tokens\", format = \"application/json\", data = \"<data>\")]\n\npub fn post_access_token(\n\n sql: State<PkmnapiSQL>,\n\n _rate_limit: RateLimit,\n\n data: Result<Json<AccessTokenRequest>, JsonError>,\n\n access_token: Result<AccessToken, AccessTokenError>,\n\n) -> Result<status::Created<JsonValue>, ResponseError> {\n\n if access_token.is_ok() {\n\n return Err(AccessTokenErrorForbidden::new());\n\n }\n\n\n\n let data = match data {\n\n Ok(data) => data.into_inner(),\n\n Err(JsonError::Parse(_, e)) => {\n\n return Err(AccessTokenErrorInvalid::new(&e.to_string()));\n\n }\n\n _ => {\n\n return Err(AccessTokenErrorInvalid::new(\n\n &\"An unknown error occurred\".to_owned(),\n\n ));\n\n }\n", "file_path": "pkmnapi-api/src/routes/access_tokens.rs", "rank": 50, "score": 138948.260230655 }, { "content": "#[openapi]\n\n#[get(\"/pokemon/stats/<pokedex_id>\")]\n\npub fn get_pokemon_stats(\n\n sql: State<PkmnapiSQL>,\n\n _rate_limit: RateLimit,\n\n access_token: Result<AccessToken, AccessTokenError>,\n\n pokedex_id: u8,\n\n) -> Result<Json<PokemonStatsResponse>, ResponseError> {\n\n let access_token = utils::get_access_token(access_token)?;\n\n let (db, _) = utils::get_db_with_applied_patches(&sql, &access_token)?;\n\n\n\n let pokemon_stats = db.get_pokemon_stats(&pokedex_id)?;\n\n let type_names = db.get_type_name_all(&pokemon_stats.type_ids)?;\n\n\n\n let response = PokemonStatsResponse::new(&pokedex_id, &pokemon_stats, &type_names);\n\n\n\n Ok(Json(response))\n\n}\n\n\n\n#[openapi]\n\n#[post(\n\n 
\"/pokemon/stats/<pokedex_id>\",\n\n format = \"application/json\",\n\n data = \"<data>\"\n\n)]\n", "file_path": "pkmnapi-api/src/routes/pokemon_stats.rs", "rank": 51, "score": 138948.260230655 }, { "content": "#[openapi]\n\n#[get(\"/moves/stats/<move_id>\")]\n\npub fn get_move_stats(\n\n sql: State<PkmnapiSQL>,\n\n _rate_limit: RateLimit,\n\n access_token: Result<AccessToken, AccessTokenError>,\n\n move_id: u8,\n\n) -> Result<Json<MoveStatsResponse>, ResponseError> {\n\n let access_token = utils::get_access_token(access_token)?;\n\n let (db, _) = utils::get_db_with_applied_patches(&sql, &access_token)?;\n\n\n\n let move_stats = db.get_move_stats(&move_id)?;\n\n let type_name = db.get_type_name(&move_stats.type_id)?;\n\n\n\n let response = MoveStatsResponse::new(&move_id, &move_stats, &type_name);\n\n\n\n Ok(Json(response))\n\n}\n\n\n", "file_path": "pkmnapi-api/src/routes/move_stats.rs", "rank": 52, "score": 138948.260230655 }, { "content": "#[openapi]\n\n#[get(\"/tms/moves\")]\n\npub fn get_tm_move_all(\n\n sql: State<PkmnapiSQL>,\n\n _rate_limit: RateLimit,\n\n access_token: Result<AccessToken, AccessTokenError>,\n\n) -> Result<Json<TMMoveResponseAll>, ResponseError> {\n\n let access_token = utils::get_access_token(access_token)?;\n\n let (db, _) = utils::get_db_with_applied_patches(&sql, &access_token)?;\n\n\n\n let (min_tm_id, max_tm_id) = db.tm_id_bounds();\n\n let tm_ids: Vec<u8> = (min_tm_id..=max_tm_id).map(|tm_id| tm_id as u8).collect();\n\n let tm_moves = db.get_tm_move_all(&tm_ids)?;\n\n let move_ids = tm_moves.iter().map(|(_, tm)| tm.move_id).collect();\n\n let move_names = db.get_move_name_all(&move_ids)?;\n\n\n\n let response = TMMoveResponseAll::new(&tm_ids, &tm_moves, &move_names);\n\n\n\n Ok(Json(response))\n\n}\n\n\n", "file_path": "pkmnapi-api/src/routes/tm_moves.rs", "rank": 53, "score": 138948.260230655 }, { "content": "pub fn post_pokedex_entry(\n\n sql: State<PkmnapiSQL>,\n\n _rate_limit: RateLimit,\n\n access_token: Result<AccessToken, 
AccessTokenError>,\n\n patch_description: Result<PatchDescription, PatchDescriptionError>,\n\n data: Result<Json<PokedexEntryRequest>, JsonError>,\n\n pokedex_id: u8,\n\n) -> Result<status::Accepted<JsonValue>, ResponseError> {\n\n let access_token = utils::get_access_token(access_token)?;\n\n let data = utils::get_data(data, BaseErrorResponseId::error_pokedex_entries_invalid)?;\n\n let (db, connection) = utils::get_db(&sql, &access_token)?;\n\n\n\n let pokedex_entry = PokedexEntry {\n\n species: ROMString::from(data.get_species()),\n\n height: data.get_height(),\n\n weight: data.get_weight(),\n\n };\n\n\n\n let patch = db.set_pokedex_entry(&pokedex_id, &pokedex_entry)?;\n\n\n", "file_path": "pkmnapi-api/src/routes/pokedex_entries.rs", "rank": 54, "score": 138948.260230655 }, { "content": "#[openapi]\n\n#[get(\"/maps/pokemon\")]\n\npub fn get_map_pokemon_all(\n\n sql: State<PkmnapiSQL>,\n\n _rate_limit: RateLimit,\n\n access_token: Result<AccessToken, AccessTokenError>,\n\n) -> Result<Json<MapPokemonResponseAll>, ResponseError> {\n\n let access_token = utils::get_access_token(access_token)?;\n\n let (db, _) = utils::get_db_with_applied_patches(&sql, &access_token)?;\n\n\n\n let (min_map_id, max_map_id) = db.map_id_bounds();\n\n let map_ids = (min_map_id..=max_map_id)\n\n .map(|map_id| map_id as u8)\n\n .collect();\n\n let map_pokemon = db.get_map_pokemon_all(&map_ids)?;\n\n let pokedex_ids = map_pokemon\n\n .iter()\n\n .map(|(_, map_pokemon)| {\n\n vec![\n\n map_pokemon\n\n .grass\n\n .pokemon\n", "file_path": "pkmnapi-api/src/routes/map_pokemon.rs", "rank": 55, "score": 138948.260230655 }, { "content": "#[openapi]\n\n#[get(\"/pokemon/evolutions\")]\n\npub fn get_pokemon_evolutions_all(\n\n sql: State<PkmnapiSQL>,\n\n _rate_limit: RateLimit,\n\n access_token: Result<AccessToken, AccessTokenError>,\n\n) -> Result<Json<PokemonEvolutionsResponseAll>, ResponseError> {\n\n let access_token = utils::get_access_token(access_token)?;\n\n let (db, _) = 
utils::get_db_with_applied_patches(&sql, &access_token)?;\n\n\n\n let (min_pokedex_id, max_pokedex_id) = db.pokedex_id_bounds();\n\n let pokedex_ids: Vec<u8> = (min_pokedex_id..=max_pokedex_id)\n\n .map(|pokedex_id| pokedex_id as u8)\n\n .collect();\n\n let pokemon_evolutions = db.get_pokemon_evolutions_all(&pokedex_ids)?;\n\n let pokemon_evolutions_pokedex_ids = pokemon_evolutions\n\n .iter()\n\n .map(|(_, pokemon_evolution)| pokemon_evolution)\n\n .flatten()\n\n .map(|pokemon_evolution| match pokemon_evolution {\n\n PokemonEvolution::LEVEL(evolution) => evolution.pokedex_id,\n\n PokemonEvolution::ITEM(evolution) => evolution.pokedex_id,\n", "file_path": "pkmnapi-api/src/routes/pokemon_evolutions.rs", "rank": 56, "score": 138948.260230655 }, { "content": "#[openapi]\n\n#[get(\"/trainers/rewards\")]\n\npub fn get_trainer_reward_all(\n\n sql: State<PkmnapiSQL>,\n\n _rate_limit: RateLimit,\n\n access_token: Result<AccessToken, AccessTokenError>,\n\n) -> Result<Json<TrainerRewardResponseAll>, ResponseError> {\n\n let access_token = utils::get_access_token(access_token)?;\n\n let (db, _) = utils::get_db_with_applied_patches(&sql, &access_token)?;\n\n\n\n let (min_trainer_id, max_trainer_id) = db.trainer_id_bounds();\n\n let trainer_ids: Vec<u8> = (min_trainer_id..=max_trainer_id)\n\n .map(|trainer_ids| trainer_ids as u8)\n\n .collect();\n\n let trainer_rewards = db.get_trainer_reward_all(&trainer_ids)?;\n\n\n\n let response = TrainerRewardResponseAll::new(&trainer_ids, &trainer_rewards);\n\n\n\n Ok(Json(response))\n\n}\n\n\n", "file_path": "pkmnapi-api/src/routes/trainer_rewards.rs", "rank": 57, "score": 138948.260230655 }, { "content": "#[openapi]\n\n#[get(\"/pokedex/texts/<pokedex_id>\")]\n\npub fn get_pokedex_text(\n\n sql: State<PkmnapiSQL>,\n\n _rate_limit: RateLimit,\n\n access_token: Result<AccessToken, AccessTokenError>,\n\n pokedex_id: u8,\n\n) -> Result<Json<PokedexTextResponse>, ResponseError> {\n\n let access_token = 
utils::get_access_token(access_token)?;\n\n let (db, _) = utils::get_db_with_applied_patches(&sql, &access_token)?;\n\n\n\n let pokedex_text = db.get_pokedex_text(&pokedex_id)?;\n\n\n\n let response = PokedexTextResponse::new(&pokedex_id, &pokedex_text);\n\n\n\n Ok(Json(response))\n\n}\n\n\n\n#[openapi]\n\n#[post(\n\n \"/pokedex/texts/<pokedex_id>\",\n\n format = \"application/json\",\n\n data = \"<data>\"\n\n)]\n", "file_path": "pkmnapi-api/src/routes/pokedex_texts.rs", "rank": 58, "score": 138948.260230655 }, { "content": "#[openapi]\n\n#[get(\"/pokemon/movesets/<pokedex_id>\")]\n\npub fn get_pokemon_moveset(\n\n sql: State<PkmnapiSQL>,\n\n _rate_limit: RateLimit,\n\n access_token: Result<AccessToken, AccessTokenError>,\n\n pokedex_id: u8,\n\n) -> Result<Json<PokemonMovesetResponse>, ResponseError> {\n\n let access_token = utils::get_access_token(access_token)?;\n\n let (db, _) = utils::get_db_with_applied_patches(&sql, &access_token)?;\n\n\n\n let pokemon_moveset = db.get_pokemon_moveset(&pokedex_id)?;\n\n let move_names = db.get_move_name_all(&pokemon_moveset)?;\n\n\n\n let response = PokemonMovesetResponse::new(&pokedex_id, &pokemon_moveset, &move_names);\n\n\n\n Ok(Json(response))\n\n}\n\n\n\n#[openapi]\n\n#[post(\n\n \"/pokemon/movesets/<pokedex_id>\",\n\n format = \"application/json\",\n\n data = \"<data>\"\n\n)]\n", "file_path": "pkmnapi-api/src/routes/pokemon_movesets.rs", "rank": 59, "score": 138948.260230655 }, { "content": "#[openapi]\n\n#[delete(\"/access_tokens\", format = \"application/json\", data = \"<data>\")]\n\npub fn delete_access_token(\n\n sql: State<PkmnapiSQL>,\n\n _rate_limit: RateLimit,\n\n data: Result<Json<AccessTokenDeleteRequest>, JsonError>,\n\n access_token: Result<AccessToken, AccessTokenError>,\n\n) -> Result<status::NoContent, ResponseError> {\n\n if access_token.is_ok() {\n\n return Err(AccessTokenErrorForbidden::new());\n\n }\n\n\n\n let data = match data {\n\n Ok(data) => data.into_inner(),\n\n Err(JsonError::Parse(_, e)) => 
{\n\n return Err(AccessTokenErrorInvalid::new(&e.to_string()));\n\n }\n\n _ => {\n\n return Err(AccessTokenErrorInvalid::new(\n\n &\"An unknown error occurred\".to_owned(),\n\n ));\n\n }\n", "file_path": "pkmnapi-api/src/routes/access_tokens.rs", "rank": 60, "score": 138948.260230655 }, { "content": "#[openapi]\n\n#[get(\"/tms/names/<tm_id>\")]\n\npub fn get_tm_name(\n\n sql: State<PkmnapiSQL>,\n\n _rate_limit: RateLimit,\n\n access_token: Result<AccessToken, AccessTokenError>,\n\n tm_id: u8,\n\n) -> Result<Json<TMNameResponse>, ResponseError> {\n\n let access_token = utils::get_access_token(access_token)?;\n\n let (db, _) = utils::get_db_with_applied_patches(&sql, &access_token)?;\n\n\n\n let tm_name = db.get_tm_name(&tm_id)?;\n\n\n\n let response = TMNameResponse::new(&tm_id, &tm_name);\n\n\n\n Ok(Json(response))\n\n}\n", "file_path": "pkmnapi-api/src/routes/tm_names.rs", "rank": 61, "score": 138948.260230655 }, { "content": "#[openapi]\n\n#[get(\"/pokedex/entries/<pokedex_id>\")]\n\npub fn get_pokedex_entry(\n\n sql: State<PkmnapiSQL>,\n\n _rate_limit: RateLimit,\n\n access_token: Result<AccessToken, AccessTokenError>,\n\n pokedex_id: u8,\n\n) -> Result<Json<PokedexEntryResponse>, ResponseError> {\n\n let access_token = utils::get_access_token(access_token)?;\n\n let (db, _) = utils::get_db_with_applied_patches(&sql, &access_token)?;\n\n\n\n let pokedex_entry = db.get_pokedex_entry(&pokedex_id)?;\n\n\n\n let response = PokedexEntryResponse::new(&pokedex_id, &pokedex_entry);\n\n\n\n Ok(Json(response))\n\n}\n\n\n\n#[openapi]\n\n#[post(\n\n \"/pokedex/entries/<pokedex_id>\",\n\n format = \"application/json\",\n\n data = \"<data>\"\n\n)]\n", "file_path": "pkmnapi-api/src/routes/pokedex_entries.rs", "rank": 62, "score": 138948.260230655 }, { "content": "#[openapi]\n\n#[get(\"/pokemon/cries\", format = \"application/json\")]\n\npub fn get_pokemon_cry_all(\n\n sql: State<PkmnapiSQL>,\n\n _rate_limit: RateLimit,\n\n access_token: Result<AccessToken, 
AccessTokenError>,\n\n) -> Result<Json<PokemonCryResponseAll>, ResponseError> {\n\n let access_token = utils::get_access_token(access_token)?;\n\n let (db, _) = utils::get_db_with_applied_patches(&sql, &access_token)?;\n\n\n\n let (min_pokedex_id, max_pokedex_id) = db.pokedex_id_bounds();\n\n let pokedex_ids: Vec<u8> = (min_pokedex_id..=max_pokedex_id)\n\n .map(|pokedex_id| pokedex_id as u8)\n\n .collect();\n\n let pokemon_cries = db.get_pokemon_cry_all(&pokedex_ids)?;\n\n\n\n let response = PokemonCryResponseAll::new(&pokedex_ids, &pokemon_cries);\n\n\n\n Ok(Json(response))\n\n}\n\n\n", "file_path": "pkmnapi-api/src/routes/pokemon_cries.rs", "rank": 63, "score": 138948.260230655 }, { "content": "#[openapi]\n\n#[get(\"/pokemon/learnsets\")]\n\npub fn get_pokemon_learnset_all(\n\n sql: State<PkmnapiSQL>,\n\n _rate_limit: RateLimit,\n\n access_token: Result<AccessToken, AccessTokenError>,\n\n) -> Result<Json<PokemonLearnsetResponseAll>, ResponseError> {\n\n let access_token = utils::get_access_token(access_token)?;\n\n let (db, _) = utils::get_db_with_applied_patches(&sql, &access_token)?;\n\n\n\n let (min_pokedex_id, max_pokedex_id) = db.pokedex_id_bounds();\n\n let pokedex_ids: Vec<u8> = (min_pokedex_id..=max_pokedex_id)\n\n .map(|pokedex_id| pokedex_id as u8)\n\n .collect();\n\n let pokemon_learnsets = db.get_pokemon_learnset_all(&pokedex_ids)?;\n\n let move_ids = pokemon_learnsets\n\n .iter()\n\n .map(|(_, pokemon_learnset)| pokemon_learnset)\n\n .flatten()\n\n .map(|learnset| learnset.move_id)\n\n .collect();\n\n let move_names = db.get_move_name_all(&move_ids)?;\n\n\n\n let response = PokemonLearnsetResponseAll::new(&pokedex_ids, &pokemon_learnsets, &move_names);\n\n\n\n Ok(Json(response))\n\n}\n\n\n", "file_path": "pkmnapi-api/src/routes/pokemon_learnsets.rs", "rank": 64, "score": 138948.260230655 }, { "content": "#[openapi]\n\n#[get(\"/marts/items\")]\n\npub fn get_mart_items_all(\n\n sql: State<PkmnapiSQL>,\n\n _rate_limit: RateLimit,\n\n access_token: 
Result<AccessToken, AccessTokenError>,\n\n) -> Result<Json<MartItemsResponseAll>, ResponseError> {\n\n let access_token = utils::get_access_token(access_token)?;\n\n let (db, _) = utils::get_db_with_applied_patches(&sql, &access_token)?;\n\n\n\n let (min_mart_id, max_mart_id) = db.mart_id_bounds();\n\n let mart_ids: Vec<u8> = (min_mart_id..=max_mart_id)\n\n .map(|mart_id| mart_id as u8)\n\n .collect();\n\n let mart_items = db.get_mart_items_all(&mart_ids)?;\n\n let item_ids = mart_items\n\n .iter()\n\n .map(|(_, mart_item)| mart_item)\n\n .flatten()\n\n .filter_map(|mart_item| match mart_item {\n\n MartItem::ITEM(item_id) => Some(*item_id),\n\n _ => None,\n", "file_path": "pkmnapi-api/src/routes/mart_items.rs", "rank": 65, "score": 138948.260230655 }, { "content": "#[openapi]\n\n#[post(\"/marts/items/<mart_id>\", format = \"application/json\", data = \"<data>\")]\n\npub fn post_mart_items(\n\n sql: State<PkmnapiSQL>,\n\n _rate_limit: RateLimit,\n\n access_token: Result<AccessToken, AccessTokenError>,\n\n patch_description: Result<PatchDescription, PatchDescriptionError>,\n\n data: Result<Json<MartItemsRequest>, JsonError>,\n\n mart_id: u8,\n\n) -> Result<status::Accepted<JsonValue>, ResponseError> {\n\n let access_token = utils::get_access_token(access_token)?;\n\n let data = utils::get_data(data, BaseErrorResponseId::error_mart_items_invalid)?;\n\n let (db, connection) = utils::get_db(&sql, &access_token)?;\n\n\n\n let mart_items = data.get_mart_items();\n\n\n\n let patch = db.set_mart_items(&mart_id, &mart_items)?;\n\n\n\n utils::insert_rom_patch(\n\n sql,\n\n connection,\n\n access_token,\n\n patch,\n\n patch_description,\n\n BaseErrorResponseId::error_mart_items,\n\n )?;\n\n\n\n Ok(status::Accepted(Some(json!({}))))\n\n}\n", "file_path": "pkmnapi-api/src/routes/mart_items.rs", "rank": 66, "score": 138948.260230655 }, { "content": "#[openapi]\n\n#[get(\"/tms/prices\")]\n\npub fn get_tm_price_all(\n\n sql: State<PkmnapiSQL>,\n\n _rate_limit: RateLimit,\n\n 
access_token: Result<AccessToken, AccessTokenError>,\n\n) -> Result<Json<TMPriceResponseAll>, ResponseError> {\n\n let access_token = utils::get_access_token(access_token)?;\n\n let (db, _) = utils::get_db_with_applied_patches(&sql, &access_token)?;\n\n\n\n let (min_tm_id, max_tm_id) = db.tm_id_bounds();\n\n let tm_ids: Vec<u8> = (min_tm_id..=max_tm_id).map(|tm_id| tm_id as u8).collect();\n\n let tm_prices = db.get_tm_price_all(&tm_ids)?;\n\n\n\n let response = TMPriceResponseAll::new(&tm_ids, &tm_prices);\n\n\n\n Ok(Json(response))\n\n}\n\n\n", "file_path": "pkmnapi-api/src/routes/tm_prices.rs", "rank": 67, "score": 138948.260230655 }, { "content": "#[openapi]\n\n#[get(\"/items/names\")]\n\npub fn get_item_name_all(\n\n sql: State<PkmnapiSQL>,\n\n _rate_limit: RateLimit,\n\n access_token: Result<AccessToken, AccessTokenError>,\n\n) -> Result<Json<ItemNameResponseAll>, ResponseError> {\n\n let access_token = utils::get_access_token(access_token)?;\n\n let (db, _) = utils::get_db_with_applied_patches(&sql, &access_token)?;\n\n\n\n let (min_item_id, max_item_id) = db.item_id_bounds();\n\n let item_ids: Vec<u8> = (min_item_id..=max_item_id)\n\n .map(|item_id| item_id as u8)\n\n .collect();\n\n let item_names = db.get_item_name_all(&item_ids)?;\n\n\n\n let response = ItemNameResponseAll::new(&item_ids, &item_names);\n\n\n\n Ok(Json(response))\n\n}\n\n\n", "file_path": "pkmnapi-api/src/routes/item_names.rs", "rank": 68, "score": 138948.260230655 }, { "content": "#[openapi]\n\n#[get(\"/trainers/names\")]\n\npub fn get_trainer_name_all(\n\n sql: State<PkmnapiSQL>,\n\n _rate_limit: RateLimit,\n\n access_token: Result<AccessToken, AccessTokenError>,\n\n) -> Result<Json<TrainerNameResponseAll>, ResponseError> {\n\n let access_token = utils::get_access_token(access_token)?;\n\n let (db, _) = utils::get_db_with_applied_patches(&sql, &access_token)?;\n\n\n\n let (min_trainer_id, max_trainer_id) = db.trainer_id_bounds();\n\n let trainer_ids: Vec<u8> = 
(min_trainer_id..=max_trainer_id)\n\n .map(|trainer_id| trainer_id as u8)\n\n .collect();\n\n let trainer_names = db.get_trainer_name_all(&trainer_ids)?;\n\n\n\n let response = TrainerNameResponseAll::new(&trainer_ids, &trainer_names);\n\n\n\n Ok(Json(response))\n\n}\n\n\n", "file_path": "pkmnapi-api/src/routes/trainer_names.rs", "rank": 69, "score": 138948.260230655 }, { "content": "#[openapi]\n\n#[get(\"/moves/stats\")]\n\npub fn get_move_stats_all(\n\n sql: State<PkmnapiSQL>,\n\n _rate_limit: RateLimit,\n\n access_token: Result<AccessToken, AccessTokenError>,\n\n) -> Result<Json<MoveStatsResponseAll>, ResponseError> {\n\n let access_token = utils::get_access_token(access_token)?;\n\n let (db, _) = utils::get_db_with_applied_patches(&sql, &access_token)?;\n\n\n\n let (min_move_id, max_move_id) = db.move_id_bounds();\n\n let move_ids: Vec<u8> = (min_move_id..=max_move_id)\n\n .map(|move_id| move_id as u8)\n\n .collect();\n\n let move_stats = db.get_move_stats_all(&move_ids)?;\n\n let type_ids = move_stats\n\n .iter()\n\n .map(|(_, move_stats)| move_stats.type_id)\n\n .collect();\n\n let type_names = db.get_type_name_all(&type_ids)?;\n\n\n\n let response = MoveStatsResponseAll::new(&move_ids, &move_stats, &type_names);\n\n\n\n Ok(Json(response))\n\n}\n\n\n", "file_path": "pkmnapi-api/src/routes/move_stats.rs", "rank": 70, "score": 138948.260230655 }, { "content": "#[openapi]\n\n#[get(\"/moves/names\")]\n\npub fn get_move_name_all(\n\n sql: State<PkmnapiSQL>,\n\n _rate_limit: RateLimit,\n\n access_token: Result<AccessToken, AccessTokenError>,\n\n) -> Result<Json<MoveNameResponseAll>, ResponseError> {\n\n let access_token = utils::get_access_token(access_token)?;\n\n let (db, _) = utils::get_db_with_applied_patches(&sql, &access_token)?;\n\n\n\n let (min_move_id, max_move_id) = db.move_id_bounds();\n\n let move_ids: Vec<u8> = (min_move_id..=max_move_id)\n\n .map(|move_id| move_id as u8)\n\n .collect();\n\n let move_names = db.get_move_name_all(&move_ids)?;\n\n\n\n 
let response = MoveNameResponseAll::new(&move_ids, &move_names);\n\n\n\n Ok(Json(response))\n\n}\n\n\n", "file_path": "pkmnapi-api/src/routes/move_names.rs", "rank": 71, "score": 138948.260230655 }, { "content": "#[openapi]\n\n#[get(\"/player_names\")]\n\npub fn get_player_names(\n\n sql: State<PkmnapiSQL>,\n\n _rate_limit: RateLimit,\n\n access_token: Result<AccessToken, AccessTokenError>,\n\n) -> Result<Json<PlayerNamesResponse>, ResponseError> {\n\n let access_token = utils::get_access_token(access_token)?;\n\n let (db, _) = utils::get_db_with_applied_patches(&sql, &access_token)?;\n\n\n\n let player_names = db.get_player_names()?;\n\n\n\n let response = PlayerNamesResponse::new(&player_names);\n\n\n\n Ok(Json(response))\n\n}\n\n\n", "file_path": "pkmnapi-api/src/routes/player_names.rs", "rank": 72, "score": 138948.260230655 }, { "content": "#[openapi]\n\n#[get(\"/marts/items/<mart_id>\")]\n\npub fn get_mart_items(\n\n sql: State<PkmnapiSQL>,\n\n _rate_limit: RateLimit,\n\n access_token: Result<AccessToken, AccessTokenError>,\n\n mart_id: u8,\n\n) -> Result<Json<MartItemsResponse>, ResponseError> {\n\n let access_token = utils::get_access_token(access_token)?;\n\n let (db, _) = utils::get_db_with_applied_patches(&sql, &access_token)?;\n\n\n\n let mart_items = db.get_mart_items(&mart_id)?;\n\n let item_ids = mart_items\n\n .iter()\n\n .filter_map(|mart_item| match mart_item {\n\n MartItem::ITEM(item_id) => Some(*item_id),\n\n _ => None,\n\n })\n\n .collect();\n\n let item_names = db.get_item_name_all(&item_ids)?;\n\n let tm_ids = mart_items\n\n .iter()\n", "file_path": "pkmnapi-api/src/routes/mart_items.rs", "rank": 73, "score": 138948.260230655 }, { "content": "#[openapi]\n\n#[get(\"/pokemon/movesets\")]\n\npub fn get_pokemon_moveset_all(\n\n sql: State<PkmnapiSQL>,\n\n _rate_limit: RateLimit,\n\n access_token: Result<AccessToken, AccessTokenError>,\n\n) -> Result<Json<PokemonMovesetResponseAll>, ResponseError> {\n\n let access_token = 
utils::get_access_token(access_token)?;\n\n let (db, _) = utils::get_db_with_applied_patches(&sql, &access_token)?;\n\n\n\n let (min_pokedex_id, max_pokedex_id) = db.pokedex_id_bounds();\n\n let pokedex_ids: Vec<u8> = (min_pokedex_id..=max_pokedex_id)\n\n .map(|pokedex_id| pokedex_id as u8)\n\n .collect();\n\n let pokemon_movesets = db.get_pokemon_moveset_all(&pokedex_ids)?;\n\n let move_ids = pokemon_movesets\n\n .iter()\n\n .map(|(_, pokemon_moveset)| pokemon_moveset.to_vec())\n\n .flatten()\n\n .collect();\n\n let move_names = db.get_move_name_all(&move_ids)?;\n\n\n\n let response = PokemonMovesetResponseAll::new(&pokedex_ids, &pokemon_movesets, &move_names);\n\n\n\n Ok(Json(response))\n\n}\n\n\n", "file_path": "pkmnapi-api/src/routes/pokemon_movesets.rs", "rank": 74, "score": 138948.260230655 }, { "content": "pub fn post_pokemon_stats(\n\n sql: State<PkmnapiSQL>,\n\n _rate_limit: RateLimit,\n\n access_token: Result<AccessToken, AccessTokenError>,\n\n patch_description: Result<PatchDescription, PatchDescriptionError>,\n\n data: Result<Json<PokemonStatsRequest>, JsonError>,\n\n pokedex_id: u8,\n\n) -> Result<status::Accepted<JsonValue>, ResponseError> {\n\n let access_token = utils::get_access_token(access_token)?;\n\n let data = utils::get_data(data, BaseErrorResponseId::error_pokemon_stats_invalid)?;\n\n let (db, connection) = utils::get_db(&sql, &access_token)?;\n\n\n\n let pokemon_stats = PokemonStats {\n\n pokedex_id: pokedex_id,\n\n base_hp: data.get_base_hp(),\n\n base_attack: data.get_base_attack(),\n\n base_defence: data.get_base_defence(),\n\n base_speed: data.get_base_speed(),\n\n base_special: data.get_base_special(),\n\n type_ids: data.get_type_ids(),\n", "file_path": "pkmnapi-api/src/routes/pokemon_stats.rs", "rank": 75, "score": 138948.260230655 }, { "content": "pub fn post_trainer_name(\n\n sql: State<PkmnapiSQL>,\n\n _rate_limit: RateLimit,\n\n access_token: Result<AccessToken, AccessTokenError>,\n\n patch_description: Result<PatchDescription, 
PatchDescriptionError>,\n\n data: Result<Json<TrainerNameRequest>, JsonError>,\n\n trainer_id: u8,\n\n) -> Result<status::Accepted<JsonValue>, ResponseError> {\n\n let access_token = utils::get_access_token(access_token)?;\n\n let data = utils::get_data(data, BaseErrorResponseId::error_trainer_names_invalid)?;\n\n let (db, connection) = utils::get_db(&sql, &access_token)?;\n\n\n\n let trainer_name = TrainerName {\n\n name: ROMString::from(data.get_name()),\n\n };\n\n\n\n let patch = db.set_trainer_name(&trainer_id, &trainer_name)?;\n\n\n\n utils::insert_rom_patch(\n\n sql,\n\n connection,\n\n access_token,\n\n patch,\n\n patch_description,\n\n BaseErrorResponseId::error_trainer_names,\n\n )?;\n\n\n\n Ok(status::Accepted(Some(json!({}))))\n\n}\n", "file_path": "pkmnapi-api/src/routes/trainer_names.rs", "rank": 76, "score": 138948.260230655 }, { "content": "pub fn post_pokemon_name(\n\n sql: State<PkmnapiSQL>,\n\n _rate_limit: RateLimit,\n\n access_token: Result<AccessToken, AccessTokenError>,\n\n patch_description: Result<PatchDescription, PatchDescriptionError>,\n\n data: Result<Json<PokemonNameRequest>, JsonError>,\n\n pokedex_id: u8,\n\n) -> Result<status::Accepted<JsonValue>, ResponseError> {\n\n let access_token = utils::get_access_token(access_token)?;\n\n let data = utils::get_data(data, BaseErrorResponseId::error_pokemon_names_invalid)?;\n\n let (db, connection) = utils::get_db(&sql, &access_token)?;\n\n\n\n let pokemon_name = PokemonName {\n\n name: ROMString::from(data.get_name()),\n\n };\n\n\n\n let patch = db.set_pokemon_name(&pokedex_id, &pokemon_name)?;\n\n\n\n utils::insert_rom_patch(\n\n sql,\n\n connection,\n\n access_token,\n\n patch,\n\n patch_description,\n\n BaseErrorResponseId::error_pokemon_names,\n\n )?;\n\n\n\n Ok(status::Accepted(Some(json!({}))))\n\n}\n", "file_path": "pkmnapi-api/src/routes/pokemon_names.rs", "rank": 77, "score": 138948.260230655 }, { "content": "#[openapi]\n\n#[delete(\"/roms/patches/<patch_id>\")]\n\npub fn 
delete_rom_patch(\n\n sql: State<PkmnapiSQL>,\n\n _rate_limit: RateLimit,\n\n access_token: Result<AccessToken, AccessTokenError>,\n\n if_match: Result<IfMatch, IfMatchError>,\n\n patch_id: String,\n\n) -> Result<status::NoContent, ResponseError> {\n\n let access_token = utils::get_access_token(access_token)?;\n\n let etag = utils::get_etag(if_match)?;\n\n let connection = sql.get_connection().unwrap();\n\n\n\n match sql.delete_rom_patch_by_id(&connection, &access_token, &patch_id, &etag) {\n\n Ok(_) => {}\n\n Err(pkmnapi_sql::error::Error::ETagError) => return Err(ETagErrorMismatch::new()),\n\n Err(_) => return Err(RomErrorNoRom::new()),\n\n }\n\n\n\n Ok(status::NoContent)\n\n}\n", "file_path": "pkmnapi-api/src/routes/rom_patches.rs", "rank": 78, "score": 138948.260230655 }, { "content": "#[openapi]\n\n#[get(\"/items/names/<item_id>\")]\n\npub fn get_item_name(\n\n sql: State<PkmnapiSQL>,\n\n _rate_limit: RateLimit,\n\n access_token: Result<AccessToken, AccessTokenError>,\n\n item_id: u8,\n\n) -> Result<Json<ItemNameResponse>, ResponseError> {\n\n let access_token = utils::get_access_token(access_token)?;\n\n let (db, _) = utils::get_db_with_applied_patches(&sql, &access_token)?;\n\n\n\n let item_name = db.get_item_name(&item_id)?;\n\n\n\n let response = ItemNameResponse::new(&item_id, &item_name);\n\n\n\n Ok(Json(response))\n\n}\n\n\n", "file_path": "pkmnapi-api/src/routes/item_names.rs", "rank": 79, "score": 138948.260230655 }, { "content": "#[openapi]\n\n#[get(\"/pokemon/names\")]\n\npub fn get_pokemon_name_all(\n\n sql: State<PkmnapiSQL>,\n\n _rate_limit: RateLimit,\n\n access_token: Result<AccessToken, AccessTokenError>,\n\n) -> Result<Json<PokemonNameResponseAll>, ResponseError> {\n\n let access_token = utils::get_access_token(access_token)?;\n\n let (db, _) = utils::get_db_with_applied_patches(&sql, &access_token)?;\n\n\n\n let (min_pokedex_id, max_pokedex_id) = db.pokedex_id_bounds();\n\n let pokedex_ids: Vec<u8> = (min_pokedex_id..=max_pokedex_id)\n\n 
.map(|pokedex_id| pokedex_id as u8)\n\n .collect();\n\n let pokemon_names = db.get_pokemon_name_all(&pokedex_ids)?;\n\n\n\n let response = PokemonNameResponseAll::new(&pokedex_ids, &pokemon_names);\n\n\n\n Ok(Json(response))\n\n}\n\n\n", "file_path": "pkmnapi-api/src/routes/pokemon_names.rs", "rank": 80, "score": 138948.260230655 }, { "content": "#[openapi]\n\n#[post(\"/player_names\", format = \"application/json\", data = \"<data>\")]\n\npub fn post_player_names(\n\n sql: State<PkmnapiSQL>,\n\n _rate_limit: RateLimit,\n\n access_token: Result<AccessToken, AccessTokenError>,\n\n patch_description: Result<PatchDescription, PatchDescriptionError>,\n\n data: Result<Json<PlayerNamesRequest>, JsonError>,\n\n) -> Result<status::Accepted<JsonValue>, ResponseError> {\n\n let access_token = utils::get_access_token(access_token)?;\n\n let data = utils::get_data(data, BaseErrorResponseId::error_player_names_invalid)?;\n\n let (db, connection) = utils::get_db(&sql, &access_token)?;\n\n\n\n let player_names = PlayerNames {\n\n player: data\n\n .get_player_names()\n\n .iter()\n\n .map(|name| ROMString::from(name))\n\n .collect(),\n\n rival: data\n\n .get_rival_names()\n\n .iter()\n", "file_path": "pkmnapi-api/src/routes/player_names.rs", "rank": 81, "score": 138948.260230655 }, { "content": "pub fn post_pokemon_moveset(\n\n sql: State<PkmnapiSQL>,\n\n _rate_limit: RateLimit,\n\n access_token: Result<AccessToken, AccessTokenError>,\n\n patch_description: Result<PatchDescription, PatchDescriptionError>,\n\n data: Result<Json<PokemonMovesetRequest>, JsonError>,\n\n pokedex_id: u8,\n\n) -> Result<status::Accepted<JsonValue>, ResponseError> {\n\n let access_token = utils::get_access_token(access_token)?;\n\n let data = utils::get_data(data, BaseErrorResponseId::error_pokemon_movesets_invalid)?;\n\n let (db, connection) = utils::get_db(&sql, &access_token)?;\n\n\n\n let pokemon_moveset = data.get_moveset();\n\n\n\n let patch = db.set_pokemon_moveset(&pokedex_id, 
&pokemon_moveset)?;\n\n\n\n utils::insert_rom_patch(\n\n sql,\n\n connection,\n\n access_token,\n\n patch,\n\n patch_description,\n\n BaseErrorResponseId::error_pokemon_movesets,\n\n )?;\n\n\n\n Ok(status::Accepted(Some(json!({}))))\n\n}\n", "file_path": "pkmnapi-api/src/routes/pokemon_movesets.rs", "rank": 82, "score": 138948.260230655 }, { "content": "#[openapi]\n\n#[get(\"/pokedex/texts\")]\n\npub fn get_pokedex_text_all(\n\n sql: State<PkmnapiSQL>,\n\n _rate_limit: RateLimit,\n\n access_token: Result<AccessToken, AccessTokenError>,\n\n) -> Result<Json<PokedexTextResponseAll>, ResponseError> {\n\n let access_token = utils::get_access_token(access_token)?;\n\n let (db, _) = utils::get_db_with_applied_patches(&sql, &access_token)?;\n\n\n\n let (min_pokedex_id, max_pokedex_id) = db.pokedex_id_bounds();\n\n let pokedex_ids: Vec<u8> = (min_pokedex_id..=max_pokedex_id)\n\n .map(|pokedex_id| pokedex_id as u8)\n\n .collect();\n\n let pokedex_texts = db.get_pokedex_text_all(&pokedex_ids)?;\n\n\n\n let response = PokedexTextResponseAll::new(&pokedex_ids, &pokedex_texts);\n\n\n\n Ok(Json(response))\n\n}\n\n\n", "file_path": "pkmnapi-api/src/routes/pokedex_texts.rs", "rank": 83, "score": 138948.260230655 }, { "content": "#[openapi]\n\n#[get(\"/roms/patches\", format = \"application/json\", rank = 1)]\n\npub fn get_rom_patches(\n\n sql: State<PkmnapiSQL>,\n\n _rate_limit: RateLimit,\n\n access_token: Result<AccessToken, AccessTokenError>,\n\n) -> Result<Json<RomPatchResponseAll>, ResponseError> {\n\n let access_token = utils::get_access_token(access_token)?;\n\n\n\n let connection = sql.get_connection().unwrap();\n\n let patches = match sql.select_rom_patches_by_access_token(&connection, &access_token) {\n\n Ok(patches) => patches,\n\n Err(_) => return Err(RomErrorNoRom::new()),\n\n };\n\n\n\n let response = RomPatchResponseAll::new(&patches);\n\n\n\n Ok(Json(response))\n\n}\n\n\n", "file_path": "pkmnapi-api/src/routes/rom_patches.rs", "rank": 84, "score": 138948.260230655 
}, { "content": "#[openapi]\n\n#[post(\"/items/names/<item_id>\", format = \"application/json\", data = \"<data>\")]\n\npub fn post_item_name(\n\n sql: State<PkmnapiSQL>,\n\n _rate_limit: RateLimit,\n\n access_token: Result<AccessToken, AccessTokenError>,\n\n patch_description: Result<PatchDescription, PatchDescriptionError>,\n\n data: Result<Json<ItemNameRequest>, JsonError>,\n\n item_id: u8,\n\n) -> Result<status::Accepted<JsonValue>, ResponseError> {\n\n let access_token = utils::get_access_token(access_token)?;\n\n let data = utils::get_data(data, BaseErrorResponseId::error_item_names_invalid)?;\n\n let (db, connection) = utils::get_db(&sql, &access_token)?;\n\n\n\n let item_name = ItemName {\n\n name: ROMString::from(data.get_name()),\n\n };\n\n\n\n let patch = db.set_item_name(&item_id, &item_name)?;\n\n\n\n utils::insert_rom_patch(\n\n sql,\n\n connection,\n\n access_token,\n\n patch,\n\n patch_description,\n\n BaseErrorResponseId::error_item_names,\n\n )?;\n\n\n\n Ok(status::Accepted(Some(json!({}))))\n\n}\n", "file_path": "pkmnapi-api/src/routes/item_names.rs", "rank": 85, "score": 138948.260230655 }, { "content": "#[openapi]\n\n#[get(\"/hms/names\")]\n\npub fn get_hm_name_all(\n\n sql: State<PkmnapiSQL>,\n\n _rate_limit: RateLimit,\n\n access_token: Result<AccessToken, AccessTokenError>,\n\n) -> Result<Json<HMNameResponseAll>, ResponseError> {\n\n let access_token = utils::get_access_token(access_token)?;\n\n let (db, _) = utils::get_db_with_applied_patches(&sql, &access_token)?;\n\n\n\n let (min_hm_id, max_hm_id) = db.hm_id_bounds();\n\n let hm_ids: Vec<u8> = (min_hm_id..=max_hm_id).map(|hm_id| hm_id as u8).collect();\n\n let hm_names = db.get_hm_name_all(&hm_ids)?;\n\n\n\n let response = HMNameResponseAll::new(&hm_ids, &hm_names);\n\n\n\n Ok(Json(response))\n\n}\n\n\n", "file_path": "pkmnapi-api/src/routes/hm_names.rs", "rank": 86, "score": 138948.260230655 }, { "content": "pub fn post_pokedex_text(\n\n sql: State<PkmnapiSQL>,\n\n _rate_limit: 
RateLimit,\n\n access_token: Result<AccessToken, AccessTokenError>,\n\n patch_description: Result<PatchDescription, PatchDescriptionError>,\n\n data: Result<Json<PokedexTextRequest>, JsonError>,\n\n pokedex_id: u8,\n\n) -> Result<status::Accepted<JsonValue>, ResponseError> {\n\n let access_token = utils::get_access_token(access_token)?;\n\n let data = utils::get_data(data, BaseErrorResponseId::error_pokedex_texts_invalid)?;\n\n let (db, connection) = utils::get_db(&sql, &access_token)?;\n\n\n\n let pokedex_text = PokedexText {\n\n text: ROMString::from(data.get_text()),\n\n };\n\n\n\n let patch = db.set_pokedex_text(&pokedex_id, &pokedex_text)?;\n\n\n\n utils::insert_rom_patch(\n\n sql,\n\n connection,\n\n access_token,\n\n patch,\n\n patch_description,\n\n BaseErrorResponseId::error_pokedex_texts,\n\n )?;\n\n\n\n Ok(status::Accepted(Some(json!({}))))\n\n}\n", "file_path": "pkmnapi-api/src/routes/pokedex_texts.rs", "rank": 87, "score": 138948.260230655 }, { "content": "#[openapi]\n\n#[get(\"/tms/names\")]\n\npub fn get_tm_name_all(\n\n sql: State<PkmnapiSQL>,\n\n _rate_limit: RateLimit,\n\n access_token: Result<AccessToken, AccessTokenError>,\n\n) -> Result<Json<TMNameResponseAll>, ResponseError> {\n\n let access_token = utils::get_access_token(access_token)?;\n\n let (db, _) = utils::get_db_with_applied_patches(&sql, &access_token)?;\n\n\n\n let (min_tm_id, max_tm_id) = db.tm_id_bounds();\n\n let tm_ids: Vec<u8> = (min_tm_id..=max_tm_id).map(|tm_id| tm_id as u8).collect();\n\n let tm_names = db.get_tm_name_all(&tm_ids)?;\n\n\n\n let response = TMNameResponseAll::new(&tm_ids, &tm_names);\n\n\n\n Ok(Json(response))\n\n}\n\n\n", "file_path": "pkmnapi-api/src/routes/tm_names.rs", "rank": 88, "score": 138948.260230655 }, { "content": "#[openapi]\n\n#[get(\"/pokemon/stats\")]\n\npub fn get_pokemon_stats_all(\n\n sql: State<PkmnapiSQL>,\n\n _rate_limit: RateLimit,\n\n access_token: Result<AccessToken, AccessTokenError>,\n\n) -> Result<Json<PokemonStatsResponseAll>, 
ResponseError> {\n\n let access_token = utils::get_access_token(access_token)?;\n\n let (db, _) = utils::get_db_with_applied_patches(&sql, &access_token)?;\n\n\n\n let (min_pokedex_id, max_pokedex_id) = db.pokedex_id_bounds();\n\n let pokedex_ids: Vec<u8> = (min_pokedex_id..=max_pokedex_id)\n\n .map(|pokedex_id| pokedex_id as u8)\n\n .collect();\n\n let pokemon_stats = db.get_pokemon_stats_all(&pokedex_ids)?;\n\n let type_ids = pokemon_stats\n\n .iter()\n\n .map(|(_, pokemon_stats)| pokemon_stats.type_ids.to_vec())\n\n .flatten()\n\n .collect();\n\n let type_names = db.get_type_name_all(&type_ids)?;\n\n\n\n let response = PokemonStatsResponseAll::new(&pokedex_ids, &pokemon_stats, &type_names);\n\n\n\n Ok(Json(response))\n\n}\n\n\n", "file_path": "pkmnapi-api/src/routes/pokemon_stats.rs", "rank": 89, "score": 138948.260230655 }, { "content": "#[openapi]\n\n#[get(\"/pokemon/icons/<pokedex_id>\")]\n\npub fn get_pokemon_icon(\n\n sql: State<PkmnapiSQL>,\n\n _rate_limit: RateLimit,\n\n access_token: Result<AccessToken, AccessTokenError>,\n\n pokedex_id: u8,\n\n) -> Result<Json<PokemonIconResponse>, ResponseError> {\n\n let access_token = utils::get_access_token(access_token)?;\n\n let (db, _) = utils::get_db_with_applied_patches(&sql, &access_token)?;\n\n\n\n let pokemon_icon = db.get_pokemon_icon(&pokedex_id)?;\n\n\n\n let response = PokemonIconResponse::new(&pokedex_id, &pokemon_icon);\n\n\n\n Ok(Json(response))\n\n}\n\n\n\n#[openapi]\n\n#[post(\n\n \"/pokemon/icons/<pokedex_id>\",\n\n format = \"application/json\",\n\n data = \"<data>\"\n\n)]\n", "file_path": "pkmnapi-api/src/routes/pokemon_icons.rs", "rank": 90, "score": 138948.260230655 }, { "content": "#[openapi]\n\n#[get(\"/pokemon/learnsets/<pokedex_id>\")]\n\npub fn get_pokemon_learnset(\n\n sql: State<PkmnapiSQL>,\n\n _rate_limit: RateLimit,\n\n access_token: Result<AccessToken, AccessTokenError>,\n\n pokedex_id: u8,\n\n) -> Result<Json<PokemonLearnsetResponse>, ResponseError> {\n\n let access_token = 
utils::get_access_token(access_token)?;\n\n let (db, _) = utils::get_db_with_applied_patches(&sql, &access_token)?;\n\n\n\n let pokemon_learnset = db.get_pokemon_learnset(&pokedex_id)?;\n\n let move_ids = pokemon_learnset\n\n .iter()\n\n .map(|learnset| learnset.move_id)\n\n .collect();\n\n let move_names = db.get_move_name_all(&move_ids)?;\n\n\n\n let response = PokemonLearnsetResponse::new(&pokedex_id, &pokemon_learnset, &move_names);\n\n\n\n Ok(Json(response))\n\n}\n\n\n\n#[openapi]\n\n#[post(\n\n \"/pokemon/learnsets/<pokedex_id>\",\n\n format = \"application/json\",\n\n data = \"<data>\"\n\n)]\n", "file_path": "pkmnapi-api/src/routes/pokemon_learnsets.rs", "rank": 91, "score": 138948.260230655 }, { "content": "pub fn post_pokemon_evolutions(\n\n sql: State<PkmnapiSQL>,\n\n _rate_limit: RateLimit,\n\n access_token: Result<AccessToken, AccessTokenError>,\n\n patch_description: Result<PatchDescription, PatchDescriptionError>,\n\n data: Result<Json<PokemonEvolutionsRequest>, JsonError>,\n\n pokedex_id: u8,\n\n) -> Result<status::Accepted<JsonValue>, ResponseError> {\n\n let access_token = utils::get_access_token(access_token)?;\n\n let data = utils::get_data(data, BaseErrorResponseId::error_pokemon_evolutions_invalid)?;\n\n let (db, connection) = utils::get_db(&sql, &access_token)?;\n\n\n\n let pokemon_evolutions = data.get_evolutions();\n\n\n\n let patch = db.set_pokemon_evolutions(&pokedex_id, &pokemon_evolutions)?;\n\n\n\n utils::insert_rom_patch(\n\n sql,\n\n connection,\n\n access_token,\n\n patch,\n\n patch_description,\n\n BaseErrorResponseId::error_pokemon_evolutions,\n\n )?;\n\n\n\n Ok(status::Accepted(Some(json!({}))))\n\n}\n", "file_path": "pkmnapi-api/src/routes/pokemon_evolutions.rs", "rank": 92, "score": 138948.260230655 }, { "content": "#[openapi]\n\n#[get(\"/hms/moves\")]\n\npub fn get_hm_move_all(\n\n sql: State<PkmnapiSQL>,\n\n _rate_limit: RateLimit,\n\n access_token: Result<AccessToken, AccessTokenError>,\n\n) -> Result<Json<HMMoveResponseAll>, 
ResponseError> {\n\n let access_token = utils::get_access_token(access_token)?;\n\n let (db, _) = utils::get_db_with_applied_patches(&sql, &access_token)?;\n\n\n\n let (min_hm_id, max_hm_id) = db.hm_id_bounds();\n\n let hm_ids: Vec<u8> = (min_hm_id..=max_hm_id).map(|hm_id| hm_id as u8).collect();\n\n let hm_moves = db.get_hm_move_all(&hm_ids)?;\n\n let move_ids: Vec<u8> = hm_moves.iter().map(|(_, hm)| hm.move_id).collect();\n\n let move_names = db.get_move_name_all(&move_ids)?;\n\n\n\n let response = HMMoveResponseAll::new(&hm_ids, &hm_moves, &move_names);\n\n\n\n Ok(Json(response))\n\n}\n\n\n", "file_path": "pkmnapi-api/src/routes/hm_moves.rs", "rank": 93, "score": 138948.260230655 }, { "content": "#[openapi]\n\n#[get(\"/hms/names/<hm_id>\")]\n\npub fn get_hm_name(\n\n sql: State<PkmnapiSQL>,\n\n _rate_limit: RateLimit,\n\n access_token: Result<AccessToken, AccessTokenError>,\n\n hm_id: u8,\n\n) -> Result<Json<HMNameResponse>, ResponseError> {\n\n let access_token = utils::get_access_token(access_token)?;\n\n let (db, _) = utils::get_db_with_applied_patches(&sql, &access_token)?;\n\n\n\n let hm_name = db.get_hm_name(&hm_id)?;\n\n\n\n let response = HMNameResponse::new(&hm_id, &hm_name);\n\n\n\n Ok(Json(response))\n\n}\n", "file_path": "pkmnapi-api/src/routes/hm_names.rs", "rank": 94, "score": 138948.260230655 }, { "content": "#[openapi]\n\n#[get(\"/pokemon/icons\")]\n\npub fn get_pokemon_icon_all(\n\n sql: State<PkmnapiSQL>,\n\n _rate_limit: RateLimit,\n\n access_token: Result<AccessToken, AccessTokenError>,\n\n) -> Result<Json<PokemonIconResponseAll>, ResponseError> {\n\n let access_token = utils::get_access_token(access_token)?;\n\n let (db, _) = utils::get_db_with_applied_patches(&sql, &access_token)?;\n\n\n\n let (min_pokedex_id, max_pokedex_id) = db.pokedex_id_bounds();\n\n let pokedex_ids: Vec<u8> = (min_pokedex_id..=max_pokedex_id)\n\n .map(|pokedex_id| pokedex_id as u8)\n\n .collect();\n\n let pokemon_icons = db.get_pokemon_icon_all(&pokedex_ids)?;\n\n\n\n 
let response = PokemonIconResponseAll::new(&pokedex_ids, &pokemon_icons);\n\n\n\n Ok(Json(response))\n\n}\n\n\n", "file_path": "pkmnapi-api/src/routes/pokemon_icons.rs", "rank": 95, "score": 138948.260230655 }, { "content": "#[openapi]\n\n#[post(\"/moves/stats/<move_id>\", format = \"application/json\", data = \"<data>\")]\n\npub fn post_move_stats(\n\n sql: State<PkmnapiSQL>,\n\n _rate_limit: RateLimit,\n\n access_token: Result<AccessToken, AccessTokenError>,\n\n patch_description: Result<PatchDescription, PatchDescriptionError>,\n\n data: Result<Json<MoveStatsRequest>, JsonError>,\n\n move_id: u8,\n\n) -> Result<status::Accepted<JsonValue>, ResponseError> {\n\n let access_token = utils::get_access_token(access_token)?;\n\n let data = utils::get_data(data, BaseErrorResponseId::error_move_stats_invalid)?;\n\n let (db, connection) = utils::get_db(&sql, &access_token)?;\n\n\n\n let move_stats = MoveStats {\n\n move_id: move_id,\n\n effect: data.get_effect(),\n\n power: data.get_power(),\n\n type_id: data.get_type_id(),\n\n accuracy: data.get_accuracy(),\n\n pp: data.get_pp(),\n\n };\n", "file_path": "pkmnapi-api/src/routes/move_stats.rs", "rank": 96, "score": 138948.260230655 }, { "content": "#[openapi]\n\n#[get(\"/pokemon/evolutions/<pokedex_id>\")]\n\npub fn get_pokemon_evolutions(\n\n sql: State<PkmnapiSQL>,\n\n _rate_limit: RateLimit,\n\n access_token: Result<AccessToken, AccessTokenError>,\n\n pokedex_id: u8,\n\n) -> Result<Json<PokemonEvolutionsResponse>, ResponseError> {\n\n let access_token = utils::get_access_token(access_token)?;\n\n let (db, _) = utils::get_db_with_applied_patches(&sql, &access_token)?;\n\n\n\n let pokemon_evolutions = db.get_pokemon_evolutions(&pokedex_id)?;\n\n let pokedex_ids = pokemon_evolutions\n\n .iter()\n\n .map(|pokemon_evolution| match pokemon_evolution {\n\n PokemonEvolution::LEVEL(evolution) => evolution.pokedex_id,\n\n PokemonEvolution::ITEM(evolution) => evolution.pokedex_id,\n\n PokemonEvolution::TRADE(evolution) => 
evolution.pokedex_id,\n\n })\n\n .collect();\n\n let pokemon_names = db.get_pokemon_name_all(&pokedex_ids)?;\n\n let item_ids = pokemon_evolutions\n", "file_path": "pkmnapi-api/src/routes/pokemon_evolutions.rs", "rank": 97, "score": 138948.260230655 }, { "content": "#[openapi]\n\n#[get(\"/trainers/rewards/<trainer_id>\")]\n\npub fn get_trainer_reward(\n\n sql: State<PkmnapiSQL>,\n\n _rate_limit: RateLimit,\n\n access_token: Result<AccessToken, AccessTokenError>,\n\n trainer_id: u8,\n\n) -> Result<Json<TrainerRewardResponse>, ResponseError> {\n\n let access_token = utils::get_access_token(access_token)?;\n\n let (db, _) = utils::get_db_with_applied_patches(&sql, &access_token)?;\n\n\n\n let trainer_reward = db.get_trainer_reward(&trainer_id)?;\n\n\n\n let response = TrainerRewardResponse::new(&trainer_id, &trainer_reward);\n\n\n\n Ok(Json(response))\n\n}\n\n\n\n#[openapi]\n\n#[post(\n\n \"/trainers/rewards/<trainer_id>\",\n\n format = \"application/json\",\n\n data = \"<data>\"\n\n)]\n", "file_path": "pkmnapi-api/src/routes/trainer_rewards.rs", "rank": 98, "score": 138948.260230655 }, { "content": "#[openapi]\n\n#[post(\"/tms/moves/<tm_id>\", format = \"application/json\", data = \"<data>\")]\n\npub fn post_tm_move(\n\n sql: State<PkmnapiSQL>,\n\n _rate_limit: RateLimit,\n\n access_token: Result<AccessToken, AccessTokenError>,\n\n patch_description: Result<PatchDescription, PatchDescriptionError>,\n\n data: Result<Json<TMMoveRequest>, JsonError>,\n\n tm_id: u8,\n\n) -> Result<status::Accepted<JsonValue>, ResponseError> {\n\n let access_token = utils::get_access_token(access_token)?;\n\n let data = utils::get_data(data, BaseErrorResponseId::error_tm_moves_invalid)?;\n\n let (db, connection) = utils::get_db(&sql, &access_token)?;\n\n\n\n let tm_move = TMMove {\n\n move_id: data.get_move_id(),\n\n };\n\n\n\n let patch = db.set_tm_move(&tm_id, &tm_move)?;\n\n\n\n utils::insert_rom_patch(\n\n sql,\n\n connection,\n\n access_token,\n\n patch,\n\n patch_description,\n\n 
BaseErrorResponseId::error_tm_moves,\n\n )?;\n\n\n\n Ok(status::Accepted(Some(json!({}))))\n\n}\n", "file_path": "pkmnapi-api/src/routes/tm_moves.rs", "rank": 99, "score": 138948.260230655 } ]
Rust
src/timer/wheel.rs
daschl/reef
f2238623bd2cbdf3bc75dbba1e962948a131c551
use chrono::{NaiveDateTime, Duration}; use linked_list::LinkedList; use bit_vec::BitVec; use timer::Timer; use std::cmp; pub struct TimerWheel<'a, T: 'a> { last: Duration, next: Duration, num_buckets: u32, buckets: Vec<LinkedList<&'a T>>, non_empty_buckets: BitVec, } enum RemoveAction { Remove, SeekForward, } impl<'a, T: 'a> TimerWheel<'a, T> where T: Timer { pub fn new() -> TimerWheel<'a, T> { let last = Duration::seconds(0); let next = Duration::max_value(); let num_buckets = next.num_seconds().count_ones() + next.num_seconds().count_zeros() + 1; let mut buckets = Vec::with_capacity(num_buckets as usize); for _ in 0..num_buckets { buckets.push(LinkedList::new()); } let non_empty_buckets = BitVec::from_elem(num_buckets as usize, false); TimerWheel { last: last, next: next, num_buckets: num_buckets, buckets: buckets, non_empty_buckets: non_empty_buckets, } } pub fn insert(&mut self, timer: &'a T) -> bool { let timestamp = TimerWheel::<T>::duration_since_epoch(timer.expires()); let index = self.get_index(timestamp) as usize; self.buckets.get_mut(index).expect("Index out of bounds!").push_back(timer); self.non_empty_buckets.set(index, true); if timestamp < self.next { self.next = timestamp; true } else { false } } pub fn remove(&mut self, timer: &'a T) { let timestamp = TimerWheel::<T>::duration_since_epoch(timer.expires()); let index = self.get_index(timestamp) as usize; let list = self.buckets.get_mut(index).expect("Index out of bounds!"); { let mut cursor = list.cursor(); loop { let action = match cursor.peek_next() { Some(t) => { if *t as *const T == timer as *const T { RemoveAction::Remove } else { RemoveAction::SeekForward } } None => break, }; match action { RemoveAction::Remove => { cursor.remove(); break; } RemoveAction::SeekForward => cursor.seek_forward(1), } } } if list.is_empty() { self.non_empty_buckets.set(index, false); } } pub fn expire(&mut self, now: NaiveDateTime) -> LinkedList<&'a T> { let timestamp = TimerWheel::<T>::duration_since_epoch(now); 
if timestamp < self.last { panic!("The timestamp to expire is smaller than last!"); } let mut expired = LinkedList::new(); let index = self.get_index(timestamp) as usize; let skipped = self.non_empty_buckets .iter() .enumerate() .skip(index + 1) .collect::<Vec<(usize, bool)>>(); for (i, _) in skipped { let length = expired.len(); expired.splice(length, self.buckets.get_mut(i).expect("Expected list")); self.non_empty_buckets.set(i, false); } self.last = timestamp; self.next = Duration::max_value(); let mut reinsert = vec![]; { let mut list = self.buckets.get_mut(index).unwrap(); while !list.is_empty() { let timer = list.pop_front().unwrap(); if timer.expires() <= now { list.push_back(timer); } else { reinsert.push(timer); } } } for t in &mut reinsert { self.insert(t); } self.non_empty_buckets.set(index, !self.buckets.get(index).unwrap().is_empty()); if self.next == Duration::max_value() && self.non_empty_buckets.any() { let list = self.buckets.get(self.last_non_empty_bucket()).unwrap(); for t in list.iter() { self.next = cmp::min(self.next, TimerWheel::<T>::duration_since_epoch(t.expires())); } } expired } fn get_index(&self, timestamp: Duration) -> u32 { if timestamp <= self.last { self.num_buckets - 1 } else { let index = (timestamp.num_seconds() ^ self.last.num_seconds()).leading_zeros(); debug_assert!(index < self.num_buckets - 1); index } } #[inline] fn last_non_empty_bucket(&self) -> usize { let (idx, _) = self.non_empty_buckets .iter() .filter(|b| *b == true) .enumerate() .last() .expect("No non-empty bucket found!"); idx } #[inline] fn duration_since_epoch(time_point: NaiveDateTime) -> Duration { Duration::seconds(time_point.timestamp()) } } #[cfg(test)] mod tests { use super::TimerWheel; use timer::Timer; use chrono::{NaiveDateTime, Duration, UTC, TimeZone}; #[derive(Debug,PartialEq)] struct MyTimer { expires: Duration, } impl Timer for MyTimer { fn expires(&self) -> NaiveDateTime { NaiveDateTime::from_timestamp(self.expires.num_seconds(), 0) } } #[test] fn 
test_insert_and_remove() { let timer = MyTimer { expires: Duration::days(3) }; let mut wheel = TimerWheel::<MyTimer>::new(); let index = wheel.get_index(Duration::days(3)); assert_eq!(Some(false), wheel.non_empty_buckets.get(index as usize)); wheel.insert(&timer); assert_eq!(Some(true), wheel.non_empty_buckets.get(index as usize)); wheel.remove(&timer); assert_eq!(Some(false), wheel.non_empty_buckets.get(index as usize)); } #[test] fn test_expire() { let timer = MyTimer { expires: Duration::days(2) }; let mut wheel = TimerWheel::<MyTimer>::new(); wheel.insert(&timer); let expired = wheel.expire(NaiveDateTime::from_timestamp(Duration::days(1).num_seconds(), 0)); assert_eq!(true, expired.is_empty()); let mut expired = wheel.expire(NaiveDateTime::from_timestamp(Duration::days(4).num_seconds(), 0)); assert_eq!(false, expired.is_empty()); assert_eq!(1, expired.len()); assert_eq!(timer, *expired.pop_front().unwrap()); } #[test] fn test_get_index() { let wheel = TimerWheel::<MyTimer>::new(); let index = wheel.get_index(Duration::seconds(1)); assert_eq!(63, index); } #[test] fn test_since_epoch() { let dt = UTC.ymd(2014, 7, 8).and_hms(9, 10, 11); let duration = TimerWheel::<MyTimer>::duration_since_epoch(dt.naive_utc()); assert_eq!(1404810611, duration.num_seconds()); } }
use chrono::{NaiveDateTime, Duration}; use linked_list::LinkedList; use bit_vec::BitVec; use timer::Timer; use std::cmp; pub struct TimerWheel<'a, T: 'a> { last: Duration, next: Duration, num_buckets: u32, buckets: Vec<LinkedList<&'a T>>, non_empty_buckets: BitVec, } enum RemoveAction { Remove, SeekForward, } impl<'a, T: 'a> TimerWheel<'a, T> where T: Timer { pub fn new() -> TimerWheel<'a, T> { let last = Duration::seconds(0); let next = Duration::max_value(); let num_buckets = next.num_seconds().count_ones() + next.num_seconds().count_zeros() + 1; let mut buckets = Vec::with_capacity(num_buckets as usize); for _ in 0..num_buckets { buckets.push(LinkedList::new()); } let non_empty_buckets = BitVec::from_elem(num_buckets as usize, false); TimerWheel { last: last, next: next, num_buckets: num_buckets, buckets: buckets, non_empty_buckets: non_empty_buckets, } } pub fn insert(&mut self, timer: &'a T) -> bool { let timestamp = TimerWheel::<T>::duration_since_epoch(timer.expires()); let index = self.get_index(timestamp) as usize; self.buckets.get_mut(index).expect("Index out of bounds!").push_back(timer); self.non_empty_buckets.set(index, true); if timestamp < self.next { self.next = timestamp; true } else { false } } pub fn remove(&mut self, timer: &'a T) { let timestamp = TimerWheel::<T>::duration_since_epoch(timer.expires()); let index = self.get_index(timestamp) as usize; let list = self.buckets.get_mut(index).expect("Index out of bounds!"); { let mut cursor = list.cursor(); loop { let action = match cursor.peek_next() { Some(t) => { if *t as *const T == timer as *const T { RemoveAction::Remove } else { RemoveAction::SeekForward } } None => break, }; match action { RemoveAction::Remove => { cursor.remove(); break; } RemoveAction::SeekForward => cursor.seek_forward(1), } } } if list.is_empty() { self.non_empty_buckets.set(index, false); } } pub fn expire(&mut self, now: NaiveDateTime) -> LinkedList<&'a T> { let timestamp = TimerWheel::<T
xpired = wheel.expire(NaiveDateTime::from_timestamp(Duration::days(4).num_seconds(), 0)); assert_eq!(false, expired.is_empty()); assert_eq!(1, expired.len()); assert_eq!(timer, *expired.pop_front().unwrap()); } #[test] fn test_get_index() { let wheel = TimerWheel::<MyTimer>::new(); let index = wheel.get_index(Duration::seconds(1)); assert_eq!(63, index); } #[test] fn test_since_epoch() { let dt = UTC.ymd(2014, 7, 8).and_hms(9, 10, 11); let duration = TimerWheel::<MyTimer>::duration_since_epoch(dt.naive_utc()); assert_eq!(1404810611, duration.num_seconds()); } }
>::duration_since_epoch(now); if timestamp < self.last { panic!("The timestamp to expire is smaller than last!"); } let mut expired = LinkedList::new(); let index = self.get_index(timestamp) as usize; let skipped = self.non_empty_buckets .iter() .enumerate() .skip(index + 1) .collect::<Vec<(usize, bool)>>(); for (i, _) in skipped { let length = expired.len(); expired.splice(length, self.buckets.get_mut(i).expect("Expected list")); self.non_empty_buckets.set(i, false); } self.last = timestamp; self.next = Duration::max_value(); let mut reinsert = vec![]; { let mut list = self.buckets.get_mut(index).unwrap(); while !list.is_empty() { let timer = list.pop_front().unwrap(); if timer.expires() <= now { list.push_back(timer); } else { reinsert.push(timer); } } } for t in &mut reinsert { self.insert(t); } self.non_empty_buckets.set(index, !self.buckets.get(index).unwrap().is_empty()); if self.next == Duration::max_value() && self.non_empty_buckets.any() { let list = self.buckets.get(self.last_non_empty_bucket()).unwrap(); for t in list.iter() { self.next = cmp::min(self.next, TimerWheel::<T>::duration_since_epoch(t.expires())); } } expired } fn get_index(&self, timestamp: Duration) -> u32 { if timestamp <= self.last { self.num_buckets - 1 } else { let index = (timestamp.num_seconds() ^ self.last.num_seconds()).leading_zeros(); debug_assert!(index < self.num_buckets - 1); index } } #[inline] fn last_non_empty_bucket(&self) -> usize { let (idx, _) = self.non_empty_buckets .iter() .filter(|b| *b == true) .enumerate() .last() .expect("No non-empty bucket found!"); idx } #[inline] fn duration_since_epoch(time_point: NaiveDateTime) -> Duration { Duration::seconds(time_point.timestamp()) } } #[cfg(test)] mod tests { use super::TimerWheel; use timer::Timer; use chrono::{NaiveDateTime, Duration, UTC, TimeZone}; #[derive(Debug,PartialEq)] struct MyTimer { expires: Duration, } impl Timer for MyTimer { fn expires(&self) -> NaiveDateTime { 
NaiveDateTime::from_timestamp(self.expires.num_seconds(), 0) } } #[test] fn test_insert_and_remove() { let timer = MyTimer { expires: Duration::days(3) }; let mut wheel = TimerWheel::<MyTimer>::new(); let index = wheel.get_index(Duration::days(3)); assert_eq!(Some(false), wheel.non_empty_buckets.get(index as usize)); wheel.insert(&timer); assert_eq!(Some(true), wheel.non_empty_buckets.get(index as usize)); wheel.remove(&timer); assert_eq!(Some(false), wheel.non_empty_buckets.get(index as usize)); } #[test] fn test_expire() { let timer = MyTimer { expires: Duration::days(2) }; let mut wheel = TimerWheel::<MyTimer>::new(); wheel.insert(&timer); let expired = wheel.expire(NaiveDateTime::from_timestamp(Duration::days(1).num_seconds(), 0)); assert_eq!(true, expired.is_empty()); let mut e
random
[ { "content": "#[bench]\n\nfn remove_one_timer(b: &mut test::Bencher) {\n\n let timer = SimpleTimer { expires: Duration::days(2) };\n\n let mut wheel = TimerWheel::new();\n\n wheel.insert(&timer);\n\n b.iter(|| {\n\n wheel.remove(&timer);\n\n });\n\n}\n", "file_path": "benches/wheel.rs", "rank": 1, "score": 67744.37278041245 }, { "content": "pub trait Timer {\n\n /// Denotes when the timer expires.\n\n fn expires(&self) -> NaiveDateTime;\n\n}\n", "file_path": "src/timer/mod.rs", "rank": 2, "score": 59433.76955480245 }, { "content": "#[bench]\n\nfn insert_one_timer(b: &mut test::Bencher) {\n\n let timer = SimpleTimer { expires: Duration::days(2) };\n\n let mut wheel = TimerWheel::new();\n\n b.iter(|| {\n\n wheel.insert(&timer);\n\n });\n\n}\n\n\n", "file_path": "benches/wheel.rs", "rank": 3, "score": 53403.779254847876 }, { "content": "#[derive(Debug,PartialEq)]\n\nstruct SimpleTimer {\n\n expires: Duration,\n\n}\n\n\n\nimpl Timer for SimpleTimer {\n\n fn expires(&self) -> NaiveDateTime {\n\n NaiveDateTime::from_timestamp(self.expires.num_seconds(), 0)\n\n }\n\n}\n\n\n", "file_path": "benches/wheel.rs", "rank": 4, "score": 48341.06519713012 }, { "content": "#[inline]\n\npub fn err<T, E>(err: E) -> Future<T, E> {\n\n Future::from_err(err)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n\n\n use super::{FutureState, Future, Promise};\n\n\n\n #[test]\n\n fn test_future_from_ok() {\n\n let future = Future::<i32, i32>::from_ok(42);\n\n assert_eq!(true, future.is_ready());\n\n assert_eq!(false, future.is_err());\n\n }\n\n\n\n #[test]\n\n fn test_future_from_err() {\n\n let future = Future::<i32, i32>::from_err(-1);\n\n assert_eq!(true, future.is_ready());\n", "file_path": "src/future.rs", "rank": 5, "score": 31653.333975454603 }, { "content": "#[inline]\n\npub fn ok<T, E>(value: T) -> Future<T, E> {\n\n Future::from_ok(value)\n\n}\n\n\n\n/// Helper method to create a failed, completed `Future`.\n\n///\n\n/// This function is identical to calling `Future::from_err(err)`, it's 
just nicer to read and\n\n/// write in a `Continuation`.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// use reef::future::err;\n\n///\n\n/// let future = err::<i32, _>(-1);\n\n/// assert_eq!(true, future.is_ready());\n\n/// assert_eq!(false, future.is_ok());\n\n/// assert_eq!(true, future.is_err());\n\n/// ```\n", "file_path": "src/future.rs", "rank": 6, "score": 31653.333975454603 }, { "content": "#[derive(Debug)]\n\nstruct User {\n\n name: &'static str,\n\n age: i32,\n\n}\n\n\n", "file_path": "examples/hello_world.rs", "rank": 7, "score": 30872.64381646432 }, { "content": "fn main() {\n\n let (tx, rx) = channel();\n\n\n\n let child = thread::spawn(move || {\n\n\n\n // Create the Promise which will be completed when the vortex is loaded!\n\n let mut start_promise = Promise::<(), ()>::new();\n\n\n\n start_promise.future()\n\n .map(|_| {\n\n println!(\"Connected :)\");\n\n ok(42)\n\n })\n\n .map(|age| {\n\n ok(User {\n\n name: \"Michael\",\n\n age: age,\n\n })\n\n })\n\n .map(|user| {\n", "file_path": "examples/hello_world.rs", "rank": 8, "score": 30454.17436556382 }, { "content": "/// The Task is a generic abstraction over a closure to run inside the event loop.\n\npub trait Task {\n\n /// Run the Task.\n\n #[inline]\n\n fn run(&mut self);\n\n}\n\n\n\n/// A specific Task which takes a closure and runs it, passing the `FutureState` to it.\n\npub struct Continuation<T: 'static, E: 'static, F> {\n\n state: Option<FutureState<T, E>>,\n\n func: F,\n\n}\n\n\n\nimpl<T, E, F> Continuation<T, E, F>\n\n where F: FnMut(FutureState<T, E>)\n\n{\n\n /// Creates a continuation with both the state and the closure.\n\n ///\n\n /// If this method is used, no further information is needed to run the continuation.\n\n #[inline]\n\n pub fn complete(state: FutureState<T, E>, func: F) -> Continuation<T, E, F> {\n", "file_path": "src/task.rs", "rank": 9, "score": 27848.5948195506 }, { "content": "pub mod wheel;\n\n\n\nuse chrono::NaiveDateTime;\n\n\n", "file_path": "src/timer/mod.rs", 
"rank": 21, "score": 18842.081077266037 }, { "content": " /// use reef::future::{FutureState, Promise};\n\n ///\n\n /// let mut promise = Box::new(Promise::<i32, i32>::new());\n\n /// assert_eq!(FutureState::Pending, *promise.state_ref());\n\n ///\n\n /// let future = promise.future();\n\n /// assert_eq!(false, future.is_ready());\n\n ///\n\n /// promise.set_err(-1);\n\n /// assert_eq!(FutureState::Err(-1), *promise.state_ref());\n\n /// assert_eq!(true, future.is_ready());\n\n /// assert_eq!(true, future.is_err());\n\n /// ```\n\n #[inline]\n\n pub fn set_err(&mut self, err: E) {\n\n match self.task_state {\n\n Some(ptr) => unsafe { *ptr = Some(FutureState::Err(err)) },\n\n None => self.state = FutureState::Err(err),\n\n };\n\n self.schedule();\n", "file_path": "src/future.rs", "rank": 22, "score": 10.99202304301513 }, { "content": " pub fn is_err(&self) -> bool {\n\n self.state_ref().is_err()\n\n }\n\n\n\n /// Returns true if this `Future` is ready and is successful.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```\n\n /// use reef::future::Future;\n\n ///\n\n /// let future = Future::<i32, i32>::from_ok(42);\n\n /// assert_eq!(true, future.is_ok());\n\n /// ```\n\n #[inline]\n\n pub fn is_ok(&self) -> bool {\n\n self.state_ref().is_ok()\n\n }\n\n\n\n /// Attaches a closure to run when the `Future` completes.\n", "file_path": "src/future.rs", "rank": 23, "score": 9.268434240466867 }, { "content": " Err(E),\n\n}\n\n\n\nimpl<T, E> FutureState<T, E> {\n\n /// Returns `true` if the state is `Ok`.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```\n\n /// use reef::future::FutureState;\n\n ///\n\n /// let state = FutureState::Pending::<i32, i32>;\n\n /// assert_eq!(false, state.is_ok());\n\n ///\n\n /// let state = FutureState::Ok::<_, i32>(1);\n\n /// assert_eq!(true, state.is_ok());\n\n /// ```\n\n #[inline]\n\n pub fn is_ok(&self) -> bool {\n\n match *self {\n", "file_path": "src/future.rs", "rank": 24, "score": 9.179837304941294 }, { "content": " /// })\n\n /// 
.map(|val| {\n\n /// assert_eq!(\"I am 42 years old!\", val);\n\n /// ok::<(), i32>(())\n\n /// });\n\n /// ```\n\n #[inline]\n\n pub fn map<F, TRES>(&mut self, f: F) -> Future<TRES, E>\n\n where F: Fn(T) -> Future<TRES, E> + 'static\n\n {\n\n if self.is_ready() {\n\n match self.state() {\n\n FutureState::Ok(v) => return f(v),\n\n FutureState::Err(e) => return Future::from_err(e),\n\n _ => panic!(\"This should not happen since we checked the state before.\"),\n\n }\n\n }\n\n\n\n let mut promise = Box::new(Promise::<TRES, E>::new());\n\n let future = promise.future();\n", "file_path": "src/future.rs", "rank": 25, "score": 9.10849406279214 }, { "content": "\n\n #[inline]\n\n pub fn schedule(task: Box<Task>) {\n\n TL_VT.with(|s| s.borrow().as_ref().unwrap().push_task(task));\n\n }\n\n\n\n fn run(&self) {\n\n // Additional Start logic goes here.\n\n\n\n self.start_promise.borrow_mut().set_ok(());\n\n\n\n loop {\n\n let control_msg = self.control.borrow().try_recv();\n\n if control_msg.is_ok() {\n\n match control_msg.unwrap() {\n\n ControlMsg::Stop => break,\n\n }\n\n }\n\n\n\n while let Some(mut task) = self.pop_task() {\n", "file_path": "src/vortex.rs", "rank": 26, "score": 9.093840660318227 }, { "content": " let vortex = Vortex {\n\n control: RefCell::new(control),\n\n task_queue: RefCell::new(VecDeque::new()),\n\n start_promise: RefCell::new(start_promise),\n\n };\n\n\n\n TL_VT.with(|s| {\n\n let mut s = s.borrow_mut();\n\n assert!(s.is_none());\n\n *s = Some(vortex);\n\n });\n\n }\n\n\n\n pub fn start() {\n\n TL_VT.with(|s| {\n\n let s = s.borrow();\n\n let v = (*s).as_ref().unwrap();\n\n v.run();\n\n });\n\n }\n", "file_path": "src/vortex.rs", "rank": 27, "score": 8.96533054115303 }, { "content": " &mut self.state\n\n }\n\n}\n\n\n\nimpl<T, E, F> Task for Continuation<T, E, F>\n\n where F: FnMut(FutureState<T, E>)\n\n{\n\n #[inline]\n\n fn run(&mut self) {\n\n let state = mem::replace(&mut self.state, None);\n\n (self.func)(state.expect(\"State not set when 
Continuation is run.\"));\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n\n\n use future::FutureState;\n\n use super::{Task, Continuation};\n\n\n", "file_path": "src/task.rs", "rank": 28, "score": 8.511455285922075 }, { "content": " ///\n\n /// let future = Future::<i32, i32>::from_err(-1);\n\n /// assert_eq!(true, future.is_ready());\n\n /// ```\n\n #[inline]\n\n pub fn is_ready(&self) -> bool {\n\n self.state_ref().is_ready()\n\n }\n\n\n\n /// Returns true if this `Future` is ready and has failed.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```\n\n /// use reef::future::Future;\n\n ///\n\n /// let future = Future::<i32, i32>::from_err(-1);\n\n /// assert_eq!(true, future.is_err());\n\n /// ```\n\n #[inline]\n", "file_path": "src/future.rs", "rank": 29, "score": 8.264639474013716 }, { "content": " let mut promise = Box::new(Promise::<TRES, E>::new());\n\n let future = promise.future();\n\n\n\n self.schedule(move |state| {\n\n match f(state).state() {\n\n FutureState::Ok(iv) => promise.set_ok(iv),\n\n FutureState::Err(ie) => promise.set_err(ie),\n\n _ => panic!(\"This should not happen since we checked the state before.\"),\n\n }\n\n });\n\n\n\n future\n\n }\n\n\n\n /// Schedules the Future if `Ready` or stores the Closure in the Promise if `Deferred`.\n\n #[inline]\n\n fn schedule<F>(&mut self, f: F)\n\n where F: FnMut(FutureState<T, E>) + 'static\n\n {\n\n match *self {\n", "file_path": "src/future.rs", "rank": 30, "score": 8.219798375752555 }, { "content": " Promise {\n\n future: None,\n\n state: FutureState::Pending,\n\n task: None,\n\n task_state: None,\n\n }\n\n }\n\n\n\n /// Creates a `Future` and links it with this `Promise`.\n\n ///\n\n /// **Important:** Make sure that the promise is boxed before calling `future` if you want\n\n /// to move the promise out of the stack, otherwise the unsafe pointers get screwed and\n\n /// you end up with a segfaults.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```\n\n /// use reef::future::{FutureState, Promise};\n\n 
///\n\n /// let mut promise = Box::new(Promise::<i32, i32>::new());\n", "file_path": "src/future.rs", "rank": 31, "score": 8.214435244164044 }, { "content": " }\n\n\n\n /// Stores the closure in a Task for later scheduling.\n\n #[inline]\n\n fn store_task<F>(&mut self, f: F)\n\n where F: FnMut(FutureState<T, E>) + 'static\n\n {\n\n let mut continuation = Box::new(Continuation::deferred(f));\n\n self.task_state = Some(continuation.state_as_mut());\n\n self.task = Some(continuation);\n\n }\n\n\n\n /// If the Task is set on this `Promise` it will be scheduled to run on the Vortex.\n\n #[inline]\n\n fn schedule(&mut self) {\n\n if self.task.is_some() {\n\n let task = mem::replace(&mut self.task, None)\n\n .expect(\"Task is None even if checked right before.\");\n\n Vortex::schedule(task);\n\n }\n", "file_path": "src/future.rs", "rank": 32, "score": 8.199145380385435 }, { "content": " FutureState::Ok(_) => true,\n\n _ => false,\n\n }\n\n }\n\n\n\n /// Returns `true` if the state is `Err`.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```\n\n /// use reef::future::FutureState;\n\n ///\n\n /// let state = FutureState::Pending::<i32, i32>;\n\n /// assert_eq!(false, state.is_err());\n\n ///\n\n /// let state = FutureState::Err::<i32, _>(-1);\n\n /// assert_eq!(true, state.is_err());\n\n /// ```\n\n #[inline]\n\n pub fn is_err(&self) -> bool {\n", "file_path": "src/future.rs", "rank": 33, "score": 8.124768365637498 }, { "content": "#![feature(test)]\n\n\n\nextern crate reef;\n\nextern crate test;\n\nextern crate chrono;\n\n\n\nuse reef::timer::wheel::TimerWheel;\n\nuse reef::timer::Timer;\n\nuse chrono::{Duration, NaiveDateTime};\n\n\n\n#[derive(Debug,PartialEq)]\n", "file_path": "benches/wheel.rs", "rank": 34, "score": 8.107069684312567 }, { "content": " /// assert_eq!(true, state.is_ready());\n\n /// ```\n\n #[inline]\n\n pub fn is_ready(&self) -> bool {\n\n self.is_ok() || self.is_err()\n\n }\n\n}\n\n\n\n/// Allows a `Future` to be completed at a later point in 
time.\n\n///\n\n/// When a new `Promise` is created through `Promise::new()` it does not reference a `Future`\n\n/// right away. Only once `future` is called then a `Future` is returned and (unsafe) pointer\n\n/// relationships are established between them.\n\n///\n\n/// If a `Task` (a `Continuation`) has been attached already then all state which is set through\n\n/// either the `set_ok` or the `set_err` methods is delivered into the task.\n\npub struct Promise<T: 'static, E: 'static> {\n\n future: Option<*mut Future<T, E>>,\n\n state: FutureState<T, E>,\n\n task: Option<Box<Task>>,\n", "file_path": "src/future.rs", "rank": 35, "score": 8.056824797472288 }, { "content": " task_state: Option<*mut Option<FutureState<T, E>>>,\n\n}\n\n\n\nimpl<T, E> Promise<T, E> {\n\n /// Create a new `Promise`.\n\n ///\n\n /// **Important:** If you want to move out the future later on, keep in mind to Box it before\n\n /// calling `future()`, otherwise the unsafe pointers will move and you'll get a segfault. Once\n\n /// all the unsafe code is removed hopefully this will be handled better.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```\n\n /// use reef::future::{FutureState, Promise};\n\n ///\n\n /// let promise = Box::new(Promise::<i32, i32>::new());\n\n /// assert_eq!(FutureState::Pending, *promise.state_ref());\n\n /// ```\n\n #[inline]\n\n pub fn new() -> Promise<T, E> {\n", "file_path": "src/future.rs", "rank": 36, "score": 7.952769358411684 }, { "content": "use std::collections::vec_deque::VecDeque;\n\nuse std::cell::RefCell;\n\nuse std::sync::mpsc::Receiver;\n\nuse task::Task;\n\nuse future::Promise;\n\n\n\nthread_local! 
{ static TL_VT: RefCell<Option<Vortex>> = RefCell::new(None) }\n\n\n\npub enum ControlMsg {\n\n Stop,\n\n}\n\n\n\npub struct Vortex {\n\n control: RefCell<Receiver<ControlMsg>>,\n\n task_queue: RefCell<VecDeque<Box<Task>>>,\n\n start_promise: RefCell<Promise<(), ()>>,\n\n}\n\n\n\nimpl Vortex {\n\n pub fn init(control: Receiver<ControlMsg>, start_promise: Promise<(), ()>) {\n", "file_path": "src/vortex.rs", "rank": 37, "score": 7.794562382791203 }, { "content": "extern crate chrono;\n\n\n\nextern crate bit_vec;\n\nextern crate linked_list;\n\n\n\npub mod task;\n\npub mod timer;\n\npub mod future;\n\npub mod vortex;\n", "file_path": "src/lib.rs", "rank": 38, "score": 7.697218838433191 }, { "content": " /// promise.set_ok(42);\n\n /// assert_eq!(FutureState::Ok(42), *promise.state_ref());\n\n /// assert_eq!(true, future.is_ready());\n\n /// assert_eq!(false, future.is_err());\n\n /// ```\n\n #[inline]\n\n pub fn set_ok(&mut self, value: T) {\n\n match self.task_state {\n\n Some(ptr) => unsafe { *ptr = Some(FutureState::Ok(value)) },\n\n None => self.state = FutureState::Ok(value),\n\n };\n\n self.schedule();\n\n }\n\n\n\n /// Sets the error on the Promise (completing the Future), scheduling the stored task as a\n\n /// side-effect.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```\n", "file_path": "src/future.rs", "rank": 39, "score": 7.599814040751658 }, { "content": " pub fn state_mut(&mut self) -> &mut FutureState<T, E> {\n\n self.task_state.map_or(&mut self.state, |ptr| {\n\n unsafe { (*ptr).as_mut().expect(\"Task State Pointer is null!\") }\n\n })\n\n }\n\n\n\n /// Sets the value on the Promise (completing the Future), scheduling the stored task as a\n\n /// side-effect.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```\n\n /// use reef::future::{FutureState, Promise};\n\n ///\n\n /// let mut promise = Box::new(Promise::<i32, i32>::new());\n\n /// assert_eq!(FutureState::Pending, *promise.state_ref());\n\n ///\n\n /// let future = promise.future();\n\n /// 
assert_eq!(false, future.is_ready());\n\n ///\n", "file_path": "src/future.rs", "rank": 40, "score": 7.5919465746213195 }, { "content": " /// ```\n\n #[inline]\n\n pub fn state_ref(&self) -> &FutureState<T, E> {\n\n self.task_state.map_or(&self.state, |ptr| {\n\n unsafe { (*ptr).as_ref().expect(\"Task State Pointer is null!\") }\n\n })\n\n }\n\n\n\n /// Returns the current state of the `Promise` (which mirrors the state of the `Future`) as a\n\n /// mutable reference.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```\n\n /// use reef::future::{FutureState, Promise};\n\n ///\n\n /// let mut promise = Box::new(Promise::<i32, i32>::new());\n\n /// assert_eq!(FutureState::Pending, *promise.state_mut());\n\n /// ```\n\n #[inline]\n", "file_path": "src/future.rs", "rank": 41, "score": 7.25246403695898 }, { "content": " /// assert_eq!(FutureState::Pending, *promise.state_ref());\n\n ///\n\n /// let future = promise.future();\n\n /// assert_eq!(false, future.is_ready());\n\n /// ```\n\n #[inline]\n\n pub fn future(&mut self) -> Future<T, E> {\n\n Future::deferred(self)\n\n }\n\n\n\n /// Returns the current state of the `Promise` (which mirrors the state of the `Future`) as a\n\n /// immutable reference.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```\n\n /// use reef::future::{FutureState, Promise};\n\n ///\n\n /// let mut promise = Box::new(Promise::<i32, i32>::new());\n\n /// assert_eq!(FutureState::Pending, *promise.state_ref());\n", "file_path": "src/future.rs", "rank": 42, "score": 7.0955899003556215 }, { "content": " /// linked promise or directly from the locally stored state.\n\n #[inline]\n\n fn state(&mut self) -> FutureState<T, E> {\n\n match *self {\n\n Future::Ready(ref mut state) => unsafe { ptr::replace(state, FutureState::Invalid) },\n\n Future::Deferred(promise) => unsafe {\n\n let state = promise.as_mut()\n\n .expect(\"State in deferred Future is null\")\n\n .state_mut();\n\n mem::replace(state, FutureState::Invalid)\n\n },\n\n }\n\n }\n\n\n\n /// Returns 
true if this `Future` is ready (either errored or successful).\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```\n\n /// use reef::future::Future;\n", "file_path": "src/future.rs", "rank": 43, "score": 7.000349012888507 }, { "content": " /// assert!(false);\n\n /// ok::<(), i32>(())\n\n /// })\n\n /// .map_state(|state| {\n\n /// match state {\n\n /// FutureState::Ok(_) => assert!(false),\n\n /// FutureState::Err(e) => assert_eq!(-1, e),\n\n /// _ => assert!(false),\n\n /// }\n\n /// ok::<(), i32>(())\n\n /// });\n\n /// ```\n\n #[inline]\n\n pub fn map_state<F, TRES>(&mut self, f: F) -> Future<TRES, E>\n\n where F: Fn(FutureState<T, E>) -> Future<TRES, E> + 'static\n\n {\n\n if self.is_ready() {\n\n return f(self.state());\n\n }\n\n\n", "file_path": "src/future.rs", "rank": 44, "score": 6.671070346174996 }, { "content": " Continuation {\n\n state: Some(state),\n\n func: func,\n\n }\n\n }\n\n\n\n /// Creates a deferred continuation with just the closure, the state needs to be set\n\n /// separately before running it.\n\n #[inline]\n\n pub fn deferred(func: F) -> Continuation<T, E, F> {\n\n Continuation {\n\n state: None,\n\n func: func,\n\n }\n\n }\n\n\n\n /// Returns a mutable borrow of the internal future state so it can be modified when a\n\n /// deferred continuation is created.\n\n #[inline]\n\n pub fn state_as_mut(&mut self) -> &mut Option<FutureState<T, E>> {\n", "file_path": "src/task.rs", "rank": 45, "score": 6.489673534877671 }, { "content": " /// `future()` on the `Promise` is called.\n\n #[inline]\n\n fn deferred(promise: &mut Promise<T, E>) -> Future<T, E> {\n\n let mut future = Future::Deferred(promise as *mut Promise<T, E>);\n\n promise.future = Some(&mut future as *mut Future<T, E>);\n\n future\n\n }\n\n\n\n /// Creates a `Future` which is ready and has been completed with a value.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```\n\n /// use reef::future::Future;\n\n ///\n\n /// let future = Future::<i32, i32>::from_ok(42);\n\n /// assert_eq!(true, 
future.is_ready());\n\n /// assert_eq!(true, future.is_ok());\n\n /// assert_eq!(false, future.is_err());\n\n /// ```\n", "file_path": "src/future.rs", "rank": 46, "score": 6.168597581428259 }, { "content": " match *self {\n\n FutureState::Err(_) => true,\n\n _ => false,\n\n }\n\n }\n\n\n\n /// Returns `true` if the state is either `Ok` or `Err`.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```\n\n /// use reef::future::FutureState;\n\n ///\n\n /// let state = FutureState::Pending::<i32, i32>;\n\n /// assert_eq!(false, state.is_ready());\n\n ///\n\n /// let state = FutureState::Err::<i32, _>(-1);\n\n /// assert_eq!(true, state.is_ready());\n\n ///\n\n /// let state = FutureState::Ok::<_, i32>(1);\n", "file_path": "src/future.rs", "rank": 47, "score": 6.125383468679971 }, { "content": " #[inline]\n\n pub fn from_ok(value: T) -> Future<T, E> {\n\n Future::Ready(FutureState::Ok(value))\n\n }\n\n\n\n /// Creates a `Future` which is ready and has been completed with an error.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```\n\n /// use reef::future::Future;\n\n ///\n\n /// let future = Future::<i32, i32>::from_err(-1);\n\n /// assert_eq!(true, future.is_ready());\n\n /// assert_eq!(false, future.is_ok());\n\n /// assert_eq!(true, future.is_err());\n\n /// ```\n\n #[inline]\n\n pub fn from_err(error: E) -> Future<T, E> {\n\n Future::Ready(FutureState::Err(error))\n", "file_path": "src/future.rs", "rank": 48, "score": 5.727583507432403 }, { "content": "extern crate reef;\n\n\n\nuse reef::vortex::{Vortex, ControlMsg};\n\nuse reef::future::{Promise, ok};\n\n\n\nuse std::thread;\n\nuse std::thread::sleep;\n\nuse std::time::Duration;\n\nuse std::sync::mpsc::channel;\n\n\n\n#[derive(Debug)]\n", "file_path": "examples/hello_world.rs", "rank": 49, "score": 5.195859087537274 }, { "content": " Future::Ready(_) => Vortex::schedule(Box::new(Continuation::complete(self.state(), f))),\n\n Future::Deferred(promise) => unsafe { (*promise).store_task(f) },\n\n }\n\n }\n\n}\n\n\n\n/// Helper 
method to create a successful, completed `Future`.\n\n///\n\n/// This function is identical to calling `Future::from_ok(value)`, it's just nicer to read and\n\n/// write in a `Continuation`.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// use reef::future::ok;\n\n///\n\n/// let future = ok::<_, i32>(42);\n\n/// assert_eq!(true, future.is_ready());\n\n/// assert_eq!(true, future.is_ok());\n\n/// assert_eq!(false, future.is_err());\n\n/// ```\n\n#[inline]\n", "file_path": "src/future.rs", "rank": 50, "score": 5.168742210313839 }, { "content": "\n\n #[test]\n\n fn test_extracting_state_from_ready_future() {\n\n let mut future = Future::<i32, i32>::from_ok(42);\n\n assert_eq!(true, future.is_ready());\n\n assert_eq!(false, future.is_err());\n\n\n\n let state = future.state();\n\n assert_eq!(FutureState::Ok(42), state);\n\n }\n\n\n\n #[test]\n\n fn test_extracting_state_from_deferred_future() {\n\n let mut promise = Promise::<i32, i32>::new();\n\n assert_eq!(FutureState::Pending, *promise.state_ref());\n\n\n\n promise.set_ok(42);\n\n assert_eq!(FutureState::Ok(42), *promise.state_ref());\n\n\n\n {\n", "file_path": "src/future.rs", "rank": 51, "score": 4.83089426284918 }, { "content": " assert_eq!(true, future.is_ready());\n\n assert_eq!(false, future.is_err());\n\n }\n\n\n\n #[test]\n\n fn test_set_promise_state_before_future() {\n\n let mut promise = Promise::<i32, i32>::new();\n\n assert_eq!(FutureState::Pending, *promise.state_ref());\n\n promise.set_ok(42);\n\n {\n\n let future = promise.future();\n\n assert_eq!(FutureState::Ok(42), *future.state_ref());\n\n assert_eq!(true, future.is_ready());\n\n assert_eq!(false, future.is_err());\n\n }\n\n }\n\n\n\n #[test]\n\n fn test_correct_states_on_pending() {\n\n let state = FutureState::Pending::<i32, i32>;\n", "file_path": "src/future.rs", "rank": 52, "score": 4.793425121947317 }, { "content": " assert_eq!(FutureState::Ok(42), *future.state_ref());\n\n assert_eq!(true, future.is_ready());\n\n assert_eq!(false, 
future.is_err());\n\n }\n\n\n\n }\n\n\n\n #[test]\n\n fn test_future_raw_pointer_when_promise_moved() {\n\n let mut promise = Promise::<i32, i32>::new();\n\n assert_eq!(FutureState::Pending, *promise.state_ref());\n\n promise.set_ok(42);\n\n move_promise_here(promise);\n\n }\n\n\n\n fn move_promise_here(mut promise: Promise<i32, i32>) {\n\n assert_eq!(FutureState::Ok(42), *promise.state_ref());\n\n\n\n let future = promise.future();\n\n assert_eq!(FutureState::Ok(42), *future.state_ref());\n", "file_path": "src/future.rs", "rank": 53, "score": 4.530312712856939 }, { "content": "//! Tasks and Continuations for executing and chaining Futures.\n\n//!\n\n//! # Overview\n\n//!\n\n//! Tasks and Continuations are used to \"glue\" futures together and execute them on the Vortex\n\n//! in an abstract fashion. Since every continuation is different the Vortex just executes Tasks\n\n//! as they come in (by calling its `run` method).\n\n\n\nuse future::FutureState;\n\nuse std::mem;\n\n\n\n/// The Task is a generic abstraction over a closure to run inside the event loop.\n", "file_path": "src/task.rs", "rank": 54, "score": 4.478592058589216 }, { "content": " #[test]\n\n fn should_run_continuation_with_ok() {\n\n let mut cont = Continuation::<_, i32, _>::complete(FutureState::Ok(1), |state| {\n\n match state {\n\n FutureState::Ok(v) => assert_eq!(v, 1),\n\n _ => assert!(false),\n\n }\n\n });\n\n\n\n cont.run();\n\n }\n\n\n\n #[test]\n\n fn should_run_continuation_with_err() {\n\n let mut cont = Continuation::<i32, _, _>::complete(FutureState::Err(-1), |state| {\n\n match state {\n\n FutureState::Err(e) => assert_eq!(e, -1),\n\n _ => assert!(false),\n\n }\n\n });\n\n\n\n cont.run();\n\n }\n\n\n\n}\n", "file_path": "src/task.rs", "rank": 55, "score": 4.456686245496774 }, { "content": " assert_eq!(true, future.is_err());\n\n }\n\n\n\n #[test]\n\n fn test_set_promise_state_after_future() {\n\n let mut promise = Promise::<i32, i32>::new();\n\n assert_eq!(FutureState::Pending, 
*promise.state_ref());\n\n\n\n {\n\n let future = promise.future();\n\n assert_eq!(FutureState::Pending, *future.state_ref());\n\n assert_eq!(false, future.is_ready());\n\n assert_eq!(false, future.is_err());\n\n }\n\n\n\n promise.set_ok(42);\n\n assert_eq!(FutureState::Ok(42), *promise.state_ref());\n\n\n\n {\n\n let future = promise.future();\n", "file_path": "src/future.rs", "rank": 56, "score": 4.306130373804857 }, { "content": " println!(\"{:?}\", user);\n\n ok(())\n\n });\n\n\n\n\n\n // Initialize the Vortex\n\n Vortex::init(rx, start_promise);\n\n\n\n // Start the Vortex (run the event loop)\n\n Vortex::start();\n\n });\n\n\n\n sleep(Duration::from_millis(1000));\n\n tx.send(ControlMsg::Stop).unwrap();\n\n child.join().unwrap();\n\n}\n", "file_path": "examples/hello_world.rs", "rank": 57, "score": 4.236398061561451 }, { "content": " }\n\n}\n\n\n\n/// Represents a result which may be completed at some point (or is already).\n\n///\n\n/// If a `Future` is initialized with a `Promise` it starts in a Deferred state. If it is\n\n/// initialized from either a value or an error it immediately goes into `Ready` state. 
A\n\n/// `Ready` future maintains its own `FutureState` while a `Deferred` one keeps the state\n\n/// inside the linked `Promise`.\n\npub enum Future<T: 'static, E: 'static> {\n\n /// The Future is not yet completed and depends on a Promise for state.\n\n Deferred(*mut Promise<T, E>),\n\n /// The Future is already completed and carries its own state.\n\n Ready(FutureState<T, E>),\n\n}\n\n\n\nimpl<T, E> Future<T, E> {\n\n /// Create a deferred Future, depending on a Promise.\n\n ///\n\n /// This method should not be called directly by the user but instead it is used when\n", "file_path": "src/future.rs", "rank": 58, "score": 3.6925188813290473 }, { "content": "\n\nuse task::Continuation;\n\nuse task::Task;\n\nuse vortex::Vortex;\n\n\n\n/// Contains the current state of a `Future`, `Promise` or `Continuation`.\n\n///\n\n/// Since state can be hold in various places (Futures, Promises and Continuations) this enum\n\n/// abstracts their different representations. See the variants for more information on the\n\n/// possible states.\n\n#[derive(Debug,PartialEq)]\n\npub enum FutureState<T: 'static, E: 'static> {\n\n /// This state is invalid, most likely because the previous state has been moved somewhere\n\n /// else already.\n\n Invalid,\n\n /// The state is not yet known, most likely the `Promise` is not completed yet.\n\n Pending,\n\n /// The state is known and it completed with a successful value `T`.\n\n Ok(T),\n\n /// The state is known and it completed with an error `E`.\n", "file_path": "src/future.rs", "rank": 59, "score": 3.5683819571909323 }, { "content": " }\n\n\n\n #[test]\n\n fn test_then_wrapped_on_ready_future() {\n\n let mut future = Future::<i32, i32>::from_ok(42);\n\n future.map(|val| {\n\n assert_eq!(42, val);\n\n Future::<i32, i32>::from_err(-1)\n\n })\n\n .map_state(|state| {\n\n match state {\n\n FutureState::Ok(_) => assert!(false),\n\n FutureState::Err(e) => assert_eq!(-1, e),\n\n _ => assert!(false),\n\n }\n\n Future::<(), i32>::from_ok(())\n\n 
});\n\n }\n\n\n\n #[test]\n", "file_path": "src/future.rs", "rank": 60, "score": 3.3613832740352407 }, { "content": " fn test_ignore_then_on_err() {\n\n let mut future = Future::<i32, i32>::from_ok(42);\n\n future.map(|val| {\n\n assert_eq!(42, val);\n\n Future::<i32, i32>::from_err(-1)\n\n })\n\n .map(|_| {\n\n assert!(false);\n\n Future::<(), i32>::from_ok(())\n\n })\n\n .map_state(|state| {\n\n match state {\n\n FutureState::Ok(_) => assert!(false),\n\n FutureState::Err(e) => assert_eq!(-1, e),\n\n _ => assert!(false),\n\n }\n\n Future::<(), i32>::from_ok(())\n\n });\n\n }\n\n\n\n}\n", "file_path": "src/future.rs", "rank": 61, "score": 3.2643150866723794 }, { "content": " ///\n\n /// This method is one of the most important ones on the Future. A closure needs to be passed\n\n /// in that will be called with the result of current Future. It can then perform all kinds\n\n /// of operations in it but it must return another `Future` where yet another closure can\n\n /// be attached.\n\n ///\n\n /// **Important:** This method will only be called if the Future is successful. If the Future\n\n /// fails, the error handling closures will be called instead. 
If your closure needs to react\n\n /// to both successful and failed results, use `map_state`.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```\n\n /// use reef::future::{Future, ok};\n\n ///\n\n /// let mut future = ok::<i32, i32>(42);\n\n /// future\n\n /// .map(|val| {\n\n /// assert_eq!(42, val);\n\n /// ok::<String, i32>(format!(\"I am {} years old!\", val))\n", "file_path": "src/future.rs", "rank": 62, "score": 3.1199834222733855 }, { "content": " assert_eq!(false, state.is_ok());\n\n assert_eq!(false, state.is_err());\n\n assert_eq!(false, state.is_ready());\n\n }\n\n\n\n #[test]\n\n fn test_correct_states_on_ok() {\n\n let state = FutureState::Ok::<_, i32>(1);\n\n assert_eq!(true, state.is_ok());\n\n assert_eq!(false, state.is_err());\n\n assert_eq!(true, state.is_ready());\n\n }\n\n\n\n #[test]\n\n fn test_correct_states_on_err() {\n\n let state = FutureState::Err::<i32, _>(-1);\n\n assert_eq!(false, state.is_ok());\n\n assert_eq!(true, state.is_err());\n\n assert_eq!(true, state.is_ready());\n\n }\n", "file_path": "src/future.rs", "rank": 63, "score": 2.714791414899485 }, { "content": " /// This method is one of the most important ones on the Future. A closure needs to be passed\n\n /// in that will be called with the resulting state of current Future. 
It can then perform all\n\n /// kinds of operations in it but it must return another `Future` where yet another closure can\n\n /// be attached.\n\n ///\n\n /// This method will be called with the `FutureState` in both the successful and failed case.\n\n /// The methods on the `FutureState` are available to extract the value and check the outcome.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```\n\n /// use reef::future::{Future, ok, err, FutureState};\n\n ///\n\n /// let mut future = ok::<i32, i32>(42);\n\n /// future.map(|val| {\n\n /// assert_eq!(42, val);\n\n /// err::<i32, i32>(-1)\n\n /// })\n\n /// .map(|_| {\n\n /// // This will never be called!\n", "file_path": "src/future.rs", "rank": 64, "score": 2.554112791442569 }, { "content": " task.run();\n\n }\n\n }\n\n }\n\n\n\n #[inline]\n\n fn pop_task(&self) -> Option<Box<Task>> {\n\n self.task_queue.borrow_mut().pop_back()\n\n }\n\n\n\n #[inline]\n\n fn push_task(&self, task: Box<Task>) {\n\n self.task_queue.borrow_mut().push_front(task);\n\n }\n\n}\n", "file_path": "src/vortex.rs", "rank": 65, "score": 2.414949084652341 }, { "content": "\n\n self.schedule(move |state| {\n\n match state {\n\n FutureState::Ok(v) => {\n\n match f(v).state() {\n\n FutureState::Ok(iv) => promise.set_ok(iv),\n\n FutureState::Err(ie) => promise.set_err(ie),\n\n _ => panic!(\"This should not happen since we checked the state before.\"),\n\n }\n\n }\n\n FutureState::Err(e) => promise.set_err(e),\n\n _ => panic!(\"This should not happen since we checked the state before.\"),\n\n }\n\n });\n\n\n\n future\n\n }\n\n\n\n /// Attaches a closure to run when the `Future` completes.\n\n ///\n", "file_path": "src/future.rs", "rank": 66, "score": 2.3749754502087264 }, { "content": " let mut future = promise.future();\n\n let state = future.state();\n\n assert_eq!(FutureState::Ok(42), state);\n\n assert_eq!(FutureState::Invalid, *future.state_ref());\n\n }\n\n\n\n assert_eq!(FutureState::Invalid, *promise.state_ref());\n\n }\n\n\n\n #[test]\n\n fn 
test_then_on_ready_future() {\n\n let mut future = Future::<i32, i32>::from_ok(42);\n\n future.map(|val| {\n\n assert_eq!(42, val);\n\n Future::<i32, i32>::from_ok(22)\n\n })\n\n .map(|val| {\n\n assert_eq!(22, val);\n\n Future::<(), i32>::from_ok(())\n\n });\n", "file_path": "src/future.rs", "rank": 67, "score": 1.9654755647607058 }, { "content": "# reef\n\n\n\nDo not use this, go use [tokio](https://github.com/tokio-rs) instead :)\n\n\n\nreef is an event-driven framework which allows you to build safe and highly\n\nperformant server application. It is (mostly) a [Rust](https://www.rust-lang.org/) port of the awesome\n\n[seastar](http://www.seastar-project.org/) framework which is written in C++.\n\n\n\nWhile the plan is to have something stable and production ready eventually, This\n\nproject is under heavy development and subject to change quite a bit. I'm experimenting\n\nwith all bits and pieces here, so expect it to eat your laundry and more.\n\n\n\nInstallation, Usage and Tutorials will come at a later point. For now treat it\n\nlike any other rust library with sucking documentation and check out the examples\n\nand source code ;)\n", "file_path": "README.md", "rank": 68, "score": 1.8570587046400904 }, { "content": "//! Main concurrency building blocks (Futures and Promises).\n\n//!\n\n//! # Overview\n\n//!\n\n//! Futures and Promises form the basic building blocks of concurrency in reef. A `Future`\n\n//! represents a result which may have been computed yet (and it may be computed with an error\n\n//! eventually). The `Promise` is responsible for completing the Future and filling its state\n\n//! with either the successful value or the error.\n\n//!\n\n//! A callback (typically a closure) can be attached to a `Future` which will be executed when it\n\n//! completes. Internally this is called a `Continuation`. Reef will make sure to schedule their\n\n//! callbacks and execute them when needed, so this is hidden from the user.\n\n//!\n\n//! 
If a value is known already, a `Future` can also be created directly without an associated\n\n//! `Promise`, in this case it will immediately be set into a completed state. Please see the\n\n//! individual documentations for Promises and Futures on their respective usage and more info\n\n//! on their inner workings.\n\n\n\nuse std::ptr;\n\nuse std::mem;\n", "file_path": "src/future.rs", "rank": 69, "score": 1.741704597986833 }, { "content": " }\n\n\n\n /// Returns the underlying `FutureState` as a immutable reference.\n\n ///\n\n /// **Note:** depending on if deferred or ready the state is either extracted from the\n\n /// linked promise or directly from the locally stored state.\n\n #[inline]\n\n fn state_ref(&self) -> &FutureState<T, E> {\n\n match *self {\n\n Future::Ready(ref state) => &state,\n\n Future::Deferred(promise) => unsafe {\n\n promise.as_ref().expect(\"State in deferred Future is null\").state_ref()\n\n },\n\n }\n\n }\n\n\n\n /// Returns the underlying `FutureState` with ownership and invalidates the state on the\n\n /// called instance.\n\n ///\n\n /// **Note:** depending on if deferred or ready the state is either extracted from the\n", "file_path": "src/future.rs", "rank": 70, "score": 1.507399629770827 } ]
Rust
path_ext/src/lib.rs
pwil3058/ergibus
c7c15d82998e9021ccc64ce192882da5cc89a25e
use std::{ env, io, path::{Component, Path, PathBuf, StripPrefixError}, }; use dirs; use thiserror::Error; #[derive(Debug, Error)] pub enum Error { #[error("Could not find user's home directory.")] CouldNotFindHome, #[error("Could not find current directory's parent.")] CouldNotFindParent, #[error("I/O Error")] IOError(#[from] io::Error), #[error("Error stripping path's prefix")] StripPrefixError(#[from] StripPrefixError), #[error("Unexpected prefix for this operation.")] UnexpectedPrefix, } #[derive(Debug, PartialEq, Eq, Clone, Copy)] pub enum PathType { Absolute, RelativeCurDir, RelativeCurDirImplicit, RelativeParentDir, RelativeHomeDir, Empty, } impl PathType { pub fn of<P: AsRef<Path>>(path_arg: P) -> Self { let path = path_arg.as_ref(); match path.components().next() { None => PathType::Empty, Some(component) => match component { Component::RootDir | Component::Prefix(_) => PathType::Absolute, Component::CurDir => PathType::RelativeCurDir, Component::ParentDir => PathType::RelativeParentDir, Component::Normal(os_string) => { if os_string == "~" { PathType::RelativeHomeDir } else { PathType::RelativeCurDirImplicit } } }, } } } pub fn expand_current_dir<P: AsRef<Path>>(path_arg: P) -> Result<PathBuf, Error> { let path = path_arg.as_ref(); if path.starts_with(Component::CurDir) { let cur_dir = env::current_dir()?; let path_tail = path.strip_prefix(Component::CurDir)?; Ok(cur_dir.join(path_tail)) } else { Err(Error::UnexpectedPrefix) } } pub fn expand_parent_dir<P: AsRef<Path>>(path_arg: P) -> Result<PathBuf, Error> { let path = path_arg.as_ref(); if path.starts_with(Component::ParentDir) { let cur_dir = env::current_dir()?; let parent_dir = match cur_dir.parent() { Some(parent_dir) => parent_dir, None => return Err(Error::CouldNotFindParent), }; let path_tail = path.strip_prefix(Component::ParentDir)?; Ok(parent_dir.join(path_tail)) } else { Err(Error::UnexpectedPrefix) } } pub fn expand_home_dir<P: AsRef<Path>>(path_arg: P) -> Result<PathBuf, Error> { let path = 
path_arg.as_ref(); if path.starts_with("~") { let home_dir = match dirs::home_dir() { Some(home_dir) => home_dir, None => return Err(Error::CouldNotFindHome), }; let path_tail = path.strip_prefix("~")?; Ok(home_dir.join(path_tail)) } else { Err(Error::UnexpectedPrefix) } } pub fn prepend_current_dir<P: AsRef<Path>>(path_arg: P) -> Result<PathBuf, Error> { let path = path_arg.as_ref(); match path.components().next() { None => Ok(env::current_dir()?), Some(component) => match component { Component::Normal(os_string) => { if os_string == "~" { Err(Error::UnexpectedPrefix) } else { let cur_dir = env::current_dir()?; Ok(cur_dir.join(path)) } } _ => Err(Error::UnexpectedPrefix), }, } } pub fn absolute_path_buf<P: AsRef<Path>>(path_arg: P) -> Result<PathBuf, Error> { let path = path_arg.as_ref(); match PathType::of(path) { PathType::Absolute => Ok(path.to_path_buf()), PathType::RelativeCurDir => expand_current_dir(path), PathType::RelativeParentDir => expand_parent_dir(path), PathType::RelativeHomeDir => expand_home_dir(path), PathType::RelativeCurDirImplicit => prepend_current_dir(path), PathType::Empty => Ok(env::current_dir()?), } } #[cfg(test)] mod path_ext_tests { use crate::{ absolute_path_buf, expand_current_dir, expand_home_dir, expand_parent_dir, prepend_current_dir, }; use std::env; #[test] fn home_path_expansions() { let home_dir = dirs::home_dir().unwrap(); assert!(expand_home_dir("/home/dir").is_err()); assert_eq!( expand_home_dir("~/whatever").unwrap(), home_dir.join("whatever") ); assert_eq!( absolute_path_buf("~/whatever").unwrap(), home_dir.join("whatever") ); } #[test] fn cur_path_expansions() { let cur_dir = env::current_dir().unwrap(); assert!(expand_current_dir("/home/dir").is_err()); assert_eq!( expand_current_dir("./whatever").unwrap(), cur_dir.join("whatever") ); assert_eq!( absolute_path_buf("./whatever").unwrap(), cur_dir.join("whatever") ); assert!(prepend_current_dir("/home/dir").is_err()); assert_eq!( prepend_current_dir("whatever").unwrap(), 
cur_dir.join("whatever") ); assert_eq!( absolute_path_buf("whatever").unwrap(), cur_dir.join("whatever") ); } #[test] fn parent_path_expansions() { let cur_dir = env::current_dir().unwrap(); let parent_dir = cur_dir.parent().unwrap(); assert!(expand_parent_dir("/home/dir").is_err()); assert_eq!( expand_parent_dir("../whatever").unwrap(), parent_dir.join("whatever") ); assert_eq!( absolute_path_buf("../whatever").unwrap(), parent_dir.join("whatever") ); } }
use std::{ env, io, path::{Component, Path, PathBuf, StripPrefixError}, }; use dirs; use thiserror::Error; #[derive(Debug, Error)] pub enum Error { #[error("Could not find user's home directory.")] CouldNotFindHome, #[error("Could not find current directory's parent.")] CouldNotFindParent, #[error("I/O Error")] IOError(#[from] io::Error), #[error("Error stripping path's prefix")] StripPrefixError(#[from] StripPrefixError), #[error("Unexpected prefix for this operation.")] UnexpectedPrefix, } #[derive(Debug, PartialEq, Eq, Clone, Copy)] pub enum PathType { Absolute, RelativeCurDir, RelativeCurDirImplicit, RelativeParentDir, RelativeHomeDir, Empty, } impl PathType { pub fn of<P: AsRef<Path>>(path_arg: P) -> Self { let path = path_arg.as_ref(); match path.components().next() { None => PathType::Empty, Some(component) => match component { Component::RootDir | Component::Prefix(_) => PathType::Absolute, Component::CurDir => PathType::RelativeCurDir,
lativeHomeDir } else { PathType::RelativeCurDirImplicit } } }, } } } pub fn expand_current_dir<P: AsRef<Path>>(path_arg: P) -> Result<PathBuf, Error> { let path = path_arg.as_ref(); if path.starts_with(Component::CurDir) { let cur_dir = env::current_dir()?; let path_tail = path.strip_prefix(Component::CurDir)?; Ok(cur_dir.join(path_tail)) } else { Err(Error::UnexpectedPrefix) } } pub fn expand_parent_dir<P: AsRef<Path>>(path_arg: P) -> Result<PathBuf, Error> { let path = path_arg.as_ref(); if path.starts_with(Component::ParentDir) { let cur_dir = env::current_dir()?; let parent_dir = match cur_dir.parent() { Some(parent_dir) => parent_dir, None => return Err(Error::CouldNotFindParent), }; let path_tail = path.strip_prefix(Component::ParentDir)?; Ok(parent_dir.join(path_tail)) } else { Err(Error::UnexpectedPrefix) } } pub fn expand_home_dir<P: AsRef<Path>>(path_arg: P) -> Result<PathBuf, Error> { let path = path_arg.as_ref(); if path.starts_with("~") { let home_dir = match dirs::home_dir() { Some(home_dir) => home_dir, None => return Err(Error::CouldNotFindHome), }; let path_tail = path.strip_prefix("~")?; Ok(home_dir.join(path_tail)) } else { Err(Error::UnexpectedPrefix) } } pub fn prepend_current_dir<P: AsRef<Path>>(path_arg: P) -> Result<PathBuf, Error> { let path = path_arg.as_ref(); match path.components().next() { None => Ok(env::current_dir()?), Some(component) => match component { Component::Normal(os_string) => { if os_string == "~" { Err(Error::UnexpectedPrefix) } else { let cur_dir = env::current_dir()?; Ok(cur_dir.join(path)) } } _ => Err(Error::UnexpectedPrefix), }, } } pub fn absolute_path_buf<P: AsRef<Path>>(path_arg: P) -> Result<PathBuf, Error> { let path = path_arg.as_ref(); match PathType::of(path) { PathType::Absolute => Ok(path.to_path_buf()), PathType::RelativeCurDir => expand_current_dir(path), PathType::RelativeParentDir => expand_parent_dir(path), PathType::RelativeHomeDir => expand_home_dir(path), PathType::RelativeCurDirImplicit => 
prepend_current_dir(path), PathType::Empty => Ok(env::current_dir()?), } } #[cfg(test)] mod path_ext_tests { use crate::{ absolute_path_buf, expand_current_dir, expand_home_dir, expand_parent_dir, prepend_current_dir, }; use std::env; #[test] fn home_path_expansions() { let home_dir = dirs::home_dir().unwrap(); assert!(expand_home_dir("/home/dir").is_err()); assert_eq!( expand_home_dir("~/whatever").unwrap(), home_dir.join("whatever") ); assert_eq!( absolute_path_buf("~/whatever").unwrap(), home_dir.join("whatever") ); } #[test] fn cur_path_expansions() { let cur_dir = env::current_dir().unwrap(); assert!(expand_current_dir("/home/dir").is_err()); assert_eq!( expand_current_dir("./whatever").unwrap(), cur_dir.join("whatever") ); assert_eq!( absolute_path_buf("./whatever").unwrap(), cur_dir.join("whatever") ); assert!(prepend_current_dir("/home/dir").is_err()); assert_eq!( prepend_current_dir("whatever").unwrap(), cur_dir.join("whatever") ); assert_eq!( absolute_path_buf("whatever").unwrap(), cur_dir.join("whatever") ); } #[test] fn parent_path_expansions() { let cur_dir = env::current_dir().unwrap(); let parent_dir = cur_dir.parent().unwrap(); assert!(expand_parent_dir("/home/dir").is_err()); assert_eq!( expand_parent_dir("../whatever").unwrap(), parent_dir.join("whatever") ); assert_eq!( absolute_path_buf("../whatever").unwrap(), parent_dir.join("whatever") ); } }
Component::ParentDir => PathType::RelativeParentDir, Component::Normal(os_string) => { if os_string == "~" { PathType::Re
function_block-random_span
[ { "content": "pub fn ignore_report_or_fail<P: AsRef<Path>>(err: Error, path: P) -> EResult<()> {\n\n match &err {\n\n Error::FSOBrokenSymLink(link_path, target_path) => {\n\n log::warn!(\n\n \"{:?} -> {:?}: broken symbolic link ignored\",\n\n link_path,\n\n target_path\n\n );\n\n Ok(())\n\n }\n\n Error::IOError(io_err) => {\n\n match io_err.kind() {\n\n // we assume that \"not found\" is due to a race condition\n\n ErrorKind::NotFound => {\n\n log::trace!(\"{:?}: not found\", path.as_ref());\n\n Ok(())\n\n }\n\n // benign so just report it\n\n ErrorKind::PermissionDenied => {\n\n log::warn!(\"{:?}: permission denied\", path.as_ref());\n\n Ok(())\n\n }\n\n // programming error that needs to be fixed\n\n _ => Err(err),\n\n }\n\n }\n\n _ => Err(err),\n\n }\n\n}\n", "file_path": "ergibus_lib/src/report.rs", "rank": 5, "score": 189876.15974641245 }, { "content": "pub fn create_new_repo<P: AsRef<Path>>(\n\n name: &str,\n\n location: P,\n\n hash_algortithm_str: &str,\n\n) -> EResult<()> {\n\n if content_repo_exists(name) {\n\n return Err(Error::RepoExists(name.to_string()));\n\n }\n\n\n\n let hash_algorithm = HashAlgorithm::from_str(hash_algortithm_str)?;\n\n\n\n let mut repo_dir_path = location.as_ref().to_path_buf();\n\n repo_dir_path.push(\"ergibus\");\n\n repo_dir_path.push(\"repos\");\n\n repo_dir_path.push(name);\n\n\n\n let spec = RepoSpec::new(repo_dir_path, hash_algorithm);\n\n\n\n ContentMgmtKey::from(&spec).create_repo_dir()?;\n\n\n\n write_repo_spec(name, &spec)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "ergibus_lib/src/content.rs", "rank": 6, "score": 159633.37668377353 }, { "content": "pub fn create_new_archive<P: AsRef<Path>>(\n\n name: &str,\n\n content_repo_name: &str,\n\n location: P,\n\n inclusions: &[PathBuf],\n\n dir_exclusions: &[String],\n\n file_exclusions: &[String],\n\n) -> EResult<()> {\n\n if get_archive_spec_file_path(name).exists() {\n\n return Err(Error::ArchiveExists(name.to_string()));\n\n }\n\n if !content_repo_exists(content_repo_name) 
{\n\n return Err(Error::UnknownRepo(content_repo_name.to_string()));\n\n }\n\n for pattern in dir_exclusions.iter() {\n\n let _glob = Glob::new(&pattern).map_err(|err| Error::GlobError(err))?;\n\n }\n\n for pattern in file_exclusions.iter() {\n\n let _glob = Glob::new(&pattern).map_err(|err| Error::GlobError(err))?;\n\n }\n", "file_path": "ergibus_lib/src/archive.rs", "rank": 7, "score": 159633.37668377353 }, { "content": "pub fn abs_default_config_dir_path() -> PathBuf {\n\n match dirs::config_dir() {\n\n Some(config_dir) => config_dir.join(\"ergibus\"),\n\n None => match path_ext::expand_home_dir(&PathBuf::from(DEFAULT_CONFIG_DIR_PATH)) {\n\n Ok(expanded_dir) => expanded_dir,\n\n Err(_) => panic!(\"config dir path expansion failed\"),\n\n },\n\n }\n\n}\n\n\n", "file_path": "ergibus_lib/src/config.rs", "rank": 8, "score": 157024.03525019222 }, { "content": "pub fn get_archive_config_dir_path() -> PathBuf {\n\n get_config_dir_path().join(\"archives\")\n\n}\n\n\n", "file_path": "ergibus_lib/src/config.rs", "rank": 9, "score": 157024.03525019222 }, { "content": "pub fn get_gui_config_dir_path() -> PathBuf {\n\n get_config_dir_path().join(\"gui\")\n\n}\n\n\n", "file_path": "ergibus_lib/src/config.rs", "rank": 10, "score": 157024.03525019222 }, { "content": "pub fn get_repo_config_dir_path() -> PathBuf {\n\n get_config_dir_path().join(\"repos\")\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn get_config_dir_works() {\n\n let new_path = \"./TEST/config\";\n\n env::set_var(DCDP_OVERRIDE_ENVAR, new_path);\n\n assert_eq!(get_config_dir_path(), PathBuf::from(new_path));\n\n assert_eq!(\n\n get_archive_config_dir_path(),\n\n PathBuf::from(new_path).join(\"archives\")\n\n );\n\n assert_eq!(\n\n get_repo_config_dir_path(),\n\n PathBuf::from(new_path).join(\"repos\")\n", "file_path": "ergibus_lib/src/config.rs", "rank": 11, "score": 157024.03525019222 }, { "content": "pub fn get_snapshot_paths_in_dir(dir_path: &Path, reverse: bool) -> 
EResult<Vec<PathBuf>> {\n\n let entries = get_ss_entries_in_dir(dir_path)?;\n\n let mut snapshot_paths = Vec::new();\n\n for entry in entries {\n\n let e_path = dir_path.join(entry.path());\n\n snapshot_paths.push(e_path);\n\n }\n\n if reverse {\n\n snapshot_paths.reverse();\n\n };\n\n Ok(snapshot_paths)\n\n}\n\n\n", "file_path": "ergibus_lib/src/snapshot.rs", "rank": 12, "score": 150603.42784515038 }, { "content": "fn get_entry_for_path<P: AsRef<Path>>(path_arg: P) -> EResult<fs::DirEntry> {\n\n let path = path_arg.as_ref();\n\n if let Some(parent_dir_path) = path.parent() {\n\n let read_dir = fs::read_dir(&parent_dir_path)?;\n\n for entry in read_dir.filter_map(|e| e.ok()) {\n\n if entry.path() == path {\n\n return Ok(entry);\n\n }\n\n }\n\n }\n\n let io_error = io::Error::new(io::ErrorKind::NotFound, format!(\"{:?}: not found\", path));\n\n Err(io_error.into())\n\n}\n\n\n\n#[derive(Serialize, Deserialize, PartialEq, Debug)]\n\npub struct SnapshotPersistentData {\n\n root_dir: DirectoryData,\n\n base_dir_path: PathBuf,\n\n content_mgmt_key: ContentMgmtKey,\n\n archive_name: String,\n", "file_path": "ergibus_lib/src/snapshot.rs", "rank": 13, "score": 144716.40147863096 }, { "content": "// for read only snapshot actions we only need the snapshot directory path\n\n// as the content manager key data is in the snapshot file.\n\n// NB: this means that we can use snapshots even if the configuration\n\n// data has been lost due to a file system failure (but in that case\n\n// the user will have to browse the file system to find the snapshots).\n\npub fn get_archive_snapshot_dir_path(archive_name: &str) -> EResult<PathBuf> {\n\n let archive_spec = read_archive_spec(archive_name)?;\n\n PathBuf::from(&archive_spec.snapshot_dir_path)\n\n .canonicalize()\n\n .map_err(|err| Error::ArchiveDirError(err, PathBuf::from(&archive_spec.snapshot_dir_path)))\n\n}\n\n\n", "file_path": "ergibus_lib/src/archive.rs", "rank": 14, "score": 131031.36415825902 }, { "content": "pub fn 
get_snapshot_names_in_dir(dir_path: &Path, reverse: bool) -> EResult<Vec<String>> {\n\n let entries = get_ss_entries_in_dir(dir_path)?;\n\n let mut snapshot_names = Vec::new();\n\n for entry in entries {\n\n snapshot_names.push(String::from(entry.file_name().to_string_lossy().to_owned()));\n\n }\n\n if reverse {\n\n snapshot_names.reverse();\n\n };\n\n Ok(snapshot_names)\n\n}\n\n\n", "file_path": "ergibus_lib/src/snapshot.rs", "rank": 15, "score": 130110.41475592008 }, { "content": "fn get_config_dir_path() -> PathBuf {\n\n match env::var(DCDP_OVERRIDE_ENVAR) {\n\n Ok(dir_path) => {\n\n if dir_path.len() == 0 {\n\n abs_default_config_dir_path()\n\n } else if dir_path.starts_with(\"~\") {\n\n match path_ext::expand_home_dir(&PathBuf::from(dir_path)) {\n\n Ok(expanded_dir) => expanded_dir,\n\n Err(_) => panic!(\"config dir path expansion failed\",),\n\n }\n\n } else {\n\n PathBuf::from(dir_path)\n\n }\n\n }\n\n Err(_) => abs_default_config_dir_path(),\n\n }\n\n}\n\n\n", "file_path": "ergibus_lib/src/config.rs", "rank": 16, "score": 124385.76248151773 }, { "content": "fn move_aside_file_path(path: &Path) -> PathBuf {\n\n let dt = DateTime::<Local>::from(time::SystemTime::now());\n\n let suffix = format!(\"{}\", dt.format(\"ema-%Y-%m-%d-%H-%M-%S\"));\n\n let new_suffix = if let Some(current_suffix) = path.extension() {\n\n format!(\"{:?}-{}\", current_suffix, suffix)\n\n } else {\n\n suffix\n\n };\n\n path.with_extension(&new_suffix)\n\n}\n\n\n", "file_path": "ergibus_lib/src/fs_objects.rs", "rank": 17, "score": 123367.87982691824 }, { "content": "pub fn delete_snapshot_file(ss_file_path: &Path) -> EResult<()> {\n\n let snapshot = SnapshotPersistentData::from_file(ss_file_path)?;\n\n fs::remove_file(ss_file_path)\n\n .map_err(|err| Error::SnapshotDeleteIOError(err, ss_file_path.to_path_buf()))?;\n\n snapshot.release_contents()?;\n\n Ok(())\n\n}\n\n\n", "file_path": "ergibus_lib/src/snapshot.rs", "rank": 18, "score": 120940.35888565017 }, { "content": "fn 
get_ss_entries_in_dir(dir_path: &Path) -> EResult<Vec<DirEntry>> {\n\n let dir_entries = fs::read_dir(dir_path)\n\n .map_err(|err| Error::SnapshotDirIOError(err, dir_path.to_path_buf()))?;\n\n let mut ss_entries = Vec::new();\n\n for entry_or_err in dir_entries {\n\n match entry_or_err {\n\n Ok(entry) => {\n\n if entry_is_ss_file(&entry) {\n\n ss_entries.push(entry);\n\n }\n\n }\n\n Err(_) => (),\n\n }\n\n }\n\n ss_entries.sort_by_key(|e| e.path());\n\n Ok(ss_entries)\n\n}\n\n\n\nimpl SnapshotPersistentData {\n\n // Interrogation/extraction/restoration methods\n", "file_path": "ergibus_lib/src/snapshot.rs", "rank": 19, "score": 113872.79137857817 }, { "content": "#[allow(dead_code)]\n\npub fn image() -> gtk::Image {\n\n gtk::Image::from_pixbuf(Some(&pixbuf()))\n\n}\n\n\n", "file_path": "ergibus_gtk/src/icons/up_dir.rs", "rank": 20, "score": 108671.6259932812 }, { "content": "fn clear_way_for_new_dir(new_dir_path: &Path, overwrite: bool) -> EResult<()> {\n\n if new_dir_path.exists() && !new_dir_path.is_dir() {\n\n // Real dir or link to dir\n\n if overwrite {\n\n // Remove the file system object to make way for the directory\n\n fs::remove_file(new_dir_path)\n\n .map_err(|err| Error::SnapshotDeleteIOError(err, new_dir_path.to_path_buf()))?;\n\n } else {\n\n let new_path = move_aside_file_path(new_dir_path);\n\n fs::rename(new_dir_path, &new_path)\n\n .map_err(|err| Error::SnapshotMoveAsideFailed(new_dir_path.to_path_buf(), err))?;\n\n }\n\n };\n\n Ok(())\n\n}\n\n\n\n#[cfg(test)]\n\nmod fs_objects_tests {\n\n use super::DirectoryData;\n\n use std::path::{Component, PathBuf};\n", "file_path": "ergibus_lib/src/fs_objects.rs", "rank": 21, "score": 106445.87263504672 }, { "content": "#[allow(dead_code)]\n\npub fn pixbuf() -> gdk_pixbuf::Pixbuf {\n\n gdk_pixbuf::Pixbuf::from_xpm_data(XPM)\n\n}\n\n\n", "file_path": "ergibus_gtk/src/icons/up_dir.rs", "rank": 22, "score": 105920.92424590095 }, { "content": "pub fn get_snapshot_paths_for_archive(archive_name: &str, reverse: 
bool) -> EResult<Vec<PathBuf>> {\n\n let snapshot_dir_path = archive::get_archive_snapshot_dir_path(archive_name)?;\n\n let snapshot_paths = get_snapshot_paths_in_dir(&snapshot_dir_path, reverse)?;\n\n Ok(snapshot_paths)\n\n}\n\n\n", "file_path": "ergibus_lib/src/snapshot.rs", "rank": 23, "score": 100265.75488391958 }, { "content": "#[allow(dead_code)]\n\npub fn sized_image_or(size: i32) -> gtk::Image {\n\n if let Some(image) = sized_image(size) {\n\n image\n\n } else {\n\n image()\n\n }\n\n}\n", "file_path": "ergibus_gtk/src/icons/up_dir.rs", "rank": 24, "score": 96041.5816440054 }, { "content": "pub fn generate_snapshot(\n\n archive_name: &str,\n\n) -> EResult<(time::Duration, FileStats, SymLinkStats, u64)> {\n\n let mut sg = SnapshotGenerator::new(archive_name)?;\n\n let stats = sg.generate_snapshot()?;\n\n sg.write_snapshot()?;\n\n Ok(stats)\n\n}\n\n\n", "file_path": "ergibus_lib/src/snapshot.rs", "rank": 25, "score": 94537.31152087353 }, { "content": "#[allow(dead_code)]\n\npub fn sized_pixbuf_or(size: i32) -> gdk_pixbuf::Pixbuf {\n\n if let Some(pixbuf) = sized_pixbuf(size) {\n\n pixbuf\n\n } else {\n\n pixbuf()\n\n }\n\n}\n\n\n", "file_path": "ergibus_gtk/src/icons/up_dir.rs", "rank": 26, "score": 93817.28538709946 }, { "content": "pub fn get_named_snapshot(\n\n archive_name: &str,\n\n snapshot_name: &str,\n\n) -> EResult<SnapshotPersistentData> {\n\n let snapshot_dir_path = archive::get_archive_snapshot_dir_path(archive_name)?;\n\n let snapshot_file_path = snapshot_dir_path.join(snapshot_name);\n\n SnapshotPersistentData::from_file(&snapshot_file_path)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::archive;\n\n use crate::content;\n\n use fs2::FileExt;\n\n use std::env;\n\n use std::os::unix::fs::MetadataExt;\n\n use tempdir::TempDir;\n\n\n\n #[test]\n", "file_path": "ergibus_lib/src/snapshot.rs", "rank": 27, "score": 92404.89092119542 }, { "content": "#[allow(dead_code)]\n\npub fn sized_image(size: i32) -> Option<gtk::Image> {\n\n 
if let Some(pixbuf) = pixbuf().scale_simple(size, size, gdk_pixbuf::InterpType::Bilinear) {\n\n Some(gtk::Image::from_pixbuf(Some(&pixbuf)))\n\n } else {\n\n None\n\n }\n\n}\n\n\n", "file_path": "ergibus_gtk/src/icons/up_dir.rs", "rank": 28, "score": 91853.85172994705 }, { "content": "#[allow(dead_code)]\n\npub fn sized_pixbuf(size: i32) -> Option<gdk_pixbuf::Pixbuf> {\n\n pixbuf().scale_simple(size, size, gdk_pixbuf::InterpType::Bilinear)\n\n}\n\n\n", "file_path": "ergibus_gtk/src/icons/up_dir.rs", "rank": 29, "score": 89771.765084246 }, { "content": "fn get_archive_spec_file_path(archive_name: &str) -> PathBuf {\n\n config::get_archive_config_dir_path().join(archive_name)\n\n}\n\n\n", "file_path": "ergibus_lib/src/archive.rs", "rank": 30, "score": 86568.65033662593 }, { "content": "fn get_repo_spec_file_path(repo_name: &str) -> PathBuf {\n\n config::get_repo_config_dir_path().join(repo_name)\n\n}\n\n\n", "file_path": "ergibus_lib/src/content.rs", "rank": 31, "score": 86568.65033662593 }, { "content": "pub fn get_repo_names() -> Vec<String> {\n\n let mut names = Vec::new();\n\n if let Ok(dir_entries) = fs::read_dir(config::get_repo_config_dir_path()) {\n\n for entry_or_err in dir_entries {\n\n if let Ok(entry) = entry_or_err {\n\n let path = entry.path();\n\n if path.is_file() {\n\n if let Some(file_name) = path.file_name() {\n\n if let Some(file_name) = file_name.to_str() {\n\n names.push(file_name.to_string());\n\n }\n\n }\n\n }\n\n }\n\n }\n\n };\n\n names\n\n}\n\n\n", "file_path": "ergibus_lib/src/content.rs", "rank": 32, "score": 81610.58798322249 }, { "content": "pub fn get_archive_names() -> Vec<String> {\n\n let mut names = Vec::new();\n\n if let Ok(dir_entries) = fs::read_dir(config::get_archive_config_dir_path()) {\n\n for entry_or_err in dir_entries {\n\n if let Ok(entry) = entry_or_err {\n\n let path = entry.path();\n\n if path.is_file() {\n\n if let Some(file_name) = path.file_name() {\n\n if let Some(file_name) = file_name.to_str() {\n\n 
names.push(file_name.to_string());\n\n }\n\n }\n\n }\n\n }\n\n }\n\n };\n\n names\n\n}\n\n\n\n#[derive(Debug, Clone)]\n", "file_path": "ergibus_lib/src/archive.rs", "rank": 33, "score": 81610.58798322249 }, { "content": "pub trait RealPathBufType {\n\n fn is_real_dir(&self) -> bool;\n\n fn is_symlink_to_dir(&self) -> bool;\n\n fn is_real_file(&self) -> bool;\n\n fn is_symlink_to_file(&self) -> bool;\n\n fn is_symlink(&self) -> bool;\n\n}\n\n\n\nmacro_rules! impl_real_path_buf_type {\n\n ( $ptype:ident ) => {\n\n impl RealPathBufType for $ptype {\n\n fn is_real_dir(&self) -> bool {\n\n if let Ok(md) = self.symlink_metadata() {\n\n md.is_dir()\n\n } else {\n\n false\n\n }\n\n }\n\n\n\n fn is_symlink_to_dir(&self) -> bool {\n", "file_path": "ergibus_lib/src/path_buf_ext.rs", "rank": 34, "score": 77214.74646296348 }, { "content": "pub fn delete_archive(archive_name: &str) -> EResult<()> {\n\n let snapshot_dir = Snapshots::try_from(archive_name)?;\n\n let spec_file_path = get_archive_spec_file_path(archive_name);\n\n fs::remove_file(&spec_file_path)?;\n\n snapshot_dir.delete()\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct ArchiveData {\n\n pub name: String,\n\n pub content_mgmt_key: ContentMgmtKey,\n\n pub snapshot_dir_path: PathBuf,\n\n pub includes: Vec<PathBuf>,\n\n pub exclusions: Exclusions,\n\n}\n\n\n", "file_path": "ergibus_lib/src/archive.rs", "rank": 35, "score": 75666.52249458681 }, { "content": "pub fn content_repo_exists(repo_name: &str) -> bool {\n\n get_repo_spec_file_path(repo_name).exists()\n\n}\n\n\n", "file_path": "ergibus_lib/src/content.rs", "rank": 36, "score": 75666.52249458681 }, { "content": "pub fn delete_repository(repo_name: &str) -> EResult<()> {\n\n let repo_key = get_content_mgmt_key(repo_name)?;\n\n let content_manager = repo_key.open_content_manager(Mutability::Mutable)?;\n\n content_manager.delete()?;\n\n let repo_spec_path = get_repo_spec_file_path(repo_name);\n\n fs::remove_file(repo_spec_path)?;\n\n Ok(())\n\n}\n\n\n", "file_path": 
"ergibus_lib/src/content.rs", "rank": 37, "score": 75666.52249458681 }, { "content": "pub fn read_repo_spec(repo_name: &str) -> EResult<RepoSpec> {\n\n let spec_file_path = get_repo_spec_file_path(repo_name);\n\n let spec_file = File::open(&spec_file_path)\n\n .map_err(|err| Error::RepoReadError(err, spec_file_path.clone()))?;\n\n let spec = RepoSpec::from_reader(spec_file)?;\n\n Ok(spec)\n\n}\n\n\n", "file_path": "ergibus_lib/src/content.rs", "rank": 38, "score": 69237.08500139209 }, { "content": "pub fn get_archive_data(archive_name: &str) -> EResult<ArchiveData> {\n\n let archive_spec = read_archive_spec(archive_name)?;\n\n let name = archive_name.to_string();\n\n let content_mgmt_key = get_content_mgmt_key(&archive_spec.content_repo_name)?;\n\n let snapshot_dir_path = archive_spec\n\n .snapshot_dir_path\n\n .canonicalize()\n\n .map_err(|err| Error::ArchiveDirError(err, archive_spec.snapshot_dir_path.clone()))?;\n\n // recheck paths in case spec file has been manually edited\n\n let mut includes = Vec::new();\n\n for inclusion in archive_spec.inclusions {\n\n let included_file_path = if inclusion.starts_with(\"~\") {\n\n expand_home_dir(&inclusion)\n\n .map_err(|e| Error::ArchiveIncludePathError(e, inclusion.to_path_buf()))?\n\n } else if inclusion.is_absolute() {\n\n inclusion\n\n } else {\n\n return Err(Error::RelativeIncludePath(\n\n inclusion,\n\n archive_name.to_string(),\n", "file_path": "ergibus_lib/src/archive.rs", "rank": 39, "score": 69237.08500139209 }, { "content": "pub fn prune_repository(repo_name: &str) -> EResult<UnreferencedContentData> {\n\n let repo_key = get_content_mgmt_key(repo_name)?;\n\n let content_manager = repo_key.open_content_manager(Mutability::Mutable)?;\n\n Ok(content_manager.prune_contents()?)\n\n}\n\n\n\n#[cfg(test)]\n\nmod content_tests {\n\n use super::*;\n\n use dychatat::Mutability;\n\n use fs2::FileExt;\n\n use std::env;\n\n use std::fs::OpenOptions;\n\n use tempdir::TempDir;\n\n\n\n #[test]\n\n fn repo_works() {\n\n let 
file = OpenOptions::new()\n\n .write(true)\n\n .open(\"../test_lock_file\")\n", "file_path": "ergibus_lib/src/content.rs", "rank": 40, "score": 69237.08500139209 }, { "content": "pub fn get_content_mgmt_key(repo_name: &str) -> EResult<ContentMgmtKey> {\n\n if !content_repo_exists(repo_name) {\n\n Err(Error::UnknownRepo(repo_name.to_string()))\n\n } else {\n\n let spec = read_repo_spec(repo_name)?;\n\n Ok(ContentMgmtKey::from(&spec))\n\n }\n\n}\n\n\n", "file_path": "ergibus_lib/src/content.rs", "rank": 41, "score": 66871.31987442926 }, { "content": "// GUI interface functions\n\npub fn delete_named_snapshots(archive_name: &str, snapshot_names: &[String]) -> EResult<()> {\n\n let snapshot_dir_path = archive::get_archive_snapshot_dir_path(archive_name)?;\n\n for snapshot_name in snapshot_names.iter() {\n\n let snapshot_file_path = snapshot_dir_path.join(snapshot_name);\n\n delete_snapshot_file(&snapshot_file_path)?;\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "ergibus_lib/src/snapshot.rs", "rank": 42, "score": 66055.22750623187 }, { "content": "fn entry_is_ss_file(entry: &DirEntry) -> bool {\n\n let path = entry.path();\n\n if path.is_file() {\n\n if let Some(file_name) = path.file_name() {\n\n if let Some(file_name) = file_name.to_str() {\n\n return SS_FILE_NAME_RE.is_match(file_name);\n\n }\n\n }\n\n }\n\n false\n\n}\n\n\n", "file_path": "ergibus_lib/src/snapshot.rs", "rank": 43, "score": 62639.50113211235 }, { "content": "pub fn get_snapshot_names_for_archive(archive_name: &str, reverse: bool) -> EResult<Vec<String>> {\n\n let snapshot_dir_path = archive::get_archive_snapshot_dir_path(archive_name)?;\n\n let snapshot_names = get_snapshot_names_in_dir(&snapshot_dir_path, reverse)?;\n\n Ok(snapshot_names)\n\n}\n\n\n", "file_path": "ergibus_lib/src/snapshot.rs", "rank": 44, "score": 60607.967744874164 }, { "content": "/// Experimental Rust Git Inspired Back Up System\n\nenum SubCommands {\n\n /// Manage archives\n\n #[structopt(alias = \"ar\")]\n\n 
Archive(ManageArchives),\n\n /// Manage repositories\n\n #[structopt(alias = \"re\")]\n\n Repo(ManageRepositories),\n\n /// Manage archive snapshots\n\n #[structopt(alias = \"ms\")]\n\n ManageSnapshots(SnapshotManager),\n\n /// Manage snapshot contents\n\n #[structopt(alias = \"sc\")]\n\n SnapshotContents(SnapshotContents),\n\n /// Take backup snapshots\n\n #[structopt(alias = \"bu\")]\n\n BackUp(BackUp),\n\n}\n\n\n", "file_path": "ergibus/src/main.rs", "rank": 45, "score": 56851.15694091724 }, { "content": "#[derive(Debug)]\n\nenum ProtectedRefCounter {\n\n Immutable(RefCounter),\n\n Mutable(RefCell<RefCounter>),\n\n}\n\n\n\nimpl ProtectedRefCounter {\n\n // GENERAL\n\n fn is_mutable(&self) -> bool {\n\n match *self {\n\n ProtectedRefCounter::Immutable(_) => false,\n\n ProtectedRefCounter::Mutable(_) => true,\n\n }\n\n }\n\n\n\n fn from_file(\n\n file: &mut File,\n\n mutability: Mutability,\n\n ) -> Result<ProtectedRefCounter, RepoError> {\n\n let ref_counter = RefCounter::from_file(file)?;\n\n if mutability == Mutability::Mutable {\n", "file_path": "dychatat/src/lib.rs", "rank": 46, "score": 55554.03534021448 }, { "content": "fn main() {\n\n let ergibus = Ergibus::from_args();\n\n\n\n stderrlog::new()\n\n //.module(module_path!())\n\n .quiet(ergibus.quiet)\n\n .verbosity(ergibus.verbose)\n\n .timestamp(ergibus.ts.unwrap_or(stderrlog::Timestamp::Off))\n\n .init()\n\n .unwrap();\n\n\n\n if let Err(err) = match ergibus.sub_cmd {\n\n SubCommands::Archive(sub_cmd) => sub_cmd.exec(),\n\n SubCommands::Repo(sub_cmd) => sub_cmd.exec(),\n\n SubCommands::ManageSnapshots(sub_cmd) => sub_cmd.exec(),\n\n SubCommands::SnapshotContents(sub_cmd) => sub_cmd.exec(),\n\n SubCommands::BackUp(sub_cmd) => sub_cmd.exec(),\n\n } {\n\n error!(\"{:?}\", err);\n\n std::process::exit(1);\n\n }\n\n}\n", "file_path": "ergibus/src/main.rs", "rank": 47, "score": 52393.942784410174 }, { "content": "fn main() {\n\n orbtk::initialize();\n\n\n\n Application::new()\n\n .window(|ctx| {\n\n 
Window::new()\n\n .title(\"Ergibus (OrbTk)\")\n\n .position((2000.0, 1000.0))\n\n .size(300.0, 100.0)\n\n .resizeable(true)\n\n .child(\n\n Stack::new()\n\n .child(SnapshotSelectionView::new().build(ctx))\n\n .child(\n\n TextBlock::new()\n\n .text(\"Ergibus OrbTk GUI is under construction\")\n\n .v_align(\"center\")\n\n .h_align(\"center\")\n\n .build(ctx),\n\n )\n\n .child(ArchiveSelectionView::new().build(ctx))\n\n .child(ListView::new().build(ctx))\n\n .build(ctx),\n\n )\n\n .build(ctx)\n\n })\n\n .run();\n\n}\n", "file_path": "ergibus_orbtk/src/main.rs", "rank": 48, "score": 51020.2721880015 }, { "content": "fn main() {\n\n recollections::init(&config::get_gui_config_dir_path().join(\"recollections\"));\n\n let flags = gio::ApplicationFlags::empty();\n\n let app = gtk::Application::new(None, flags)\n\n .unwrap_or_else(|err| panic!(\"{:?}: line {:?}: {:?}\", file!(), line!(), err));\n\n app.connect_activate(activate);\n\n app.run(&[]);\n\n}\n", "file_path": "ergibus_gtk/src/main.rs", "rank": 49, "score": 51020.2721880015 }, { "content": "fn write_archive_spec(\n\n archive_name: &str,\n\n archive_spec: &ArchiveSpec,\n\n overwrite: bool,\n\n) -> EResult<()> {\n\n let spec_file_path = get_archive_spec_file_path(archive_name);\n\n if !overwrite && spec_file_path.exists() {\n\n return Err(Error::ArchiveExists(archive_name.to_string()));\n\n }\n\n match spec_file_path.parent() {\n\n Some(config_dir_path) => {\n\n if !config_dir_path.exists() {\n\n fs::create_dir_all(&config_dir_path)\n\n .map_err(|err| Error::ArchiveWriteError(err, config_dir_path.to_path_buf()))?;\n\n }\n\n }\n\n None => (),\n\n }\n\n let spec_file = File::create(&spec_file_path)\n\n .map_err(|err| Error::ArchiveWriteError(err, spec_file_path.clone()))?;\n\n serde_yaml::to_writer(&spec_file, archive_spec)\n\n .map_err(|err| Error::ArchiveYamlWriteError(err, archive_name.to_string()))?;\n\n Ok(())\n\n}\n\n\n", "file_path": "ergibus_lib/src/archive.rs", "rank": 50, "score": 48612.361189208124 }, { 
"content": "pub trait Name {\n\n fn name(&self) -> &OsStr;\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Debug, Default, PartialEq)]\n\npub struct FileData {\n\n file_name: OsString,\n\n attributes: Attributes,\n\n content_token: String,\n\n}\n\n\n\nimpl Name for FileData {\n\n fn name(&self) -> &OsStr {\n\n &self.file_name\n\n }\n\n}\n\n\n\nimpl FileData {\n\n pub fn file_system_object<P: AsRef<Path>>(\n\n path_arg: P,\n", "file_path": "ergibus_lib/src/fs_objects.rs", "rank": 51, "score": 48191.975942605604 }, { "content": "pub trait AttributesIfce: From<Metadata> {\n\n fn size(&self) -> u64;\n\n fn set_file_attributes(&self, file_path: &Path) -> Result<(), io::Error>;\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Debug, PartialEq, Clone, Copy, Default)]\n\n#[cfg(target_family = \"unix\")]\n\npub struct Attributes {\n\n st_dev: u64,\n\n st_ino: u64,\n\n st_nlink: u64,\n\n st_mode: u32,\n\n st_uid: u32,\n\n st_gid: u32,\n\n st_size: u64,\n\n st_atime: i64,\n\n st_atime_nsec: i64,\n\n st_mtime: i64,\n\n st_mtime_nsec: i64,\n\n st_ctime: i64,\n", "file_path": "ergibus_lib/src/attributes.rs", "rank": 52, "score": 43484.66349574279 }, { "content": "fn activate(app: &gtk::Application) {\n\n let window = gtk::ApplicationWindow::new(app);\n\n window.set_title(\"ERGIBUS GUI\");\n\n if let Some(geometry) = recollections::recall(\"main_window:geometry\") {\n\n window.parse_geometry(&geometry);\n\n } else {\n\n window.set_default_size(200, 200);\n\n };\n\n window.connect_configure_event(|_, event| {\n\n recollections::remember(\"main_window:geometry\", &format_geometry(event));\n\n false\n\n });\n\n let vbox = gtk::Box::new(gtk::Orientation::Vertical, 0);\n\n let snapshots_manager = SnapshotsManager::new();\n\n vbox.pack_start(snapshots_manager.pwo(), true, true, 0);\n\n let label = gtk::Label::new(Some(\"GUI is under construction\"));\n\n vbox.pack_start(&label, false, false, 0);\n\n window.add(&vbox);\n\n window.show_all();\n\n}\n\n\n", "file_path": "ergibus_gtk/src/main.rs", 
"rank": 53, "score": 41921.791572993214 }, { "content": "fn format_for_inform(extraction_stats: &ExtractionStats) -> String {\n\n format!(\"{:16} Directories\\n{:16} Files\\n{:16} Bytes\\n{:16} Directory Sym Links\\n{:16} File Sym Links\\n\",\n\n extraction_stats.dir_count,\n\n extraction_stats.file_count,\n\n extraction_stats.bytes_count,\n\n extraction_stats.dir_sym_link_count,\n\n extraction_stats.file_sym_link_count\n\n )\n\n}\n\n\n", "file_path": "ergibus_gtk/src/g_snapshot.rs", "rank": 54, "score": 39188.735900668675 }, { "content": "fn generate_digest(list: &Vec<String>) -> Vec<u8> {\n\n let mut hasher = Hasher::new(Algorithm::SHA256);\n\n for ref item in list {\n\n hasher.write_all(item.as_bytes()).expect(UNEXPECTED);\n\n }\n\n hasher.finish()\n\n}\n\n\n\n#[derive(PWO)]\n\npub struct SnapshotListViewCore {\n\n vbox: gtk::Box,\n\n archive_selector: g_archive::ArchiveSelector,\n\n buffered_list_view: TreeViewWithPopup,\n\n buffered_list_store: BufferedListStore<SnapshotRowData>,\n\n changed_archive_callbacks: RefCell<Vec<Box<dyn Fn(Option<String>)>>>,\n\n}\n\n\n\n#[derive(PWO, Wrapper, WClone)]\n\npub struct SnapshotListView(Rc<SnapshotListViewCore>);\n\n\n", "file_path": "ergibus_gtk/src/g_snapshots.rs", "rank": 55, "score": 36814.52074101123 }, { "content": "fn read_archive_spec(archive_name: &str) -> EResult<ArchiveSpec> {\n\n let spec_file_path = get_archive_spec_file_path(archive_name);\n\n let spec_file = File::open(&spec_file_path).map_err(|err| match err.kind() {\n\n ErrorKind::NotFound => Error::ArchiveUnknown(archive_name.to_string()),\n\n _ => Error::ArchiveReadError(err, spec_file_path.clone()),\n\n })?;\n\n let spec: ArchiveSpec = serde_yaml::from_reader(&spec_file)\n\n .map_err(|err| Error::ArchiveYamlReadError(err, archive_name.to_string()))?;\n\n Ok(spec)\n\n}\n\n\n", "file_path": "ergibus_lib/src/archive.rs", "rank": 56, "score": 35698.64265927904 }, { "content": "fn write_repo_spec(repo_name: &str, repo_spec: &RepoSpec) -> EResult<()> {\n\n 
let spec_file_path = get_repo_spec_file_path(repo_name);\n\n if spec_file_path.exists() {\n\n return Err(Error::RepoExists(repo_name.to_string()));\n\n }\n\n match spec_file_path.parent() {\n\n Some(config_dir_path) => {\n\n if !config_dir_path.exists() {\n\n fs::create_dir_all(&config_dir_path)\n\n .map_err(|err| Error::RepoWriteError(err, config_dir_path.to_path_buf()))?;\n\n }\n\n }\n\n None => (),\n\n }\n\n let spec_file = File::create(&spec_file_path)\n\n .map_err(|err| Error::RepoWriteError(err, spec_file_path.clone()))?;\n\n repo_spec.to_writer(spec_file)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "ergibus_lib/src/content.rs", "rank": 57, "score": 33354.338358210414 }, { "content": "use std::{convert::From, ffi::OsString, io, path::PathBuf};\n\n\n\nuse crate::ReferencedContentData;\n\nuse serde_json;\n\nuse serde_yaml;\n\nuse thiserror::*;\n\n\n\n/// A wrapper around the various error types than can be encountered\n\n/// by this crate.\n\n#[derive(Debug, Error)]\n\npub enum RepoError {\n\n #[error(\"I/O Error\")]\n\n IOError(#[from] io::Error),\n\n #[error(\"Json Error\")]\n\n JsonError(#[from] serde_json::Error),\n\n #[error(\"Not implemented\")]\n\n NotImplemented,\n\n #[error(\"{0:?}: repository path already exists\")]\n\n RepoDirExists(PathBuf),\n\n #[error(\"{0}: unknown hash algorithm\")]\n", "file_path": "dychatat/src/error.rs", "rank": 58, "score": 33080.0523977988 }, { "content": " UnknownHashAlgorithm(String),\n\n #[error(\"{0}: unknown content token\")]\n\n UnknownToken(String),\n\n #[error(\"Serde Yaml Error\")]\n\n YamlError(#[from] serde_yaml::Error),\n\n #[error(\"{0:?}: malformed string\")]\n\n BadOsString(OsString),\n\n #[error(\"Still has {0} references to {1} items\")]\n\n StillBeingReferenced(u128, u64),\n\n}\n\n\n\nimpl From<OsString> for RepoError {\n\n fn from(os_string: OsString) -> Self {\n\n RepoError::BadOsString(os_string)\n\n }\n\n}\n\n\n\nimpl From<ReferencedContentData> for RepoError {\n\n fn from(rcd: ReferencedContentData) -> 
Self {\n\n RepoError::StillBeingReferenced(rcd.num_references, rcd.num_items)\n\n }\n\n}\n", "file_path": "dychatat/src/error.rs", "rank": 59, "score": 33069.55786787628 }, { "content": "// Copyright 2021 Peter Williams <[email protected]> <[email protected]>\n\n\n\npub static XPM: &[&str] = &[\n\n \"64 64 15 1\",\n\n \" \tc None\",\n\n \"1\tc #8D8D8D\",\n\n \"2\tc #00DB00\",\n\n \"3\tc #E9E9E9\",\n\n \"4\tc #2D2D2D\",\n\n \"5\tc #7C7C7C\",\n\n \"6\tc #C9C9C9\",\n\n \"7\tc #616161\",\n\n \"8\tc #111111\",\n\n \"9\tc #FEFEFE\",\n\n \"A\tc #3F3F3F\",\n\n \"B\tc #D7D7D7\",\n\n \"C\tc #B1B1B1\",\n\n \"D\tc #F3F3F3\",\n\n \"E\tc #9F9F9F\",\n\n \" \",\n", "file_path": "ergibus_gtk/src/icons/up_dir.rs", "rank": 66, "score": 29769.550366026786 }, { "content": " \" DA555555555555555555555555555555555555AD \",\n\n \" 385555555555555555555555555555555555555583 \",\n\n \" BA84444444444855555555555555844444444448AB \",\n\n \" D666BBBBBBBBC4555555555555554CBBBBBBBB666D \",\n\n \" B4555555555555554B \",\n\n \" B4555555555555554B \",\n\n \" B4555555555555554B \",\n\n \" C8555555555555558C \",\n\n \" DE8555555555555558ED \",\n\n \" E8555555555555558E \",\n\n \" DE8555555555555558ED \",\n\n \" DE8555555555555558ED \",\n\n \" E8555555555555558E \",\n\n \" E8555555555555558E \",\n\n \" E8555555555555558E \",\n\n \" E8555555555555558E \",\n\n \" E8555555555555558E \",\n\n \" E8555555555555558E \",\n\n \" CAAAAAAAAAAAAAAAAC \",\n\n \" DDDDDDDDDDDDDDDD \",\n\n \" \",\n\n \" \",\n\n \" \",\n\n];\n\n\n\nuse pw_gtk_ext::{gdk_pixbuf, gtk};\n\n\n\n#[allow(dead_code)]\n", "file_path": "ergibus_gtk/src/icons/up_dir.rs", "rank": 67, "score": 29767.796053814352 }, { "content": " \" 75555555555554 \",\n\n \" 6455555555555546 \",\n\n \" 345555555555555543 \",\n\n \" C85555555555555588 \",\n\n \" C4555555555555555548 \",\n\n \" 75555555555555555558 \",\n\n \" 1855555555555555555588 \",\n\n \" 7555555555555555555558 \",\n\n \" 585555555555555555555588 \",\n\n \" 68555555555555555555555586 \",\n\n \" 
B78555555555555555555555558B \",\n\n \" E855555555555555555555555558 \",\n\n \" CA55555555555555555555555555A8 \",\n\n \" 785555555555555555555555555558 \",\n\n \" 64555555555555555555555555555546 \",\n\n \" 3A55555555555555555555555555555583 \",\n\n \" D4555555555555555555555555555555558D \",\n\n \" C4555555555555555555555555555555558C \",\n\n \" 455555555555555555555555555555555558 \",\n\n \" 55555555555555555555555555555555555588 \",\n", "file_path": "ergibus_gtk/src/icons/up_dir.rs", "rank": 68, "score": 29765.502273705897 }, { "content": " \" \",\n\n \" \",\n\n \" 6666666666666666666666666666666666666666 \",\n\n \" 688888888888888888888888888888888888888886 \",\n\n \" 682222222222222222222222222222222222222286 \",\n\n \" 682222222222222222222222222222222222222286 \",\n\n \" 682222222222222222222222222222222222222286 \",\n\n \" 682222222222222222222222222222222222222286 \",\n\n \" 682222222222222222222222222222222222222286 \",\n\n \" 682222222222222222222222222222222222222286 \",\n\n \" B4444444444444444444884444444444444444444B \",\n\n \" 33333333333333333D6776D33333333333333333 \",\n\n \" 6446 \",\n\n \" B4558B \",\n\n \" 585558 \",\n\n \" D18555581D \",\n\n \" 3855555583 \",\n\n \" 358555555883 \",\n\n \" 185555555588 \",\n\n \" BA5555555555AB \",\n", "file_path": "ergibus_gtk/src/icons/up_dir.rs", "rank": 69, "score": 29765.502273705897 }, { "content": " use super::*;\n\n use std::fs;\n\n\n\n #[cfg(unix)]\n\n fn soft_link_dir(target: &str, link: &str) -> std::result::Result<(), std::io::Error> {\n\n std::os::unix::fs::symlink(target, link)\n\n }\n\n\n\n #[cfg(unix)]\n\n fn soft_link_file(target: &str, link: &str) -> std::result::Result<(), std::io::Error> {\n\n std::os::unix::fs::symlink(target, link)\n\n }\n\n\n\n #[cfg(windows)]\n\n fn soft_link_dir(target: &str, link: &str) -> std::result::Result<(), std::io::Error> {\n\n std::os::windows::fs::symlink_dir(target, link)\n\n }\n\n\n\n #[cfg(windows)]\n\n fn soft_link_file(target: &str, link: &str) -> 
std::result::Result<(), std::io::Error> {\n", "file_path": "ergibus_lib/src/path_buf_ext.rs", "rank": 70, "score": 28264.77597996972 }, { "content": "use std::path::{Path, PathBuf};\n\n\n", "file_path": "ergibus_lib/src/path_buf_ext.rs", "rank": 71, "score": 28260.32663287375 }, { "content": " std::os::windows::fs::symlink_file(target, link)\n\n }\n\n\n\n #[test]\n\n fn path_buf_is_real_dir_works() {\n\n assert!(PathBuf::from(\"src\").is_real_dir());\n\n assert!(!PathBuf::from(\"nonexistent\").is_real_dir());\n\n }\n\n\n\n #[test]\n\n fn path_buf_is_symlink_to_dir_works() {\n\n assert!(!PathBuf::from(\"src\").is_symlink_to_dir());\n\n assert!(!PathBuf::from(\"src\").is_symlink());\n\n assert!(!PathBuf::from(\"nonexistent\").is_symlink_to_dir());\n\n assert!(!PathBuf::from(\"nonexistent\").is_symlink());\n\n soft_link_dir(\"../target\", \"link_to_target\").unwrap();\n\n assert!(PathBuf::from(\"link_to_target\").is_symlink_to_dir());\n\n assert!(PathBuf::from(\"link_to_target\").is_symlink());\n\n assert!(!PathBuf::from(\"link_to_target\").is_symlink_to_file());\n\n fs::remove_file(PathBuf::from(\"link_to_target\")).unwrap();\n", "file_path": "ergibus_lib/src/path_buf_ext.rs", "rank": 72, "score": 28257.917057005325 }, { "content": " fn path_is_real_dir_works() {\n\n assert!(Path::new(\"src\").is_real_dir());\n\n assert!(!Path::new(\"nonexistent\").is_real_dir());\n\n }\n\n\n\n #[test]\n\n fn path_is_symlink_to_dir_works() {\n\n assert!(!Path::new(\"src\").is_symlink_to_dir());\n\n assert!(!Path::new(\"nonexistent\").is_symlink_to_dir());\n\n soft_link_dir(\"../target\", \"link_to_target_2\").unwrap();\n\n assert!(Path::new(\"link_to_target_2\").is_symlink_to_dir());\n\n fs::remove_file(Path::new(\"link_to_target_2\")).unwrap();\n\n }\n\n\n\n #[test]\n\n fn path_is_real_file_works() {\n\n assert!(Path::new(\"COPYRIGHT\").is_real_file());\n\n assert!(!Path::new(\"nonexistent\").is_real_file());\n\n }\n\n\n", "file_path": "ergibus_lib/src/path_buf_ext.rs", "rank": 73, 
"score": 28256.62409948045 }, { "content": " }\n\n };\n\n false\n\n }\n\n\n\n fn is_symlink(&self) -> bool {\n\n if let Ok(md) = self.symlink_metadata() {\n\n return md.file_type().is_symlink();\n\n };\n\n false\n\n }\n\n }\n\n };\n\n}\n\n\n\nimpl_real_path_buf_type!(PathBuf);\n\nimpl_real_path_buf_type!(Path);\n\n\n\n#[cfg(test)]\n\nmod tests {\n", "file_path": "ergibus_lib/src/path_buf_ext.rs", "rank": 74, "score": 28255.984625792993 }, { "content": " }\n\n\n\n #[test]\n\n fn path_buf_is_real_file_works() {\n\n assert!(PathBuf::from(\"COPYRIGHT\").is_real_file());\n\n assert!(!PathBuf::from(\"nonexistent\").is_real_file());\n\n }\n\n\n\n #[test]\n\n fn path_buf_is_symlink_to_file_works() {\n\n assert!(!PathBuf::from(\"COPYRIGHT\").is_symlink_to_file());\n\n assert!(!PathBuf::from(\"nonexistent\").is_symlink_to_file());\n\n soft_link_file(\"COPYRIGHT\", \"link_to_COPYRIGHT_2\").unwrap();\n\n assert!(PathBuf::from(\"link_to_COPYRIGHT_2\").is_symlink_to_file());\n\n assert!(PathBuf::from(\"link_to_COPYRIGHT_2\").is_symlink());\n\n assert!(!PathBuf::from(\"link_to_COPYRIGHT_2\").is_symlink_to_dir());\n\n fs::remove_file(PathBuf::from(\"link_to_COPYRIGHT_2\")).unwrap();\n\n }\n\n\n\n #[test]\n", "file_path": "ergibus_lib/src/path_buf_ext.rs", "rank": 75, "score": 28254.205500644646 }, { "content": " #[test]\n\n fn path_is_symlink_to_file_works() {\n\n assert!(!Path::new(\"COPYRIGHT\").is_symlink_to_file());\n\n assert!(!Path::new(\"nonexistent\").is_symlink_to_file());\n\n soft_link_file(\"COPYRIGHT\", \"link_to_COPYRIGHT\").unwrap();\n\n assert!(Path::new(\"link_to_COPYRIGHT\").is_symlink_to_file());\n\n fs::remove_file(Path::new(\"link_to_COPYRIGHT\")).unwrap();\n\n }\n\n}\n", "file_path": "ergibus_lib/src/path_buf_ext.rs", "rank": 76, "score": 28252.47834673805 }, { "content": " if let Ok(md) = self.symlink_metadata() {\n\n if md.file_type().is_symlink() {\n\n return self.is_dir();\n\n }\n\n };\n\n false\n\n }\n\n\n\n fn is_real_file(&self) -> bool {\n\n if let 
Ok(md) = self.symlink_metadata() {\n\n md.is_file()\n\n } else {\n\n false\n\n }\n\n }\n\n\n\n fn is_symlink_to_file(&self) -> bool {\n\n if let Ok(md) = self.symlink_metadata() {\n\n if md.file_type().is_symlink() {\n\n return self.is_file();\n", "file_path": "ergibus_lib/src/path_buf_ext.rs", "rank": 77, "score": 28250.976231073106 }, { "content": "# ergibus\n\nExperimental Rust Git Inspired Back Up System\n\n\n\nUNDER CONSTRUCTION\n", "file_path": "README.md", "rank": 78, "score": 22952.677047161007 }, { "content": "# dychatat\n\nDeposit Your Content Here And Take A Token\n", "file_path": "dychatat/README.md", "rank": 79, "score": 22194.71117352434 }, { "content": "# ergibus\n\nExperimental Rust Git Inspired Back Up System\n", "file_path": "ergibus_lib/README.md", "rank": 80, "score": 21485.20558563504 }, { "content": " current_subdir_iter,\n\n }\n\n }\n\n\n\n pub fn find_subdir<P: AsRef<Path>>(&self, path_arg: P) -> EResult<&Self> {\n\n let subdir_path = path_arg.as_ref();\n\n debug_assert!(subdir_path.is_absolute());\n\n let rel_path = subdir_path\n\n .strip_prefix(&self.path)\n\n .map_err(|_| Error::SnapshotUnknownDirectory(subdir_path.to_path_buf()))?;\n\n match rel_path.components().next() {\n\n None => Ok(self),\n\n Some(Component::Normal(first_name)) => match self.get_directory(&first_name) {\n\n Some(sd) => sd.find_subdir(path_arg),\n\n None => Err(Error::SnapshotUnknownDirectory(subdir_path.to_path_buf())),\n\n },\n\n _ => Err(Error::FSOMalformedPath(rel_path.to_path_buf())),\n\n }\n\n }\n\n\n", "file_path": "ergibus_lib/src/fs_objects.rs", "rank": 81, "score": 29.495379493789404 }, { "content": "use pw_gtk_ext::sav_state::SAV_SELN_MADE;\n\nuse std::path::{Path, PathBuf};\n\n\n\n#[derive(PWO)]\n\npub struct CurrentDirectoryManagerCore {\n\n h_box: gtk::Box,\n\n button: gtk::Button,\n\n label: gtk::Label,\n\n}\n\n\n\n#[derive(PWO, WClone)]\n\npub struct CurrentDirectoryManager(Rc<CurrentDirectoryManagerCore>);\n\n\n\nimpl CurrentDirectoryManager {\n\n 
pub fn new<P: AsRef<Path>>(path: P) -> Self {\n\n let h_box = gtk::BoxBuilder::new()\n\n .orientation(gtk::Orientation::Horizontal)\n\n .build();\n\n let button = ButtonBuilder::new()\n\n .tooltip_text(\"Change directory up one level\")\n", "file_path": "ergibus_gtk/src/g_snapshot.rs", "rank": 82, "score": 25.309294133213864 }, { "content": " Self {\n\n base_dir_path,\n\n hash_algorithm,\n\n }\n\n }\n\n\n\n pub fn from_reader(reader: impl Read) -> Result<Self, RepoError> {\n\n let spec: Self = serde_yaml::from_reader(reader)?;\n\n Ok(spec)\n\n }\n\n\n\n pub fn to_writer(&self, writer: impl Write) -> Result<(), RepoError> {\n\n serde_yaml::to_writer(writer, self)?;\n\n Ok(())\n\n }\n\n}\n\n\n\n#[derive(PartialEq, Clone, Copy, Debug)]\n\npub enum Mutability {\n\n Immutable,\n", "file_path": "dychatat/src/lib.rs", "rank": 83, "score": 23.890173547241993 }, { "content": " subdir.release_contents(content_mgr)?;\n\n }\n\n Ok(())\n\n }\n\n\n\n pub fn find_or_add_subdir<P>(&mut self, path_arg: P) -> EResult<&mut DirectoryData>\n\n where\n\n P: AsRef<Path>,\n\n {\n\n let abs_subdir_path = path_arg.as_ref();\n\n debug_assert!(abs_subdir_path.is_absolute());\n\n let rel_path = abs_subdir_path.strip_prefix(&self.path).expect(UNEXPECTED);\n\n match rel_path.components().next() {\n\n None => Ok(self),\n\n Some(Component::Normal(first_name)) => match self.index_for(first_name) {\n\n Ok(index) => self.contents[index]\n\n .get_dir_data_mut()\n\n .expect(UNEXPECTED)\n\n .find_or_add_subdir(abs_subdir_path),\n\n Err(index) => {\n", "file_path": "ergibus_lib/src/fs_objects.rs", "rank": 84, "score": 23.483020374159704 }, { "content": " DuplicateFileSystemObjectName,\n\n FSOMalformedPath(std::path::PathBuf),\n\n FSOBrokenSymLink(std::path::PathBuf, std::path::PathBuf),\n\n}\n\n\n\nimpl From<dychatat::RepoError> for Error {\n\n fn from(error: dychatat::RepoError) -> Self {\n\n Error::RepoError(error)\n\n }\n\n}\n\n\n\nimpl From<std::io::Error> for Error {\n\n fn from(error: 
std::io::Error) -> Self {\n\n Error::IOError(error)\n\n }\n\n}\n\n\n\npub type EResult<T> = Result<T, Error>;\n\n\n\nimpl std::fmt::Display for Error {\n\n fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {\n\n write!(f, \"Ergibus library error: {:?}\", self)\n\n }\n\n}\n\n\n\nimpl std::error::Error for Error {}\n", "file_path": "ergibus_lib/src/lib.rs", "rank": 85, "score": 23.030982160908067 }, { "content": "#[derive(Debug)]\n\npub enum Error {\n\n ArchiveDirError(std::io::Error, std::path::PathBuf),\n\n ArchiveEmpty(ArchiveNameOrDirPath),\n\n ArchiveExists(String),\n\n ArchiveUnknown(String),\n\n ArchiveReadError(std::io::Error, std::path::PathBuf),\n\n ArchiveWriteError(std::io::Error, std::path::PathBuf),\n\n ArchiveYamlReadError(serde_yaml::Error, String),\n\n ArchiveYamlWriteError(serde_yaml::Error, String),\n\n RelativeIncludePath(std::path::PathBuf, String),\n\n ArchiveIncludePathError(path_ext::Error, std::path::PathBuf),\n\n\n\n GlobError(globset::Error),\n\n\n\n IOError(std::io::Error),\n\n\n\n ContentCopyIOError(std::io::Error),\n\n RepoError(dychatat::RepoError),\n\n RepoExists(String),\n", "file_path": "ergibus_lib/src/lib.rs", "rank": 86, "score": 22.636447988614677 }, { "content": " let dir_path = PathBuf::from(path)\n\n .canonicalize()\n\n .map_err(|err| Error::ArchiveDirError(err, PathBuf::from(path)))?;\n\n Ok(Self {\n\n archive_name: None,\n\n dir_path,\n\n })\n\n }\n\n}\n\n\n\nimpl Snapshots {\n\n pub fn id(&self) -> ArchiveNameOrDirPath {\n\n if let Some(ref name) = self.archive_name {\n\n ArchiveNameOrDirPath::ArchiveName(name.clone())\n\n } else {\n\n ArchiveNameOrDirPath::DirPath(self.dir_path.clone())\n\n }\n\n }\n\n\n\n pub fn delete(&self) -> EResult<()> {\n", "file_path": "ergibus_lib/src/archive.rs", "rank": 87, "score": 22.5307753589203 }, { "content": " } else {\n\n panic!(\"either --archive or --exigency must be present\");\n\n };\n\n use ContentsSubCmd::*;\n\n match &self.sub_cmd {\n\n Extract {\n\n file_path,\n\n 
dir_path,\n\n overwrite,\n\n with_name,\n\n into_dir,\n\n show_stats,\n\n } => {\n\n let into_dir = if let Some(into_dir) = into_dir {\n\n into_dir.clone()\n\n } else {\n\n env::current_dir()?\n\n };\n\n if let Some(file_path) = file_path {\n\n let stats = snapshot_dir.copy_file_to(\n", "file_path": "ergibus/src/snapshot_sub_cmds.rs", "rank": 88, "score": 21.942854026201353 }, { "content": "use std::convert::TryFrom;\n\nuse std::fs::{self, File};\n\nuse std::io::ErrorKind;\n\nuse std::path::{Path, PathBuf};\n\nuse std::time;\n\n\n\nuse globset::{Glob, GlobSet, GlobSetBuilder};\n\nuse hostname;\n\nuse serde_yaml;\n\nuse users;\n\nuse walkdir;\n\n\n\nuse path_ext::expand_home_dir;\n\nuse path_ext::{absolute_path_buf, PathType};\n\n\n\nuse crate::report::ignore_report_or_fail;\n\nuse crate::{\n\n config,\n\n content::{content_repo_exists, get_content_mgmt_key, ContentMgmtKey},\n\n fs_objects::ExtractionStats,\n", "file_path": "ergibus_lib/src/archive.rs", "rank": 89, "score": 21.92616874421016 }, { "content": " c_mgt_key: &ContentMgmtKey,\n\n overwrite: bool,\n\n ) -> EResult<ExtractionStats> {\n\n // TODO: Add hard link retention to copying of directories\n\n let mut stats = ExtractionStats::default();\n\n clear_way_for_new_dir(to_dir_path, overwrite)?;\n\n if !to_dir_path.is_dir() {\n\n fs::create_dir_all(to_dir_path)\n\n .map_err(|err| Error::SnapshotDirIOError(err, to_dir_path.to_path_buf()))?;\n\n if let Ok(to_dir) = self.find_subdir(to_dir_path) {\n\n to_dir\n\n .attributes\n\n .set_file_attributes(to_dir_path)\n\n .map_err(|err| Error::ContentCopyIOError(err))?;\n\n }\n\n }\n\n stats.dir_count += 1;\n\n // First create all of the sub directories\n\n for subdir in self.subdir_iter(true) {\n\n let path_tail = subdir.path.strip_prefix(&self.path).unwrap(); // Should not fail\n", "file_path": "ergibus_lib/src/fs_objects.rs", "rank": 90, "score": 21.052700007350715 }, { "content": " // expand inclusion paths while relativity is well defined\n\n let mut 
exp_inclusions = vec![];\n\n for inclusion in inclusions {\n\n let abs_inclusion = absolute_path_buf(inclusion)\n\n .map_err(|e| Error::ArchiveIncludePathError(e, inclusion.to_path_buf()))?;\n\n exp_inclusions.push(abs_inclusion.canonicalize()?);\n\n }\n\n let mut snapshot_dir_path = location.as_ref().to_path_buf();\n\n snapshot_dir_path.push(\"ergibus\");\n\n snapshot_dir_path.push(\"archives\");\n\n match hostname::get_hostname() {\n\n Some(hostname) => snapshot_dir_path.push(hostname),\n\n None => (),\n\n };\n\n match users::get_current_username() {\n\n Some(user_name) => snapshot_dir_path.push(user_name),\n\n None => (),\n\n };\n\n snapshot_dir_path.push(name);\n\n fs::create_dir_all(&snapshot_dir_path)\n", "file_path": "ergibus_lib/src/archive.rs", "rank": 91, "score": 20.89718423903434 }, { "content": " use std::os::unix::fs::symlink;\n\n symlink(&self.link_target, as_path)\n\n .map_err(|err| Error::SnapshotMoveAsideFailed(as_path.to_path_buf(), err))?;\n\n } else {\n\n panic!(\"not implemented for this os\")\n\n }\n\n Ok(())\n\n }\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Debug, Default, PartialEq)]\n\npub struct DirectoryData {\n\n pub(crate) path: PathBuf,\n\n attributes: Attributes,\n\n pub(crate) contents: Vec<FileSystemObject>,\n\n}\n\n\n\n#[derive(Serialize, Deserialize, PartialEq, Debug, Default, Copy, Clone)]\n\npub struct FileStats {\n\n pub file_count: u64,\n", "file_path": "ergibus_lib/src/fs_objects.rs", "rank": 92, "score": 20.637216268714404 }, { "content": " \"{} -> {}\",\n\n link_data.name().to_string_lossy(),\n\n link_data.link_target.to_string_lossy()\n\n ),\n\n }\n\n }\n\n}\n\n\n\nimpl FileSystemObject {\n\n pub fn get_dir_data(&self) -> Option<&DirectoryData> {\n\n use FileSystemObject::*;\n\n match self {\n\n Directory(dir_data) => Some(dir_data),\n\n _ => None,\n\n }\n\n }\n\n\n\n pub fn get_dir_data_mut(&mut self) -> Option<&mut DirectoryData> {\n\n use FileSystemObject::*;\n\n match self {\n", "file_path": 
"ergibus_lib/src/fs_objects.rs", "rank": 93, "score": 20.38592002087139 }, { "content": " RepoReadError(std::io::Error, std::path::PathBuf),\n\n RepoWriteError(std::io::Error, std::path::PathBuf),\n\n UnknownRepo(String),\n\n\n\n LastSnapshot(ArchiveNameOrDirPath),\n\n NoSnapshotAvailable,\n\n SnapshotDeleteIOError(std::io::Error, std::path::PathBuf),\n\n SnapshotDirIOError(std::io::Error, std::path::PathBuf),\n\n SnapshotIndexOutOfRange(ArchiveNameOrDirPath, i64),\n\n SnapshotMismatch(std::path::PathBuf),\n\n SnapshotMismatchDirty(std::io::Error, std::path::PathBuf),\n\n SnapshotMoveAsideFailed(std::path::PathBuf, std::io::Error),\n\n SnapshotReadIOError(std::io::Error, std::path::PathBuf),\n\n SnapshotReadJsonError(serde_json::Error, std::path::PathBuf),\n\n SnapshotUnknownFile(std::path::PathBuf),\n\n SnapshotUnknownDirectory(std::path::PathBuf),\n\n SnapshotWriteIOError(std::io::Error, std::path::PathBuf),\n\n SnapshotSerializeError(serde_json::Error),\n\n SnapshotsFailed(i32),\n\n\n", "file_path": "ergibus_lib/src/lib.rs", "rank": 94, "score": 20.36891827790828 }, { "content": " pub fn find_file<P: AsRef<Path>>(&self, file_path_arg: P) -> EResult<&FileData> {\n\n let file_path = file_path_arg.as_ref();\n\n match file_path.file_name() {\n\n Some(file_name) => {\n\n if let Some(dir_path) = file_path.parent() {\n\n let subdir = self.find_subdir(dir_path)?;\n\n match subdir.get_file(file_name) {\n\n Some(file_data) => Ok(file_data),\n\n None => Err(Error::SnapshotUnknownFile(file_path.to_path_buf())),\n\n }\n\n } else {\n\n match self.get_file(file_name) {\n\n Some(file_data) => Ok(file_data),\n\n None => Err(Error::SnapshotUnknownFile(file_path.to_path_buf())),\n\n }\n\n }\n\n }\n\n None => Err(Error::SnapshotUnknownFile(file_path.to_path_buf())),\n\n }\n\n }\n", "file_path": "ergibus_lib/src/fs_objects.rs", "rank": 95, "score": 20.329968524859865 }, { "content": " let dir_path = dir_path_arg.as_ref();\n\n match PathType::of(dir_path) {\n\n PathType::Absolute => 
self.root_dir.find_subdir(dir_path),\n\n PathType::RelativeCurDirImplicit => self\n\n .root_dir\n\n .find_subdir(&self.base_dir_path.join(dir_path)),\n\n PathType::Empty => self.root_dir.find_subdir(&self.base_dir_path),\n\n _ => self.root_dir.find_subdir(\n\n absolute_path_buf(dir_path)\n\n .map_err(|_| Error::SnapshotUnknownDirectory(dir_path.to_path_buf()))?,\n\n ),\n\n }\n\n }\n\n\n\n pub fn find_file<P: AsRef<Path>>(&self, file_path_arg: P) -> EResult<&FileData> {\n\n let file_path = file_path_arg.as_ref();\n\n match PathType::of(file_path) {\n\n PathType::Absolute => self.root_dir.find_file(file_path),\n\n PathType::RelativeCurDirImplicit => {\n\n self.root_dir.find_file(&self.base_dir_path.join(file_path))\n", "file_path": "ergibus_lib/src/snapshot.rs", "rank": 96, "score": 20.307093885487927 }, { "content": "// Copyright 2021 Peter Williams <[email protected]> <[email protected]>\n\n\n\nuse crate::archive::{get_archive_data, ArchiveData, Exclusions};\n\nuse crate::content::ContentMgmtKey;\n\nuse crate::fs_objects::{DirectoryData, ExtractionStats, FileData, SymLinkData};\n\nuse crate::fs_objects::{FileStats, SymLinkStats};\n\nuse crate::report::ignore_report_or_fail;\n\nuse crate::{archive, EResult, Error, UNEXPECTED};\n\nuse chrono::{DateTime, Local};\n\nuse log::*;\n\nuse path_ext::{absolute_path_buf, PathType};\n\nuse serde::Serialize;\n\nuse std::convert::TryFrom;\n\nuse std::fs::{DirEntry, File};\n\nuse std::io::{self, ErrorKind, Read, Write};\n\nuse std::path::{Component, Path, PathBuf};\n\nuse std::{fs, time};\n\n\n", "file_path": "ergibus_lib/src/snapshot.rs", "rank": 97, "score": 19.898785207072642 }, { "content": " /// The file system path of the directory where the repository will reside.\n\n base_dir_path: PathBuf,\n\n /// The hash algorithm to be used when calculating content digests.\n\n hash_algorithm: HashAlgorithm,\n\n}\n\n\n\nimpl fmt::Display for RepoSpec {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n write!(\n\n 
f,\n\n \"dir: {} digest: {}\",\n\n self.base_dir_path.as_os_str().to_string_lossy(),\n\n self.hash_algorithm\n\n )\n\n }\n\n}\n\n\n\nimpl RepoSpec {\n\n pub fn new<P: AsRef<Path>>(base_dir_path: P, hash_algorithm: HashAlgorithm) -> Self {\n\n let base_dir_path = base_dir_path.as_ref().to_path_buf();\n", "file_path": "dychatat/src/lib.rs", "rank": 98, "score": 19.718339112244905 }, { "content": "impl AddAssign for SymLinkStats {\n\n fn add_assign(&mut self, other: SymLinkStats) {\n\n *self = SymLinkStats {\n\n dir_sym_link_count: self.dir_sym_link_count + other.dir_sym_link_count,\n\n file_sym_link_count: self.file_sym_link_count + other.file_sym_link_count,\n\n };\n\n }\n\n}\n\n\n\nimpl DirectoryData {\n\n pub fn try_new<P: AsRef<Path>>(root_dir: P) -> EResult<Self> {\n\n let mut dir_data = Self::default();\n\n dir_data.path = root_dir.as_ref().canonicalize()?;\n\n dir_data.attributes = dir_data.path.metadata()?.into();\n\n\n\n Ok(dir_data)\n\n }\n\n\n\n pub fn file_system_object<P: AsRef<Path>>(root_dir: P) -> EResult<FileSystemObject> {\n\n Ok(FileSystemObject::Directory(Self::try_new(root_dir)?))\n", "file_path": "ergibus_lib/src/fs_objects.rs", "rank": 99, "score": 19.447703565743897 } ]
Rust
wgpu-hal/src/dx11/library.rs
jinleili/wgpu
a845dcc21c976e1de3de7b3e0bec8703dcdd905c
use std::ptr; use winapi::{ shared::{ dxgi, minwindef::{HMODULE, UINT}, winerror, }, um::{d3d11, d3d11_1, d3d11_2, d3dcommon}, }; use crate::auxil::dxgi::result::HResult; type D3D11CreateDeviceFun = unsafe extern "system" fn( *mut dxgi::IDXGIAdapter, d3dcommon::D3D_DRIVER_TYPE, HMODULE, UINT, *const d3dcommon::D3D_FEATURE_LEVEL, UINT, UINT, *mut *mut d3d11::ID3D11Device, *mut d3dcommon::D3D_FEATURE_LEVEL, *mut *mut d3d11::ID3D11DeviceContext, ) -> native::HRESULT; pub(super) struct D3D11Lib { d3d11_create_device: libloading::os::windows::Symbol<D3D11CreateDeviceFun>, lib: libloading::Library, } impl D3D11Lib { pub fn new() -> Option<Self> { unsafe { let lib = libloading::Library::new("d3d11.dll").ok()?; let d3d11_create_device = lib .get::<D3D11CreateDeviceFun>(b"D3D11CreateDevice") .ok()? .into_raw(); Some(Self { lib, d3d11_create_device, }) } } pub fn create_device( &self, adapter: native::DxgiAdapter, ) -> Option<(super::D3D11Device, d3dcommon::D3D_FEATURE_LEVEL)> { let feature_levels = [ d3dcommon::D3D_FEATURE_LEVEL_11_1, d3dcommon::D3D_FEATURE_LEVEL_11_0, d3dcommon::D3D_FEATURE_LEVEL_10_1, d3dcommon::D3D_FEATURE_LEVEL_10_0, d3dcommon::D3D_FEATURE_LEVEL_9_3, d3dcommon::D3D_FEATURE_LEVEL_9_2, d3dcommon::D3D_FEATURE_LEVEL_9_1, ]; let mut device = native::WeakPtr::<d3d11::ID3D11Device>::null(); let mut feature_level: d3dcommon::D3D_FEATURE_LEVEL = 0; let mut hr = unsafe { (self.d3d11_create_device)( adapter.as_mut_ptr() as *mut _, d3dcommon::D3D_DRIVER_TYPE_UNKNOWN, ptr::null_mut(), 0, feature_levels.as_ptr(), feature_levels.len() as u32, d3d11::D3D11_SDK_VERSION, device.mut_self(), &mut feature_level, ptr::null_mut(), ) }; if hr == winerror::E_INVALIDARG { hr = unsafe { (self.d3d11_create_device)( adapter.as_mut_ptr() as *mut _, d3dcommon::D3D_DRIVER_TYPE_UNKNOWN, ptr::null_mut(), 0, feature_levels[1..].as_ptr(), feature_levels[1..].len() as u32, d3d11::D3D11_SDK_VERSION, device.mut_self(), &mut feature_level, ptr::null_mut(), ) }; } if let Err(err) = 
hr.into_result() { log::error!("Failed to make a D3D11 device: {}", err); return None; } unsafe { match device.cast::<d3d11_2::ID3D11Device2>().into_result() { Ok(device2) => { device.destroy(); return Some((super::D3D11Device::Device2(device2), feature_level)); } Err(hr) => { log::info!("Failed to cast device to ID3D11Device2: {}", hr) } } } unsafe { match device.cast::<d3d11_1::ID3D11Device1>().into_result() { Ok(device1) => { device.destroy(); return Some((super::D3D11Device::Device1(device1), feature_level)); } Err(hr) => { log::info!("Failed to cast device to ID3D11Device1: {}", hr) } } } Some((super::D3D11Device::Device(device), feature_level)) } }
use std::ptr; use winapi::{ shared::{ dxgi, minwindef::{HMODULE, UINT}, winerror, }, um::{d3d11, d3d11_1, d3d11_2, d3dcommon}, }; use crate::auxil::dxgi::result::HResult; type D3D11CreateDeviceFun = unsafe extern "system" fn( *mut dxgi::IDXGIAdapter, d3dcommon::D3D_DRIVER_TYPE, HMODULE, UINT, *const d3dcommon::D3D_FEATURE_LEVEL, UINT, UINT, *mut *mut d3d11::ID3D11Device, *mut d3dcommon::D3D_FEATURE_LEVEL, *mut *mut d3d11::ID3D11DeviceContext, ) -> native::HRESULT; pub(super) struct D3D11Lib { d3d11_create_device: libloading::os::windows::Symbol<D3D11CreateDeviceFun>, lib: libloading::Library, } impl D3D11Lib { pub fn new() -> Option<Self> {
pub fn create_device( &self, adapter: native::DxgiAdapter, ) -> Option<(super::D3D11Device, d3dcommon::D3D_FEATURE_LEVEL)> { let feature_levels = [ d3dcommon::D3D_FEATURE_LEVEL_11_1, d3dcommon::D3D_FEATURE_LEVEL_11_0, d3dcommon::D3D_FEATURE_LEVEL_10_1, d3dcommon::D3D_FEATURE_LEVEL_10_0, d3dcommon::D3D_FEATURE_LEVEL_9_3, d3dcommon::D3D_FEATURE_LEVEL_9_2, d3dcommon::D3D_FEATURE_LEVEL_9_1, ]; let mut device = native::WeakPtr::<d3d11::ID3D11Device>::null(); let mut feature_level: d3dcommon::D3D_FEATURE_LEVEL = 0; let mut hr = unsafe { (self.d3d11_create_device)( adapter.as_mut_ptr() as *mut _, d3dcommon::D3D_DRIVER_TYPE_UNKNOWN, ptr::null_mut(), 0, feature_levels.as_ptr(), feature_levels.len() as u32, d3d11::D3D11_SDK_VERSION, device.mut_self(), &mut feature_level, ptr::null_mut(), ) }; if hr == winerror::E_INVALIDARG { hr = unsafe { (self.d3d11_create_device)( adapter.as_mut_ptr() as *mut _, d3dcommon::D3D_DRIVER_TYPE_UNKNOWN, ptr::null_mut(), 0, feature_levels[1..].as_ptr(), feature_levels[1..].len() as u32, d3d11::D3D11_SDK_VERSION, device.mut_self(), &mut feature_level, ptr::null_mut(), ) }; } if let Err(err) = hr.into_result() { log::error!("Failed to make a D3D11 device: {}", err); return None; } unsafe { match device.cast::<d3d11_2::ID3D11Device2>().into_result() { Ok(device2) => { device.destroy(); return Some((super::D3D11Device::Device2(device2), feature_level)); } Err(hr) => { log::info!("Failed to cast device to ID3D11Device2: {}", hr) } } } unsafe { match device.cast::<d3d11_1::ID3D11Device1>().into_result() { Ok(device1) => { device.destroy(); return Some((super::D3D11Device::Device1(device1), feature_level)); } Err(hr) => { log::info!("Failed to cast device to ID3D11Device1: {}", hr) } } } Some((super::D3D11Device::Device(device), feature_level)) } }
unsafe { let lib = libloading::Library::new("d3d11.dll").ok()?; let d3d11_create_device = lib .get::<D3D11CreateDeviceFun>(b"D3D11CreateDevice") .ok()? .into_raw(); Some(Self { lib, d3d11_create_device, }) } }
function_block-function_prefix_line
[ { "content": "type WlDisplayDisconnectFun = unsafe extern \"system\" fn(display: *const raw::c_void);\n\n\n", "file_path": "wgpu-hal/src/gles/egl.rs", "rank": 1, "score": 363831.6744697565 }, { "content": "type WlEglWindowDestroyFun = unsafe extern \"system\" fn(window: *const raw::c_void);\n\n\n\n#[cfg(target_os = \"android\")]\n\nextern \"C\" {\n\n pub fn ANativeWindow_setBuffersGeometry(\n\n window: *mut raw::c_void,\n\n width: i32,\n\n height: i32,\n\n format: i32,\n\n ) -> i32;\n\n}\n\n\n", "file_path": "wgpu-hal/src/gles/egl.rs", "rank": 2, "score": 359086.98347811296 }, { "content": "type WlEglWindowCreateFun = unsafe extern \"system\" fn(\n\n surface: *const raw::c_void,\n\n width: raw::c_int,\n\n height: raw::c_int,\n\n) -> *mut raw::c_void;\n\n\n", "file_path": "wgpu-hal/src/gles/egl.rs", "rank": 3, "score": 339476.67797235865 }, { "content": "type WlEglWindowResizeFun = unsafe extern \"system\" fn(\n\n window: *const raw::c_void,\n\n width: raw::c_int,\n\n height: raw::c_int,\n\n dx: raw::c_int,\n\n dy: raw::c_int,\n\n);\n\n\n", "file_path": "wgpu-hal/src/gles/egl.rs", "rank": 4, "score": 339476.6779723587 }, { "content": "/// Tries to create a IDXGIFactory6, then a IDXGIFactory4, then a IDXGIFactory2, then a IDXGIFactory1,\n\n/// returning the one that succeeds, or if the required_factory_type fails to be\n\n/// created.\n\npub fn create_factory(\n\n required_factory_type: DxgiFactoryType,\n\n instance_flags: crate::InstanceFlags,\n\n) -> Result<(native::DxgiLib, native::DxgiFactory), crate::InstanceError> {\n\n let lib_dxgi = native::DxgiLib::new().map_err(|_| crate::InstanceError)?;\n\n\n\n let mut factory_flags = native::FactoryCreationFlags::empty();\n\n\n\n if instance_flags.contains(crate::InstanceFlags::VALIDATION) {\n\n // The `DXGI_CREATE_FACTORY_DEBUG` flag is only allowed to be passed to\n\n // `CreateDXGIFactory2` if the debug interface is actually available. 
So\n\n // we check for whether it exists first.\n\n match lib_dxgi.get_debug_interface1() {\n\n Ok(pair) => match pair.into_result() {\n\n Ok(debug_controller) => {\n\n unsafe { debug_controller.destroy() };\n\n factory_flags |= native::FactoryCreationFlags::DEBUG;\n\n }\n\n Err(err) => {\n\n log::warn!(\"Unable to enable DXGI debug interface: {}\", err);\n", "file_path": "wgpu-hal/src/auxil/dxgi/factory.rs", "rank": 5, "score": 222307.73850673705 }, { "content": "pub fn unregister_exception_handler() {\n\n let mut count_guard = EXCEPTION_HANLDER_COUNT.lock();\n\n if *count_guard == 1 {\n\n unsafe {\n\n errhandlingapi::RemoveVectoredExceptionHandler(output_debug_string_handler as *mut _)\n\n };\n\n }\n\n *count_guard -= 1;\n\n}\n\n\n\nconst MESSAGE_PREFIXES: &[(&str, log::Level)] = &[\n\n (\"CORRUPTION\", log::Level::Error),\n\n (\"ERROR\", log::Level::Error),\n\n (\"WARNING\", log::Level::Warn),\n\n (\"INFO\", log::Level::Info),\n\n (\"MESSAGE\", log::Level::Debug),\n\n];\n\n\n\nunsafe extern \"system\" fn output_debug_string_handler(\n\n exception_info: *mut winnt::EXCEPTION_POINTERS,\n", "file_path": "wgpu-hal/src/auxil/dxgi/exception.rs", "rank": 6, "score": 218999.360972688 }, { "content": "pub fn register_exception_handler() {\n\n let mut count_guard = EXCEPTION_HANLDER_COUNT.lock();\n\n if *count_guard == 0 {\n\n unsafe {\n\n errhandlingapi::AddVectoredExceptionHandler(0, Some(output_debug_string_handler))\n\n };\n\n }\n\n *count_guard += 1;\n\n}\n\n\n", "file_path": "wgpu-hal/src/auxil/dxgi/exception.rs", "rank": 7, "score": 218999.360972688 }, { "content": "#[op]\n\npub fn op_webgpu_create_query_set(\n\n state: &mut OpState,\n\n args: CreateQuerySetArgs,\n\n) -> Result<WebGpuResult, AnyError> {\n\n let device_resource = state.resource_table.get::<WebGpuDevice>(args.device_rid)?;\n\n let device = device_resource.0;\n\n let instance = &state.borrow::<Instance>();\n\n\n\n let descriptor = wgpu_types::QuerySetDescriptor {\n\n label: 
args.label.map(Cow::from),\n\n ty: args.r#type.into(),\n\n count: args.count,\n\n };\n\n\n\n gfx_put!(device => instance.device_create_query_set(\n\n device,\n\n &descriptor,\n\n ()\n\n ) => state, WebGpuQuerySet)\n\n}\n\n\n", "file_path": "deno_webgpu/src/lib.rs", "rank": 8, "score": 218312.66036740367 }, { "content": "#[cfg(test)]\n\n#[allow(dead_code)]\n\npub fn test<E: Example>(mut params: FrameworkRefTest) {\n\n use std::{mem, num::NonZeroU32};\n\n\n\n assert_eq!(params.width % 64, 0, \"width needs to be aligned 64\");\n\n\n\n let features = E::required_features() | params.optional_features;\n\n\n\n test_common::initialize_test(\n\n mem::take(&mut params.base_test_parameters).features(features),\n\n |ctx| {\n\n let spawner = Spawner::new();\n\n\n\n let dst_texture = ctx.device.create_texture(&wgpu::TextureDescriptor {\n\n label: Some(\"destination\"),\n\n size: wgpu::Extent3d {\n\n width: params.width,\n\n height: params.height,\n\n depth_or_array_layers: 1,\n\n },\n\n mip_level_count: 1,\n", "file_path": "wgpu/examples/framework.rs", "rank": 9, "score": 214273.41477771866 }, { "content": "pub fn initialize_test(parameters: TestParameters, test_function: impl FnOnce(TestingContext)) {\n\n // We don't actually care if it fails\n\n #[cfg(not(target_arch = \"wasm32\"))]\n\n let _ = env_logger::try_init();\n\n #[cfg(target_arch = \"wasm32\")]\n\n let _ = console_log::init_with_level(log::Level::Info);\n\n\n\n let _test_guard = isolation::OneTestPerProcessGuard::new();\n\n\n\n let (adapter, _surface_guard) = initialize_adapter();\n\n\n\n let adapter_info = adapter.get_info();\n\n let adapter_lowercase_name = adapter_info.name.to_lowercase();\n\n let adapter_features = adapter.features();\n\n let adapter_limits = adapter.limits();\n\n let adapter_downlevel_capabilities = adapter.get_downlevel_capabilities();\n\n\n\n let missing_features = parameters.required_features - adapter_features;\n\n if !missing_features.is_empty() {\n\n log::info!(\"TEST SKIPPED: MISSING 
FEATURES {:?}\", missing_features);\n", "file_path": "wgpu/tests/common/mod.rs", "rank": 10, "score": 212845.08105285728 }, { "content": "pub fn init(unstable: bool) -> Extension {\n\n Extension::builder(env!(\"CARGO_PKG_NAME\"))\n\n .dependencies(vec![\"deno_webidl\", \"deno_web\"])\n\n .js(include_js_files!(\n\n prefix \"deno:deno_webgpu\",\n\n \"01_webgpu.js\",\n\n \"02_idl_types.js\",\n\n ))\n\n .ops(declare_webgpu_ops())\n\n .state(move |state| {\n\n // TODO: check & possibly streamline this\n\n // Unstable might be able to be OpMiddleware\n\n // let unstable_checker = state.borrow::<super::UnstableChecker>();\n\n // let unstable = unstable_checker.unstable;\n\n state.put(Unstable(unstable));\n\n Ok(())\n\n })\n\n .build()\n\n}\n\n\n", "file_path": "deno_webgpu/src/lib.rs", "rank": 11, "score": 204512.93174006545 }, { "content": "// Run some code in an error scope and assert that validation fails.\n\npub fn fail<T>(device: &wgpu::Device, callback: impl FnOnce() -> T) -> T {\n\n device.push_error_scope(wgpu::ErrorFilter::Validation);\n\n let result = callback();\n\n assert!(pollster::block_on(device.pop_error_scope()).is_some());\n\n\n\n result\n\n}\n\n\n", "file_path": "wgpu/tests/common/mod.rs", "rank": 12, "score": 204099.70201135936 }, { "content": "// Run some code in an error scope and assert that validation succeeds.\n\npub fn valid<T>(device: &wgpu::Device, callback: impl FnOnce() -> T) -> T {\n\n device.push_error_scope(wgpu::ErrorFilter::Validation);\n\n let result = callback();\n\n assert!(pollster::block_on(device.pop_error_scope()).is_none());\n\n\n\n result\n\n}\n\n\n", "file_path": "wgpu/tests/common/mod.rs", "rank": 13, "score": 204099.70201135936 }, { "content": "pub fn map_binding_type(ty: wgt::BindingType) -> vk::DescriptorType {\n\n match ty {\n\n wgt::BindingType::Buffer {\n\n ty,\n\n has_dynamic_offset,\n\n ..\n\n } => match ty {\n\n wgt::BufferBindingType::Storage { .. 
} => match has_dynamic_offset {\n\n true => vk::DescriptorType::STORAGE_BUFFER_DYNAMIC,\n\n false => vk::DescriptorType::STORAGE_BUFFER,\n\n },\n\n wgt::BufferBindingType::Uniform => match has_dynamic_offset {\n\n true => vk::DescriptorType::UNIFORM_BUFFER_DYNAMIC,\n\n false => vk::DescriptorType::UNIFORM_BUFFER,\n\n },\n\n },\n\n wgt::BindingType::Sampler { .. } => vk::DescriptorType::SAMPLER,\n\n wgt::BindingType::Texture { .. } => vk::DescriptorType::SAMPLED_IMAGE,\n\n wgt::BindingType::StorageTexture { .. } => vk::DescriptorType::STORAGE_IMAGE,\n\n }\n\n}\n\n\n", "file_path": "wgpu-hal/src/vulkan/conv.rs", "rank": 14, "score": 198266.13980044017 }, { "content": "pub fn map_binding_type(ty: &wgt::BindingType) -> native::DescriptorRangeType {\n\n use wgt::BindingType as Bt;\n\n match *ty {\n\n Bt::Sampler { .. } => native::DescriptorRangeType::Sampler,\n\n Bt::Buffer {\n\n ty: wgt::BufferBindingType::Uniform,\n\n ..\n\n } => native::DescriptorRangeType::CBV,\n\n Bt::Buffer {\n\n ty: wgt::BufferBindingType::Storage { read_only: true },\n\n ..\n\n }\n\n | Bt::Texture { .. } => native::DescriptorRangeType::SRV,\n\n Bt::Buffer {\n\n ty: wgt::BufferBindingType::Storage { read_only: false },\n\n ..\n\n }\n\n | Bt::StorageTexture { .. 
} => native::DescriptorRangeType::UAV,\n\n }\n\n}\n\n\n", "file_path": "wgpu-hal/src/dx12/conv.rs", "rank": 15, "score": 196005.19429515343 }, { "content": "#[test]\n\nfn texture_format_serialize() {\n\n assert_eq!(\n\n serde_json::to_string(&TextureFormat::R8Unorm).unwrap(),\n\n \"\\\"r8unorm\\\"\".to_string()\n\n );\n\n assert_eq!(\n\n serde_json::to_string(&TextureFormat::R8Snorm).unwrap(),\n\n \"\\\"r8snorm\\\"\".to_string()\n\n );\n\n assert_eq!(\n\n serde_json::to_string(&TextureFormat::R8Uint).unwrap(),\n\n \"\\\"r8uint\\\"\".to_string()\n\n );\n\n assert_eq!(\n\n serde_json::to_string(&TextureFormat::R8Sint).unwrap(),\n\n \"\\\"r8sint\\\"\".to_string()\n\n );\n\n assert_eq!(\n\n serde_json::to_string(&TextureFormat::R16Uint).unwrap(),\n\n \"\\\"r16uint\\\"\".to_string()\n", "file_path": "wgpu-types/src/lib.rs", "rank": 16, "score": 194507.23363377363 }, { "content": "#[test]\n\nfn texture_format_deserialize() {\n\n assert_eq!(\n\n serde_json::from_str::<TextureFormat>(\"\\\"r8unorm\\\"\").unwrap(),\n\n TextureFormat::R8Unorm\n\n );\n\n assert_eq!(\n\n serde_json::from_str::<TextureFormat>(\"\\\"r8snorm\\\"\").unwrap(),\n\n TextureFormat::R8Snorm\n\n );\n\n assert_eq!(\n\n serde_json::from_str::<TextureFormat>(\"\\\"r8uint\\\"\").unwrap(),\n\n TextureFormat::R8Uint\n\n );\n\n assert_eq!(\n\n serde_json::from_str::<TextureFormat>(\"\\\"r8sint\\\"\").unwrap(),\n\n TextureFormat::R8Sint\n\n );\n\n assert_eq!(\n\n serde_json::from_str::<TextureFormat>(\"\\\"r16uint\\\"\").unwrap(),\n\n TextureFormat::R16Uint\n", "file_path": "wgpu-types/src/lib.rs", "rank": 17, "score": 194507.23363377363 }, { "content": "#[test]\n\nfn test_max_mips() {\n\n // 1D\n\n assert_eq!(\n\n Extent3d {\n\n width: 240,\n\n height: 1,\n\n depth_or_array_layers: 1\n\n }\n\n .max_mips(TextureDimension::D1),\n\n 1\n\n );\n\n // 2D\n\n assert_eq!(\n\n Extent3d {\n\n width: 1,\n\n height: 1,\n\n depth_or_array_layers: 1\n\n }\n\n .max_mips(TextureDimension::D2),\n\n 1\n", "file_path": 
"wgpu-types/src/lib.rs", "rank": 18, "score": 194507.23363377363 }, { "content": "#[test]\n\nfn test_physical_size() {\n\n let format = TextureFormat::Bc1RgbaUnormSrgb; // 4x4 blocks\n\n assert_eq!(\n\n Extent3d {\n\n width: 7,\n\n height: 7,\n\n depth_or_array_layers: 1\n\n }\n\n .physical_size(format),\n\n Extent3d {\n\n width: 8,\n\n height: 8,\n\n depth_or_array_layers: 1\n\n }\n\n );\n\n // Doesn't change, already aligned\n\n assert_eq!(\n\n Extent3d {\n\n width: 8,\n\n height: 8,\n", "file_path": "wgpu-types/src/lib.rs", "rank": 19, "score": 194507.23363377363 }, { "content": "/// Type for the callback of uncaptured error handler\n\npub trait UncapturedErrorHandler: Fn(Error) + Send + 'static {}\n\nimpl<T> UncapturedErrorHandler for T where T: Fn(Error) + Send + 'static {}\n\n\n\n/// Error type\n\n#[derive(Debug)]\n\npub enum Error {\n\n /// Out of memory error\n\n OutOfMemory {\n\n /// Lower level source of the error.\n\n source: Box<dyn error::Error + Send + 'static>,\n\n },\n\n /// Validation error, signifying a bug in code or data\n\n Validation {\n\n /// Lower level source of the error.\n\n source: Box<dyn error::Error + Send + 'static>,\n\n /// Description of the validation error.\n\n description: String,\n\n },\n\n}\n\nstatic_assertions::assert_impl_all!(Error: Send);\n", "file_path": "wgpu/src/lib.rs", "rank": 20, "score": 194005.33790107153 }, { "content": "// Run some code in an error scope and assert that validation succeeds or fails depending on the\n\n// provided `should_fail` boolean.\n\npub fn fail_if<T>(device: &wgpu::Device, should_fail: bool, callback: impl FnOnce() -> T) -> T {\n\n if should_fail {\n\n fail(device, callback)\n\n } else {\n\n valid(device, callback)\n\n }\n\n}\n", "file_path": "wgpu/tests/common/mod.rs", "rank": 21, "score": 193549.97112988273 }, { "content": "pub fn enumerate_adapters(factory: native::DxgiFactory) -> Vec<native::DxgiAdapter> {\n\n let mut adapters = Vec::with_capacity(8);\n\n\n\n for cur_index in 0.. 
{\n\n if let Some(factory6) = factory.as_factory6() {\n\n profiling::scope!(\"IDXGIFactory6::EnumAdapterByGpuPreference\");\n\n // We're already at dxgi1.6, we can grab IDXGIAdapater4 directly\n\n let mut adapter4 = native::WeakPtr::<dxgi1_6::IDXGIAdapter4>::null();\n\n let hr = unsafe {\n\n factory6.EnumAdapterByGpuPreference(\n\n cur_index,\n\n dxgi1_6::DXGI_GPU_PREFERENCE_HIGH_PERFORMANCE,\n\n &dxgi1_6::IDXGIAdapter4::uuidof(),\n\n adapter4.mut_void(),\n\n )\n\n };\n\n\n\n if hr == winerror::DXGI_ERROR_NOT_FOUND {\n\n break;\n\n }\n", "file_path": "wgpu-hal/src/auxil/dxgi/factory.rs", "rank": 22, "score": 193381.95959679302 }, { "content": "#[cfg(feature = \"serde\")]\n\nfn default_depth() -> u32 {\n\n 1\n\n}\n\n\n\nimpl Default for Extent3d {\n\n fn default() -> Self {\n\n Self {\n\n width: 1,\n\n height: 1,\n\n depth_or_array_layers: 1,\n\n }\n\n }\n\n}\n\n\n\nimpl Extent3d {\n\n /// Calculates the [physical size] backing a texture of the given\n\n /// format and extent. This includes padding to the block width\n\n /// and height of the format.\n\n ///\n\n /// This is the texture extent that you must upload at when uploading to _mipmaps_ of compressed textures.\n", "file_path": "wgpu-types/src/lib.rs", "rank": 23, "score": 190477.79986169277 }, { "content": "pub fn map_vertex_format(format: wgt::VertexFormat) -> dxgiformat::DXGI_FORMAT {\n\n use wgt::VertexFormat as Vf;\n\n use winapi::shared::dxgiformat::*;\n\n\n\n match format {\n\n Vf::Unorm8x2 => DXGI_FORMAT_R8G8_UNORM,\n\n Vf::Snorm8x2 => DXGI_FORMAT_R8G8_SNORM,\n\n Vf::Uint8x2 => DXGI_FORMAT_R8G8_UINT,\n\n Vf::Sint8x2 => DXGI_FORMAT_R8G8_SINT,\n\n Vf::Unorm8x4 => DXGI_FORMAT_R8G8B8A8_UNORM,\n\n Vf::Snorm8x4 => DXGI_FORMAT_R8G8B8A8_SNORM,\n\n Vf::Uint8x4 => DXGI_FORMAT_R8G8B8A8_UINT,\n\n Vf::Sint8x4 => DXGI_FORMAT_R8G8B8A8_SINT,\n\n Vf::Unorm16x2 => DXGI_FORMAT_R16G16_UNORM,\n\n Vf::Snorm16x2 => DXGI_FORMAT_R16G16_SNORM,\n\n Vf::Uint16x2 => DXGI_FORMAT_R16G16_UINT,\n\n Vf::Sint16x2 => 
DXGI_FORMAT_R16G16_SINT,\n\n Vf::Float16x2 => DXGI_FORMAT_R16G16_FLOAT,\n\n Vf::Unorm16x4 => DXGI_FORMAT_R16G16B16A16_UNORM,\n\n Vf::Snorm16x4 => DXGI_FORMAT_R16G16B16A16_SNORM,\n", "file_path": "wgpu-hal/src/auxil/dxgi/conv.rs", "rank": 24, "score": 189173.2756909353 }, { "content": "pub fn map_index_format(format: wgt::IndexFormat) -> dxgiformat::DXGI_FORMAT {\n\n match format {\n\n wgt::IndexFormat::Uint16 => dxgiformat::DXGI_FORMAT_R16_UINT,\n\n wgt::IndexFormat::Uint32 => dxgiformat::DXGI_FORMAT_R32_UINT,\n\n }\n\n}\n\n\n", "file_path": "wgpu-hal/src/auxil/dxgi/conv.rs", "rank": 25, "score": 189173.2756909353 }, { "content": "pub fn map_texture_format(format: wgt::TextureFormat) -> dxgiformat::DXGI_FORMAT {\n\n match map_texture_format_failable(format) {\n\n Some(f) => f,\n\n None => unreachable!(),\n\n }\n\n}\n\n\n", "file_path": "wgpu-hal/src/auxil/dxgi/conv.rs", "rank": 26, "score": 189173.2756909353 }, { "content": "// Note: SRV and UAV can't use the depth formats directly\n\npub fn map_texture_format_nodepth(format: wgt::TextureFormat) -> dxgiformat::DXGI_FORMAT {\n\n match format {\n\n wgt::TextureFormat::Depth16Unorm => dxgiformat::DXGI_FORMAT_R16_UNORM,\n\n wgt::TextureFormat::Depth32Float => dxgiformat::DXGI_FORMAT_R32_FLOAT,\n\n wgt::TextureFormat::Depth32FloatStencil8 => {\n\n dxgiformat::DXGI_FORMAT_R32_FLOAT_X8X24_TYPELESS\n\n }\n\n wgt::TextureFormat::Stencil8\n\n | wgt::TextureFormat::Depth24Plus\n\n | wgt::TextureFormat::Depth24PlusStencil8 => dxgiformat::DXGI_FORMAT_R24_UNORM_X8_TYPELESS,\n\n _ => {\n\n assert_eq!(\n\n crate::FormatAspects::from(format),\n\n crate::FormatAspects::COLOR\n\n );\n\n map_texture_format(format)\n\n }\n\n }\n\n}\n\n\n", "file_path": "wgpu-hal/src/auxil/dxgi/conv.rs", "rank": 27, "score": 186882.73982232754 }, { "content": "// Note: DXGI doesn't allow sRGB format on the swapchain,\n\n// but creating RTV of swapchain buffers with sRGB works.\n\npub fn map_texture_format_nosrgb(format: wgt::TextureFormat) -> 
dxgiformat::DXGI_FORMAT {\n\n match format {\n\n wgt::TextureFormat::Bgra8UnormSrgb => dxgiformat::DXGI_FORMAT_B8G8R8A8_UNORM,\n\n wgt::TextureFormat::Rgba8UnormSrgb => dxgiformat::DXGI_FORMAT_R8G8B8A8_UNORM,\n\n _ => map_texture_format(format),\n\n }\n\n}\n\n\n", "file_path": "wgpu-hal/src/auxil/dxgi/conv.rs", "rank": 28, "score": 186882.69961062234 }, { "content": "pub fn map_texture_format_depth_typeless(format: wgt::TextureFormat) -> dxgiformat::DXGI_FORMAT {\n\n match format {\n\n wgt::TextureFormat::Depth16Unorm => dxgiformat::DXGI_FORMAT_R16_TYPELESS,\n\n wgt::TextureFormat::Depth32Float => dxgiformat::DXGI_FORMAT_R32_TYPELESS,\n\n wgt::TextureFormat::Depth32FloatStencil8 => dxgiformat::DXGI_FORMAT_R32G8X24_TYPELESS,\n\n wgt::TextureFormat::Stencil8 => dxgiformat::DXGI_FORMAT_R24G8_TYPELESS,\n\n wgt::TextureFormat::Depth24Plus | wgt::TextureFormat::Depth24PlusStencil8 => {\n\n dxgiformat::DXGI_FORMAT_R24G8_TYPELESS\n\n }\n\n _ => unreachable!(),\n\n }\n\n}\n\n\n", "file_path": "wgpu-hal/src/auxil/dxgi/conv.rs", "rank": 29, "score": 184657.17984365448 }, { "content": "pub fn map_texture_format_failable(format: wgt::TextureFormat) -> Option<dxgiformat::DXGI_FORMAT> {\n\n use wgt::TextureFormat as Tf;\n\n use winapi::shared::dxgiformat::*;\n\n\n\n Some(match format {\n\n Tf::R8Unorm => DXGI_FORMAT_R8_UNORM,\n\n Tf::R8Snorm => DXGI_FORMAT_R8_SNORM,\n\n Tf::R8Uint => DXGI_FORMAT_R8_UINT,\n\n Tf::R8Sint => DXGI_FORMAT_R8_SINT,\n\n Tf::R16Uint => DXGI_FORMAT_R16_UINT,\n\n Tf::R16Sint => DXGI_FORMAT_R16_SINT,\n\n Tf::R16Unorm => DXGI_FORMAT_R16_UNORM,\n\n Tf::R16Snorm => DXGI_FORMAT_R16_SNORM,\n\n Tf::R16Float => DXGI_FORMAT_R16_FLOAT,\n\n Tf::Rg8Unorm => DXGI_FORMAT_R8G8_UNORM,\n\n Tf::Rg8Snorm => DXGI_FORMAT_R8G8_SNORM,\n\n Tf::Rg8Uint => DXGI_FORMAT_R8G8_UINT,\n\n Tf::Rg8Sint => DXGI_FORMAT_R8G8_SINT,\n\n Tf::Rg16Unorm => DXGI_FORMAT_R16G16_UNORM,\n\n Tf::Rg16Snorm => DXGI_FORMAT_R16G16_SNORM,\n", "file_path": "wgpu-hal/src/auxil/dxgi/conv.rs", "rank": 30, 
"score": 181552.09509159072 }, { "content": "fn create_struct_layout_tests(storage_type: InputStorageType) -> Vec<ShaderTest> {\n\n let input_values: Vec<_> = (0..(MAX_BUFFER_SIZE as u32 / 4)).collect();\n\n\n\n let mut tests = Vec::new();\n\n\n\n // Vector tests\n\n for components in [2, 3, 4] {\n\n for ty in [\"f32\", \"u32\", \"i32\"] {\n\n let input_members = format!(\"member: vec{components}<{ty}>,\");\n\n // There's 2 possible ways to load a component of a vector:\n\n // - Do `input.member.x` (direct)\n\n // - Store `input.member` in a variable; do `var.x` (loaded)\n\n let mut direct = String::new();\n\n let mut loaded = String::from(\"let loaded = input.member;\");\n\n let component_accessors = [\"x\", \"y\", \"z\", \"w\"]\n\n .into_iter()\n\n .take(components)\n\n .enumerate();\n\n for (idx, component) in component_accessors {\n\n writeln!(\n", "file_path": "wgpu/tests/shader/struct_layout.rs", "rank": 31, "score": 181492.21576015043 }, { "content": "pub fn map_buffer_usage(usage: wgt::BufferUsages) -> hal::BufferUses {\n\n let mut u = hal::BufferUses::empty();\n\n u.set(\n\n hal::BufferUses::MAP_READ,\n\n usage.contains(wgt::BufferUsages::MAP_READ),\n\n );\n\n u.set(\n\n hal::BufferUses::MAP_WRITE,\n\n usage.contains(wgt::BufferUsages::MAP_WRITE),\n\n );\n\n u.set(\n\n hal::BufferUses::COPY_SRC,\n\n usage.contains(wgt::BufferUsages::COPY_SRC),\n\n );\n\n u.set(\n\n hal::BufferUses::COPY_DST,\n\n usage.contains(wgt::BufferUsages::COPY_DST),\n\n );\n\n u.set(\n\n hal::BufferUses::INDEX,\n", "file_path": "wgpu-core/src/conv.rs", "rank": 32, "score": 179792.94735730012 }, { "content": "pub fn is_valid_external_image_copy_dst_texture_format(format: wgt::TextureFormat) -> bool {\n\n use wgt::TextureFormat as Tf;\n\n match format {\n\n Tf::R8Unorm\n\n | Tf::R16Float\n\n | Tf::R32Float\n\n | Tf::Rg8Unorm\n\n | Tf::Rg16Float\n\n | Tf::Rg32Float\n\n | Tf::Rgba8Unorm\n\n | Tf::Rgba8UnormSrgb\n\n | Tf::Bgra8Unorm\n\n | Tf::Bgra8UnormSrgb\n\n | Tf::Rgb10a2Unorm\n\n | 
Tf::Rgba16Float\n\n | Tf::Rgba32Float => true,\n\n _ => false,\n\n }\n\n}\n\n\n", "file_path": "wgpu-core/src/conv.rs", "rank": 33, "score": 178302.46887798246 }, { "content": "pub fn format_pretty_any(\n\n writer: &mut dyn fmt::Write,\n\n global: &Global<IdentityManagerFactory>,\n\n error: &(dyn Error + 'static),\n\n) {\n\n let mut fmt = ErrorFormatter { writer, global };\n\n\n\n if let Some(pretty_err) = error.downcast_ref::<ContextError>() {\n\n return pretty_err.fmt_pretty(&mut fmt);\n\n }\n\n\n\n if let Some(pretty_err) = error.downcast_ref::<crate::command::RenderCommandError>() {\n\n return pretty_err.fmt_pretty(&mut fmt);\n\n }\n\n if let Some(pretty_err) = error.downcast_ref::<crate::binding_model::CreateBindGroupError>() {\n\n return pretty_err.fmt_pretty(&mut fmt);\n\n }\n\n if let Some(pretty_err) =\n\n error.downcast_ref::<crate::binding_model::CreatePipelineLayoutError>()\n\n {\n", "file_path": "wgpu-core/src/error.rs", "rank": 34, "score": 177621.45873621455 }, { "content": "pub fn map_texture_dimension(dim: wgt::TextureDimension) -> vk::ImageType {\n\n match dim {\n\n wgt::TextureDimension::D1 => vk::ImageType::TYPE_1D,\n\n wgt::TextureDimension::D2 => vk::ImageType::TYPE_2D,\n\n wgt::TextureDimension::D3 => vk::ImageType::TYPE_3D,\n\n }\n\n}\n\n\n", "file_path": "wgpu-hal/src/vulkan/conv.rs", "rank": 35, "score": 176883.7341686388 }, { "content": "/// Return true if the fragment `format` is covered by the provided `output`.\n\npub fn check_texture_format(\n\n format: wgt::TextureFormat,\n\n output: &NumericType,\n\n) -> Result<(), NumericType> {\n\n let nt = NumericType::from_texture_format(format);\n\n if nt.is_subtype_of(output) {\n\n Ok(())\n\n } else {\n\n Err(nt)\n\n }\n\n}\n\n\n\npub type StageIo = FastHashMap<wgt::ShaderLocation, InterfaceVar>;\n\n\n\nimpl Interface {\n\n fn populate(\n\n list: &mut Vec<Varying>,\n\n binding: Option<&naga::Binding>,\n\n ty: naga::Handle<naga::Type>,\n\n arena: &naga::UniqueArena<naga::Type>,\n", "file_path": 
"wgpu-core/src/validation.rs", "rank": 36, "score": 175211.43934310376 }, { "content": "/// Checks that the given buffer usage contains the required buffer usage,\n\n/// returns an error otherwise.\n\npub fn check_buffer_usage(\n\n actual: wgt::BufferUsages,\n\n expected: wgt::BufferUsages,\n\n) -> Result<(), MissingBufferUsageError> {\n\n if !actual.contains(expected) {\n\n Err(MissingBufferUsageError { actual, expected })\n\n } else {\n\n Ok(())\n\n }\n\n}\n\n\n\n#[derive(Clone, Debug, Error)]\n\n#[error(\"texture usage is {actual:?} which does not contain required usage {expected:?}\")]\n\npub struct MissingTextureUsageError {\n\n pub(crate) actual: wgt::TextureUsages,\n\n pub(crate) expected: wgt::TextureUsages,\n\n}\n\n\n", "file_path": "wgpu-core/src/validation.rs", "rank": 37, "score": 175211.43934310376 }, { "content": "#[cfg(target_arch = \"wasm32\")]\n\npub fn initialize_adapter_from_env(\n\n _instance: &Instance,\n\n _backend_bits: Backends,\n\n) -> Option<Adapter> {\n\n None\n\n}\n\n\n\n/// Initialize the adapter obeying the WGPU_ADAPTER_NAME environment variable and if it doesn't exist fall back on a default adapter.\n\npub async fn initialize_adapter_from_env_or_default(\n\n instance: &Instance,\n\n backend_bits: wgt::Backends,\n\n compatible_surface: Option<&Surface>,\n\n) -> Option<Adapter> {\n\n match initialize_adapter_from_env(instance, backend_bits) {\n\n Some(a) => Some(a),\n\n None => {\n\n instance\n\n .request_adapter(&RequestAdapterOptions {\n\n power_preference: power_preference_from_env().unwrap_or_default(),\n\n force_fallback_adapter: false,\n\n compatible_surface,\n\n })\n\n .await\n\n }\n\n }\n\n}\n\n\n", "file_path": "wgpu/src/util/init.rs", "rank": 38, "score": 175211.43934310376 }, { "content": "pub fn map_topology(\n\n topology: wgt::PrimitiveTopology,\n\n) -> (\n\n d3d12::D3D12_PRIMITIVE_TOPOLOGY_TYPE,\n\n d3d12::D3D12_PRIMITIVE_TOPOLOGY,\n\n) {\n\n match topology {\n\n wgt::PrimitiveTopology::PointList => (\n\n 
d3d12::D3D12_PRIMITIVE_TOPOLOGY_TYPE_POINT,\n\n d3dcommon::D3D_PRIMITIVE_TOPOLOGY_POINTLIST,\n\n ),\n\n wgt::PrimitiveTopology::LineList => (\n\n d3d12::D3D12_PRIMITIVE_TOPOLOGY_TYPE_LINE,\n\n d3dcommon::D3D_PRIMITIVE_TOPOLOGY_LINELIST,\n\n ),\n\n wgt::PrimitiveTopology::LineStrip => (\n\n d3d12::D3D12_PRIMITIVE_TOPOLOGY_TYPE_LINE,\n\n d3dcommon::D3D_PRIMITIVE_TOPOLOGY_LINESTRIP,\n\n ),\n\n wgt::PrimitiveTopology::TriangleList => (\n\n d3d12::D3D12_PRIMITIVE_TOPOLOGY_TYPE_TRIANGLE,\n\n d3dcommon::D3D_PRIMITIVE_TOPOLOGY_TRIANGLELIST,\n\n ),\n\n wgt::PrimitiveTopology::TriangleStrip => (\n\n d3d12::D3D12_PRIMITIVE_TOPOLOGY_TYPE_TRIANGLE,\n\n d3dcommon::D3D_PRIMITIVE_TOPOLOGY_TRIANGLESTRIP,\n\n ),\n\n }\n\n}\n\n\n", "file_path": "wgpu-hal/src/dx12/conv.rs", "rank": 39, "score": 175211.43934310376 }, { "content": "pub fn compare_image_output(\n\n path: impl AsRef<Path> + AsRef<OsStr>,\n\n width: u32,\n\n height: u32,\n\n data: &[u8],\n\n tolerance: u8,\n\n max_outliers: usize,\n\n) {\n\n let comparison_data = read_png(&path, width, height);\n\n\n\n if let Some(cmp) = comparison_data {\n\n assert_eq!(cmp.len(), data.len());\n\n\n\n let difference_data: Vec<_> = cmp\n\n .chunks_exact(4)\n\n .zip(data.chunks_exact(4))\n\n .flat_map(|(cmp_chunk, data_chunk)| {\n\n [\n\n calc_difference(cmp_chunk[0], data_chunk[0]),\n\n calc_difference(cmp_chunk[1], data_chunk[1]),\n", "file_path": "wgpu/tests/common/image.rs", "rank": 40, "score": 175211.43934310376 }, { "content": "pub fn map_texture_usage(\n\n usage: wgt::TextureUsages,\n\n aspect: hal::FormatAspects,\n\n) -> hal::TextureUses {\n\n let mut u = hal::TextureUses::empty();\n\n u.set(\n\n hal::TextureUses::COPY_SRC,\n\n usage.contains(wgt::TextureUsages::COPY_SRC),\n\n );\n\n u.set(\n\n hal::TextureUses::COPY_DST,\n\n usage.contains(wgt::TextureUsages::COPY_DST),\n\n );\n\n u.set(\n\n hal::TextureUses::RESOURCE,\n\n usage.contains(wgt::TextureUsages::TEXTURE_BINDING),\n\n );\n\n u.set(\n\n hal::TextureUses::STORAGE_READ | 
hal::TextureUses::STORAGE_READ_WRITE,\n\n usage.contains(wgt::TextureUsages::STORAGE_BINDING),\n", "file_path": "wgpu-core/src/conv.rs", "rank": 41, "score": 175211.43934310376 }, { "content": "/// Checks that the given texture usage contains the required texture usage,\n\n/// returns an error otherwise.\n\npub fn check_texture_usage(\n\n actual: wgt::TextureUsages,\n\n expected: wgt::TextureUsages,\n\n) -> Result<(), MissingTextureUsageError> {\n\n if !actual.contains(expected) {\n\n Err(MissingTextureUsageError { actual, expected })\n\n } else {\n\n Ok(())\n\n }\n\n}\n\n\n\n#[derive(Clone, Debug, Error)]\n\npub enum BindingError {\n\n #[error(\"binding is missing from the pipeline layout\")]\n\n Missing,\n\n #[error(\"visibility flags don't include the shader stage\")]\n\n Invisible,\n\n #[error(\"The shader requires the load/store access flags {required:?} but only {allowed:?} is allowed\")]\n\n WrongUsage {\n\n required: GlobalUse,\n", "file_path": "wgpu-core/src/validation.rs", "rank": 42, "score": 175211.43934310376 }, { "content": "pub fn map_buffer_usage(usage: crate::BufferUses) -> vk::BufferUsageFlags {\n\n let mut flags = vk::BufferUsageFlags::empty();\n\n if usage.contains(crate::BufferUses::COPY_SRC) {\n\n flags |= vk::BufferUsageFlags::TRANSFER_SRC;\n\n }\n\n if usage.contains(crate::BufferUses::COPY_DST) {\n\n flags |= vk::BufferUsageFlags::TRANSFER_DST;\n\n }\n\n if usage.contains(crate::BufferUses::UNIFORM) {\n\n flags |= vk::BufferUsageFlags::UNIFORM_BUFFER;\n\n }\n\n if usage.intersects(crate::BufferUses::STORAGE_READ | crate::BufferUses::STORAGE_READ_WRITE) {\n\n flags |= vk::BufferUsageFlags::STORAGE_BUFFER;\n\n }\n\n if usage.contains(crate::BufferUses::INDEX) {\n\n flags |= vk::BufferUsageFlags::INDEX_BUFFER;\n\n }\n\n if usage.contains(crate::BufferUses::VERTEX) {\n\n flags |= vk::BufferUsageFlags::VERTEX_BUFFER;\n\n }\n\n if usage.contains(crate::BufferUses::INDIRECT) {\n\n flags |= vk::BufferUsageFlags::INDIRECT_BUFFER;\n\n }\n\n 
flags\n\n}\n\n\n", "file_path": "wgpu-hal/src/vulkan/conv.rs", "rank": 43, "score": 174771.7169316935 }, { "content": "pub fn map_texture_usage(usage: crate::TextureUses) -> mtl::MTLTextureUsage {\n\n use crate::TextureUses as Tu;\n\n\n\n let mut mtl_usage = mtl::MTLTextureUsage::Unknown;\n\n\n\n mtl_usage.set(\n\n mtl::MTLTextureUsage::RenderTarget,\n\n usage.intersects(Tu::COLOR_TARGET | Tu::DEPTH_STENCIL_READ | Tu::DEPTH_STENCIL_WRITE),\n\n );\n\n mtl_usage.set(\n\n mtl::MTLTextureUsage::ShaderRead,\n\n usage.intersects(\n\n Tu::RESOURCE | Tu::DEPTH_STENCIL_READ | Tu::STORAGE_READ | Tu::STORAGE_READ_WRITE,\n\n ),\n\n );\n\n mtl_usage.set(\n\n mtl::MTLTextureUsage::ShaderWrite,\n\n usage.intersects(Tu::STORAGE_READ_WRITE),\n\n );\n\n\n\n mtl_usage\n\n}\n\n\n", "file_path": "wgpu-hal/src/metal/conv.rs", "rank": 44, "score": 174771.7169316935 }, { "content": "pub fn map_texture_usage(usage: crate::TextureUses) -> vk::ImageUsageFlags {\n\n let mut flags = vk::ImageUsageFlags::empty();\n\n if usage.contains(crate::TextureUses::COPY_SRC) {\n\n flags |= vk::ImageUsageFlags::TRANSFER_SRC;\n\n }\n\n if usage.contains(crate::TextureUses::COPY_DST) {\n\n flags |= vk::ImageUsageFlags::TRANSFER_DST;\n\n }\n\n if usage.contains(crate::TextureUses::RESOURCE) {\n\n flags |= vk::ImageUsageFlags::SAMPLED;\n\n }\n\n if usage.contains(crate::TextureUses::COLOR_TARGET) {\n\n flags |= vk::ImageUsageFlags::COLOR_ATTACHMENT;\n\n }\n\n if usage.intersects(\n\n crate::TextureUses::DEPTH_STENCIL_READ | crate::TextureUses::DEPTH_STENCIL_WRITE,\n\n ) {\n\n flags |= vk::ImageUsageFlags::DEPTH_STENCIL_ATTACHMENT;\n\n }\n\n if usage.intersects(crate::TextureUses::STORAGE_READ | crate::TextureUses::STORAGE_READ_WRITE) {\n\n flags |= vk::ImageUsageFlags::STORAGE;\n\n }\n\n flags\n\n}\n\n\n", "file_path": "wgpu-hal/src/vulkan/conv.rs", "rank": 45, "score": 174771.7169316935 }, { "content": "pub fn map_index_format(index_format: wgt::IndexFormat) -> vk::IndexType {\n\n match index_format {\n\n 
wgt::IndexFormat::Uint16 => vk::IndexType::UINT16,\n\n wgt::IndexFormat::Uint32 => vk::IndexType::UINT32,\n\n }\n\n}\n\n\n", "file_path": "wgpu-hal/src/vulkan/conv.rs", "rank": 46, "score": 174429.43853217352 }, { "content": "pub fn map_blend_component(\n\n component: &wgt::BlendComponent,\n\n) -> (\n\n mtl::MTLBlendOperation,\n\n mtl::MTLBlendFactor,\n\n mtl::MTLBlendFactor,\n\n) {\n\n (\n\n map_blend_op(component.operation),\n\n map_blend_factor(component.src_factor),\n\n map_blend_factor(component.dst_factor),\n\n )\n\n}\n\n\n", "file_path": "wgpu-hal/src/metal/conv.rs", "rank": 47, "score": 172911.830873099 }, { "content": "#[op]\n\npub fn op_webgpu_create_sampler(\n\n state: &mut OpState,\n\n args: CreateSamplerArgs,\n\n) -> Result<WebGpuResult, AnyError> {\n\n let instance = state.borrow::<super::Instance>();\n\n let device_resource = state\n\n .resource_table\n\n .get::<super::WebGpuDevice>(args.device_rid)?;\n\n let device = device_resource.0;\n\n\n\n let descriptor = wgpu_core::resource::SamplerDescriptor {\n\n label: args.label.map(Cow::from),\n\n address_modes: [\n\n args.address_mode_u,\n\n args.address_mode_v,\n\n args.address_mode_w,\n\n ],\n\n mag_filter: args.mag_filter,\n\n min_filter: args.min_filter,\n\n mipmap_filter: args.mipmap_filter,\n", "file_path": "deno_webgpu/src/sampler.rs", "rank": 48, "score": 172911.830873099 }, { "content": "pub fn map_subresource_layers(\n\n base: &crate::TextureCopyBase,\n\n texture_aspect: crate::FormatAspects,\n\n) -> (vk::ImageSubresourceLayers, vk::Offset3D) {\n\n let offset = vk::Offset3D {\n\n x: base.origin.x as i32,\n\n y: base.origin.y as i32,\n\n z: base.origin.z as i32,\n\n };\n\n let subresource = vk::ImageSubresourceLayers {\n\n aspect_mask: map_aspects(base.aspect & texture_aspect),\n\n mip_level: base.mip_level,\n\n base_array_layer: base.array_layer,\n\n layer_count: 1,\n\n };\n\n (subresource, offset)\n\n}\n\n\n", "file_path": "wgpu-hal/src/vulkan/conv.rs", "rank": 49, "score": 172911.830873099 }, 
{ "content": "#[op]\n\npub fn op_webgpu_write_buffer(\n\n state: &mut OpState,\n\n queue_rid: ResourceId,\n\n buffer: ResourceId,\n\n buffer_offset: u64,\n\n data_offset: usize,\n\n size: Option<usize>,\n\n buf: ZeroCopyBuf,\n\n) -> Result<WebGpuResult, AnyError> {\n\n let instance = state.borrow::<super::Instance>();\n\n let buffer_resource = state\n\n .resource_table\n\n .get::<super::buffer::WebGpuBuffer>(buffer)?;\n\n let buffer = buffer_resource.0;\n\n let queue_resource = state.resource_table.get::<WebGpuQueue>(queue_rid)?;\n\n let queue = queue_resource.0;\n\n\n\n let data = match size {\n\n Some(size) => &buf[data_offset..(data_offset + size)],\n\n None => &buf[data_offset..],\n", "file_path": "deno_webgpu/src/queue.rs", "rank": 50, "score": 172911.830873099 }, { "content": "pub fn map_render_targets(\n\n color_targets: &[Option<wgt::ColorTargetState>],\n\n) -> [d3d12::D3D12_RENDER_TARGET_BLEND_DESC; d3d12::D3D12_SIMULTANEOUS_RENDER_TARGET_COUNT as usize]\n\n{\n\n let dummy_target = d3d12::D3D12_RENDER_TARGET_BLEND_DESC {\n\n BlendEnable: 0,\n\n LogicOpEnable: 0,\n\n SrcBlend: d3d12::D3D12_BLEND_ZERO,\n\n DestBlend: d3d12::D3D12_BLEND_ZERO,\n\n BlendOp: d3d12::D3D12_BLEND_OP_ADD,\n\n SrcBlendAlpha: d3d12::D3D12_BLEND_ZERO,\n\n DestBlendAlpha: d3d12::D3D12_BLEND_ZERO,\n\n BlendOpAlpha: d3d12::D3D12_BLEND_OP_ADD,\n\n LogicOp: d3d12::D3D12_LOGIC_OP_CLEAR,\n\n RenderTargetWriteMask: 0,\n\n };\n\n let mut raw_targets = [dummy_target; d3d12::D3D12_SIMULTANEOUS_RENDER_TARGET_COUNT as usize];\n\n\n\n for (raw, ct) in raw_targets.iter_mut().zip(color_targets.iter()) {\n\n if let Some(ct) = ct.as_ref() {\n", "file_path": "wgpu-hal/src/dx12/conv.rs", "rank": 51, "score": 172911.830873099 }, { "content": "#[op]\n\npub fn op_webgpu_buffer_unmap(\n\n state: &mut OpState,\n\n buffer_rid: ResourceId,\n\n mapped_rid: ResourceId,\n\n buf: Option<ZeroCopyBuf>,\n\n) -> Result<WebGpuResult, AnyError> {\n\n let mapped_resource = state\n\n .resource_table\n\n 
.take::<WebGpuBufferMapped>(mapped_rid)?;\n\n let instance = state.borrow::<super::Instance>();\n\n let buffer_resource = state.resource_table.get::<WebGpuBuffer>(buffer_rid)?;\n\n let buffer = buffer_resource.0;\n\n\n\n if let Some(buf) = buf {\n\n let slice = unsafe { std::slice::from_raw_parts_mut(mapped_resource.0, mapped_resource.1) };\n\n slice.copy_from_slice(&buf);\n\n }\n\n\n\n gfx_ok!(buffer => instance.buffer_unmap(buffer))\n\n}\n", "file_path": "deno_webgpu/src/buffer.rs", "rank": 52, "score": 172911.830873099 }, { "content": "pub fn derive_image_layout(\n\n usage: crate::TextureUses,\n\n aspects: crate::FormatAspects,\n\n) -> vk::ImageLayout {\n\n //Note: depth textures are always sampled with RODS layout\n\n let is_color = aspects.contains(crate::FormatAspects::COLOR);\n\n match usage {\n\n crate::TextureUses::UNINITIALIZED => vk::ImageLayout::UNDEFINED,\n\n crate::TextureUses::COPY_SRC => vk::ImageLayout::TRANSFER_SRC_OPTIMAL,\n\n crate::TextureUses::COPY_DST => vk::ImageLayout::TRANSFER_DST_OPTIMAL,\n\n crate::TextureUses::RESOURCE if is_color => vk::ImageLayout::SHADER_READ_ONLY_OPTIMAL,\n\n crate::TextureUses::COLOR_TARGET => vk::ImageLayout::COLOR_ATTACHMENT_OPTIMAL,\n\n crate::TextureUses::DEPTH_STENCIL_WRITE => {\n\n vk::ImageLayout::DEPTH_STENCIL_ATTACHMENT_OPTIMAL\n\n }\n\n _ => {\n\n if usage == crate::TextureUses::PRESENT {\n\n vk::ImageLayout::PRESENT_SRC_KHR\n\n } else if is_color {\n\n vk::ImageLayout::GENERAL\n\n } else {\n\n vk::ImageLayout::DEPTH_STENCIL_READ_ONLY_OPTIMAL\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "wgpu-hal/src/vulkan/conv.rs", "rank": 53, "score": 172911.830873099 }, { "content": "pub fn map_stencil_face(\n\n face: &wgt::StencilFaceState,\n\n compare_mask: u32,\n\n write_mask: u32,\n\n) -> vk::StencilOpState {\n\n vk::StencilOpState {\n\n fail_op: map_stencil_op(face.fail_op),\n\n pass_op: map_stencil_op(face.pass_op),\n\n depth_fail_op: map_stencil_op(face.depth_fail_op),\n\n compare_op: 
map_comparison(face.compare),\n\n compare_mask,\n\n write_mask,\n\n reference: 0,\n\n }\n\n}\n\n\n", "file_path": "wgpu-hal/src/vulkan/conv.rs", "rank": 54, "score": 172911.830873099 }, { "content": "#[op]\n\npub fn op_webgpu_write_texture(\n\n state: &mut OpState,\n\n queue_rid: ResourceId,\n\n destination: super::command_encoder::GpuImageCopyTexture,\n\n data_layout: GpuImageDataLayout,\n\n size: wgpu_types::Extent3d,\n\n buf: ZeroCopyBuf,\n\n) -> Result<WebGpuResult, AnyError> {\n\n let instance = state.borrow::<super::Instance>();\n\n let texture_resource = state\n\n .resource_table\n\n .get::<super::texture::WebGpuTexture>(destination.texture)?;\n\n let queue_resource = state.resource_table.get::<WebGpuQueue>(queue_rid)?;\n\n let queue = queue_resource.0;\n\n\n\n let destination = wgpu_core::command::ImageCopyTexture {\n\n texture: texture_resource.0,\n\n mip_level: destination.mip_level,\n\n origin: destination.origin,\n\n aspect: destination.aspect,\n", "file_path": "deno_webgpu/src/queue.rs", "rank": 55, "score": 172911.830873099 }, { "content": "pub fn map_filter_modes(\n\n min: wgt::FilterMode,\n\n mag: wgt::FilterMode,\n\n mip: wgt::FilterMode,\n\n) -> (u32, u32) {\n\n use wgt::FilterMode as Fm;\n\n\n\n let mag_filter = match mag {\n\n Fm::Nearest => glow::NEAREST,\n\n Fm::Linear => glow::LINEAR,\n\n };\n\n\n\n let min_filter = match (min, mip) {\n\n (Fm::Nearest, Fm::Nearest) => glow::NEAREST_MIPMAP_NEAREST,\n\n (Fm::Nearest, Fm::Linear) => glow::NEAREST_MIPMAP_LINEAR,\n\n (Fm::Linear, Fm::Nearest) => glow::LINEAR_MIPMAP_NEAREST,\n\n (Fm::Linear, Fm::Linear) => glow::LINEAR_MIPMAP_LINEAR,\n\n };\n\n\n\n (min_filter, mag_filter)\n\n}\n\n\n", "file_path": "wgpu-hal/src/gles/conv.rs", "rank": 56, "score": 172911.830873099 }, { "content": "pub fn map_subresource_range(\n\n range: &wgt::ImageSubresourceRange,\n\n texture_aspect: crate::FormatAspects,\n\n) -> vk::ImageSubresourceRange {\n\n vk::ImageSubresourceRange {\n\n aspect_mask: 
map_aspects(crate::FormatAspects::from(range.aspect) & texture_aspect),\n\n base_mip_level: range.base_mip_level,\n\n level_count: range\n\n .mip_level_count\n\n .map_or(vk::REMAINING_MIP_LEVELS, NonZeroU32::get),\n\n base_array_layer: range.base_array_layer,\n\n layer_count: range\n\n .array_layer_count\n\n .map_or(vk::REMAINING_ARRAY_LAYERS, NonZeroU32::get),\n\n }\n\n}\n\n\n", "file_path": "wgpu-hal/src/vulkan/conv.rs", "rank": 57, "score": 172911.830873099 }, { "content": "#[op]\n\npub fn op_webgpu_surface_present(\n\n state: &mut OpState,\n\n device_rid: ResourceId,\n\n surface_rid: ResourceId,\n\n) -> Result<(), AnyError> {\n\n let instance = state.borrow::<super::Instance>();\n\n let device_resource = state\n\n .resource_table\n\n .get::<super::WebGpuDevice>(device_rid)?;\n\n let device = device_resource.0;\n\n let surface_resource = state.resource_table.get::<WebGpuSurface>(surface_rid)?;\n\n let surface = surface_resource.0;\n\n\n\n let _ = gfx_select!(device => instance.surface_present(surface))?;\n\n\n\n Ok(())\n\n}\n", "file_path": "deno_webgpu/src/surface.rs", "rank": 58, "score": 172911.830873099 }, { "content": "pub fn map_primitive_topology(\n\n topology: wgt::PrimitiveTopology,\n\n) -> (mtl::MTLPrimitiveTopologyClass, mtl::MTLPrimitiveType) {\n\n use wgt::PrimitiveTopology as Pt;\n\n match topology {\n\n Pt::PointList => (\n\n mtl::MTLPrimitiveTopologyClass::Point,\n\n mtl::MTLPrimitiveType::Point,\n\n ),\n\n Pt::LineList => (\n\n mtl::MTLPrimitiveTopologyClass::Line,\n\n mtl::MTLPrimitiveType::Line,\n\n ),\n\n Pt::LineStrip => (\n\n mtl::MTLPrimitiveTopologyClass::Line,\n\n mtl::MTLPrimitiveType::LineStrip,\n\n ),\n\n Pt::TriangleList => (\n\n mtl::MTLPrimitiveTopologyClass::Triangle,\n\n mtl::MTLPrimitiveType::Triangle,\n\n ),\n\n Pt::TriangleStrip => (\n\n mtl::MTLPrimitiveTopologyClass::Triangle,\n\n mtl::MTLPrimitiveType::TriangleStrip,\n\n ),\n\n }\n\n}\n\n\n", "file_path": "wgpu-hal/src/metal/conv.rs", "rank": 59, "score": 172911.830873099 
}, { "content": "/// Break up possibly overlapping push constant ranges into a set of\n\n/// non-overlapping ranges which contain all the stage flags of the\n\n/// original ranges. This allows us to zero out (or write any value)\n\n/// to every possible value.\n\npub fn compute_nonoverlapping_ranges(\n\n ranges: &[wgt::PushConstantRange],\n\n) -> ArrayVec<wgt::PushConstantRange, { SHADER_STAGE_COUNT * 2 }> {\n\n if ranges.is_empty() {\n\n return ArrayVec::new();\n\n }\n\n debug_assert!(ranges.len() <= SHADER_STAGE_COUNT);\n\n\n\n let mut breaks: ArrayVec<PushConstantChange, { SHADER_STAGE_COUNT * 2 }> = ArrayVec::new();\n\n for range in ranges {\n\n breaks.push(PushConstantChange {\n\n stages: range.stages,\n\n offset: range.range.start,\n\n enable: true,\n\n });\n\n breaks.push(PushConstantChange {\n\n stages: range.stages,\n\n offset: range.range.end,\n\n enable: false,\n\n });\n", "file_path": "wgpu-core/src/command/bind.rs", "rank": 60, "score": 172911.830873099 }, { "content": "pub fn check_texture_dimension_size(\n\n dimension: wgt::TextureDimension,\n\n wgt::Extent3d {\n\n width,\n\n height,\n\n depth_or_array_layers,\n\n }: wgt::Extent3d,\n\n sample_size: u32,\n\n limits: &wgt::Limits,\n\n) -> Result<(), resource::TextureDimensionError> {\n\n use resource::{TextureDimensionError as Tde, TextureErrorDimension as Ted};\n\n use wgt::TextureDimension::*;\n\n\n\n let (extent_limits, sample_limit) = match dimension {\n\n D1 => ([limits.max_texture_dimension_1d, 1, 1], 1),\n\n D2 => (\n\n [\n\n limits.max_texture_dimension_2d,\n\n limits.max_texture_dimension_2d,\n\n limits.max_texture_array_layers,\n", "file_path": "wgpu-core/src/conv.rs", "rank": 61, "score": 172911.830873099 }, { "content": "pub fn map_blend_component(\n\n component: &wgt::BlendComponent,\n\n) -> (vk::BlendOp, vk::BlendFactor, vk::BlendFactor) {\n\n let op = map_blend_op(component.operation);\n\n let src = map_blend_factor(component.src_factor);\n\n let dst = 
map_blend_factor(component.dst_factor);\n\n (op, src, dst)\n\n}\n\n\n", "file_path": "wgpu-hal/src/vulkan/conv.rs", "rank": 62, "score": 172911.830873099 }, { "content": "#[op]\n\npub fn op_webgpu_queue_submit(\n\n state: &mut OpState,\n\n queue_rid: ResourceId,\n\n command_buffers: Vec<ResourceId>,\n\n) -> Result<WebGpuResult, AnyError> {\n\n let instance = state.borrow::<super::Instance>();\n\n let queue_resource = state.resource_table.get::<WebGpuQueue>(queue_rid)?;\n\n let queue = queue_resource.0;\n\n\n\n let ids = command_buffers\n\n .iter()\n\n .map(|rid| {\n\n let buffer_resource = state\n\n .resource_table\n\n .get::<super::command_encoder::WebGpuCommandBuffer>(*rid)?;\n\n Ok(buffer_resource.0)\n\n })\n\n .collect::<Result<Vec<_>, AnyError>>()?;\n\n\n\n let maybe_err = gfx_select!(queue => instance.queue_submit(queue, &ids)).err();\n", "file_path": "deno_webgpu/src/queue.rs", "rank": 63, "score": 172911.830873099 }, { "content": "pub fn map_pipeline_statistics(\n\n types: wgt::PipelineStatisticsTypes,\n\n) -> vk::QueryPipelineStatisticFlags {\n\n use wgt::PipelineStatisticsTypes as Pst;\n\n let mut flags = vk::QueryPipelineStatisticFlags::empty();\n\n if types.contains(Pst::VERTEX_SHADER_INVOCATIONS) {\n\n flags |= vk::QueryPipelineStatisticFlags::VERTEX_SHADER_INVOCATIONS;\n\n }\n\n if types.contains(Pst::CLIPPER_INVOCATIONS) {\n\n flags |= vk::QueryPipelineStatisticFlags::CLIPPING_INVOCATIONS;\n\n }\n\n if types.contains(Pst::CLIPPER_PRIMITIVES_OUT) {\n\n flags |= vk::QueryPipelineStatisticFlags::CLIPPING_PRIMITIVES;\n\n }\n\n if types.contains(Pst::FRAGMENT_SHADER_INVOCATIONS) {\n\n flags |= vk::QueryPipelineStatisticFlags::FRAGMENT_SHADER_INVOCATIONS;\n\n }\n\n if types.contains(Pst::COMPUTE_SHADER_INVOCATIONS) {\n\n flags |= vk::QueryPipelineStatisticFlags::COMPUTE_SHADER_INVOCATIONS;\n\n }\n\n flags\n\n}\n", "file_path": "wgpu-hal/src/vulkan/conv.rs", "rank": 64, "score": 172911.830873099 }, { "content": "#[op]\n\npub fn 
op_webgpu_surface_configure(\n\n state: &mut OpState,\n\n args: SurfaceConfigureArgs,\n\n) -> Result<WebGpuResult, AnyError> {\n\n let instance = state.borrow::<super::Instance>();\n\n let device_resource = state\n\n .resource_table\n\n .get::<super::WebGpuDevice>(args.device_rid)?;\n\n let device = device_resource.0;\n\n let surface_resource = state\n\n .resource_table\n\n .get::<WebGpuSurface>(args.surface_rid)?;\n\n let surface = surface_resource.0;\n\n\n\n let conf = wgpu_types::SurfaceConfiguration::<Vec<wgpu_types::TextureFormat>> {\n\n usage: wgpu_types::TextureUsages::from_bits_truncate(args.usage),\n\n format: args.format,\n\n width: args.width,\n\n height: args.height,\n\n present_mode: args.present_mode.unwrap_or_default(),\n\n alpha_mode: args.alpha_mode,\n\n view_formats: args.view_formats,\n\n };\n\n\n\n let err = gfx_select!(device => instance.surface_configure(surface, device, &conf));\n\n\n\n Ok(WebGpuResult::maybe_err(err))\n\n}\n\n\n", "file_path": "deno_webgpu/src/surface.rs", "rank": 65, "score": 172911.830873099 }, { "content": "pub fn map_attachment_ops(\n\n op: crate::AttachmentOps,\n\n) -> (vk::AttachmentLoadOp, vk::AttachmentStoreOp) {\n\n let load_op = if op.contains(crate::AttachmentOps::LOAD) {\n\n vk::AttachmentLoadOp::LOAD\n\n } else {\n\n vk::AttachmentLoadOp::CLEAR\n\n };\n\n let store_op = if op.contains(crate::AttachmentOps::STORE) {\n\n vk::AttachmentStoreOp::STORE\n\n } else {\n\n vk::AttachmentStoreOp::DONT_CARE\n\n };\n\n (load_op, store_op)\n\n}\n\n\n", "file_path": "wgpu-hal/src/vulkan/conv.rs", "rank": 66, "score": 172911.830873099 }, { "content": "#[op]\n\npub fn op_webgpu_create_buffer(\n\n state: &mut OpState,\n\n device_rid: ResourceId,\n\n label: Option<String>,\n\n size: u64,\n\n usage: u32,\n\n mapped_at_creation: bool,\n\n) -> Result<WebGpuResult, AnyError> {\n\n let instance = state.borrow::<super::Instance>();\n\n let device_resource = state\n\n .resource_table\n\n .get::<super::WebGpuDevice>(device_rid)?;\n\n let 
device = device_resource.0;\n\n\n\n let descriptor = wgpu_core::resource::BufferDescriptor {\n\n label: label.map(Cow::from),\n\n size,\n\n usage: wgpu_types::BufferUsages::from_bits(usage)\n\n .ok_or_else(|| type_error(\"usage is not valid\"))?,\n\n mapped_at_creation,\n", "file_path": "deno_webgpu/src/buffer.rs", "rank": 67, "score": 172911.830873099 }, { "content": "#[op]\n\npub fn op_webgpu_create_texture(\n\n state: &mut OpState,\n\n args: CreateTextureArgs,\n\n) -> Result<WebGpuResult, AnyError> {\n\n let instance = state.borrow::<super::Instance>();\n\n let device_resource = state\n\n .resource_table\n\n .get::<super::WebGpuDevice>(args.device_rid)?;\n\n let device = device_resource.0;\n\n\n\n let descriptor = wgpu_core::resource::TextureDescriptor {\n\n label: args.label.map(Cow::from),\n\n size: args.size,\n\n mip_level_count: args.mip_level_count,\n\n sample_count: args.sample_count,\n\n dimension: args.dimension,\n\n format: args.format,\n\n usage: wgpu_types::TextureUsages::from_bits_truncate(args.usage),\n\n view_formats: args.view_formats,\n\n };\n", "file_path": "deno_webgpu/src/texture.rs", "rank": 68, "score": 172911.830873099 }, { "content": "pub fn map_vk_image_usage(usage: vk::ImageUsageFlags) -> crate::TextureUses {\n\n let mut bits = crate::TextureUses::empty();\n\n if usage.contains(vk::ImageUsageFlags::TRANSFER_SRC) {\n\n bits |= crate::TextureUses::COPY_SRC;\n\n }\n\n if usage.contains(vk::ImageUsageFlags::TRANSFER_DST) {\n\n bits |= crate::TextureUses::COPY_DST;\n\n }\n\n if usage.contains(vk::ImageUsageFlags::SAMPLED) {\n\n bits |= crate::TextureUses::RESOURCE;\n\n }\n\n if usage.contains(vk::ImageUsageFlags::COLOR_ATTACHMENT) {\n\n bits |= crate::TextureUses::COLOR_TARGET;\n\n }\n\n if usage.contains(vk::ImageUsageFlags::DEPTH_STENCIL_ATTACHMENT) {\n\n bits |= crate::TextureUses::DEPTH_STENCIL_READ | crate::TextureUses::DEPTH_STENCIL_WRITE;\n\n }\n\n if usage.contains(vk::ImageUsageFlags::STORAGE) {\n\n bits |= 
crate::TextureUses::STORAGE_READ | crate::TextureUses::STORAGE_READ_WRITE;\n\n }\n\n bits\n\n}\n\n\n", "file_path": "wgpu-hal/src/vulkan/conv.rs", "rank": 69, "score": 172404.1419039137 }, { "content": "pub fn map_view_dimension(dim: wgt::TextureViewDimension) -> vk::ImageViewType {\n\n match dim {\n\n wgt::TextureViewDimension::D1 => vk::ImageViewType::TYPE_1D,\n\n wgt::TextureViewDimension::D2 => vk::ImageViewType::TYPE_2D,\n\n wgt::TextureViewDimension::D2Array => vk::ImageViewType::TYPE_2D_ARRAY,\n\n wgt::TextureViewDimension::Cube => vk::ImageViewType::CUBE,\n\n wgt::TextureViewDimension::CubeArray => vk::ImageViewType::CUBE_ARRAY,\n\n wgt::TextureViewDimension::D3 => vk::ImageViewType::TYPE_3D,\n\n }\n\n}\n\n\n", "file_path": "wgpu-hal/src/vulkan/conv.rs", "rank": 70, "score": 172068.36384547182 }, { "content": "pub fn map_filter_mode(mode: wgt::FilterMode) -> d3d12::D3D12_FILTER_TYPE {\n\n match mode {\n\n wgt::FilterMode::Nearest => d3d12::D3D12_FILTER_TYPE_POINT,\n\n wgt::FilterMode::Linear => d3d12::D3D12_FILTER_TYPE_LINEAR,\n\n }\n\n}\n\n\n", "file_path": "wgpu-hal/src/dx12/conv.rs", "rank": 71, "score": 172068.36384547182 }, { "content": "pub fn map_buffer_usage_to_barrier(\n\n usage: crate::BufferUses,\n\n) -> (vk::PipelineStageFlags, vk::AccessFlags) {\n\n let mut stages = vk::PipelineStageFlags::empty();\n\n let mut access = vk::AccessFlags::empty();\n\n let shader_stages = vk::PipelineStageFlags::VERTEX_SHADER\n\n | vk::PipelineStageFlags::FRAGMENT_SHADER\n\n | vk::PipelineStageFlags::COMPUTE_SHADER;\n\n\n\n if usage.contains(crate::BufferUses::MAP_READ) {\n\n stages |= vk::PipelineStageFlags::HOST;\n\n access |= vk::AccessFlags::HOST_READ;\n\n }\n\n if usage.contains(crate::BufferUses::MAP_WRITE) {\n\n stages |= vk::PipelineStageFlags::HOST;\n\n access |= vk::AccessFlags::HOST_WRITE;\n\n }\n\n if usage.contains(crate::BufferUses::COPY_SRC) {\n\n stages |= vk::PipelineStageFlags::TRANSFER;\n\n access |= vk::AccessFlags::TRANSFER_READ;\n", 
"file_path": "wgpu-hal/src/vulkan/conv.rs", "rank": 72, "score": 170715.2158048829 }, { "content": "#[op]\n\npub fn op_webgpu_create_bind_group(\n\n state: &mut OpState,\n\n device_rid: ResourceId,\n\n label: Option<String>,\n\n layout: ResourceId,\n\n entries: Vec<GpuBindGroupEntry>,\n\n) -> Result<WebGpuResult, AnyError> {\n\n let instance = state.borrow::<super::Instance>();\n\n let device_resource = state\n\n .resource_table\n\n .get::<super::WebGpuDevice>(device_rid)?;\n\n let device = device_resource.0;\n\n\n\n let entries = entries\n\n .into_iter()\n\n .map(|entry| {\n\n Ok(wgpu_core::binding_model::BindGroupEntry {\n\n binding: entry.binding,\n\n resource: match entry.kind.as_str() {\n\n \"GPUSampler\" => {\n", "file_path": "deno_webgpu/src/binding.rs", "rank": 73, "score": 170715.2158048829 }, { "content": "pub fn map_texture_usage_to_barrier(\n\n usage: crate::TextureUses,\n\n) -> (vk::PipelineStageFlags, vk::AccessFlags) {\n\n let mut stages = vk::PipelineStageFlags::empty();\n\n let mut access = vk::AccessFlags::empty();\n\n let shader_stages = vk::PipelineStageFlags::VERTEX_SHADER\n\n | vk::PipelineStageFlags::FRAGMENT_SHADER\n\n | vk::PipelineStageFlags::COMPUTE_SHADER;\n\n\n\n if usage.contains(crate::TextureUses::COPY_SRC) {\n\n stages |= vk::PipelineStageFlags::TRANSFER;\n\n access |= vk::AccessFlags::TRANSFER_READ;\n\n }\n\n if usage.contains(crate::TextureUses::COPY_DST) {\n\n stages |= vk::PipelineStageFlags::TRANSFER;\n\n access |= vk::AccessFlags::TRANSFER_WRITE;\n\n }\n\n if usage.contains(crate::TextureUses::RESOURCE) {\n\n stages |= shader_stages;\n\n access |= vk::AccessFlags::SHADER_READ;\n", "file_path": "wgpu-hal/src/vulkan/conv.rs", "rank": 74, "score": 170715.2158048829 }, { "content": "#[op]\n\npub fn op_webgpu_create_compute_pipeline(\n\n state: &mut OpState,\n\n device_rid: ResourceId,\n\n label: Option<String>,\n\n layout: GPUPipelineLayoutOrGPUAutoLayoutMode,\n\n compute: GpuProgrammableStage,\n\n) -> Result<WebGpuResult, 
AnyError> {\n\n let instance = state.borrow::<super::Instance>();\n\n let device_resource = state\n\n .resource_table\n\n .get::<super::WebGpuDevice>(device_rid)?;\n\n let device = device_resource.0;\n\n\n\n let pipeline_layout = match layout {\n\n GPUPipelineLayoutOrGPUAutoLayoutMode::Layout(rid) => {\n\n let id = state.resource_table.get::<WebGpuPipelineLayout>(rid)?;\n\n Some(id.0)\n\n }\n\n GPUPipelineLayoutOrGPUAutoLayoutMode::Auto(GPUAutoLayoutMode::Auto) => None,\n\n };\n", "file_path": "deno_webgpu/src/pipeline.rs", "rank": 75, "score": 170715.2158048829 }, { "content": "pub fn is_valid_copy_src_texture_format(\n\n format: wgt::TextureFormat,\n\n aspect: wgt::TextureAspect,\n\n) -> bool {\n\n use wgt::TextureAspect as Ta;\n\n use wgt::TextureFormat as Tf;\n\n match (format, aspect) {\n\n (Tf::Depth24Plus, _) | (Tf::Depth24PlusStencil8, Ta::DepthOnly) => false,\n\n _ => true,\n\n }\n\n}\n\n\n", "file_path": "wgpu-core/src/conv.rs", "rank": 76, "score": 170715.2158048829 }, { "content": "pub fn map_extent_to_copy_size(\n\n extent: &wgt::Extent3d,\n\n dim: wgt::TextureDimension,\n\n) -> crate::CopyExtent {\n\n crate::CopyExtent {\n\n width: extent.width,\n\n height: extent.height,\n\n depth: match dim {\n\n wgt::TextureDimension::D1 | wgt::TextureDimension::D2 => 1,\n\n wgt::TextureDimension::D3 => extent.depth_or_array_layers,\n\n },\n\n }\n\n}\n\n\n", "file_path": "wgpu-hal/src/vulkan/conv.rs", "rank": 77, "score": 170715.2158048829 }, { "content": "#[op]\n\npub fn op_webgpu_create_pipeline_layout(\n\n state: &mut OpState,\n\n device_rid: ResourceId,\n\n label: Option<String>,\n\n bind_group_layouts: Vec<u32>,\n\n) -> Result<WebGpuResult, AnyError> {\n\n let instance = state.borrow::<super::Instance>();\n\n let device_resource = state\n\n .resource_table\n\n .get::<super::WebGpuDevice>(device_rid)?;\n\n let device = device_resource.0;\n\n\n\n let bind_group_layouts = bind_group_layouts\n\n .into_iter()\n\n .map(|rid| {\n\n let bind_group_layout = 
state.resource_table.get::<WebGpuBindGroupLayout>(rid)?;\n\n Ok(bind_group_layout.0)\n\n })\n\n .collect::<Result<Vec<_>, AnyError>>()?;\n\n\n", "file_path": "deno_webgpu/src/binding.rs", "rank": 78, "score": 170715.2158048829 }, { "content": "#[op]\n\npub fn op_webgpu_create_render_pipeline(\n\n state: &mut OpState,\n\n args: CreateRenderPipelineArgs,\n\n) -> Result<WebGpuResult, AnyError> {\n\n let instance = state.borrow::<super::Instance>();\n\n let device_resource = state\n\n .resource_table\n\n .get::<super::WebGpuDevice>(args.device_rid)?;\n\n let device = device_resource.0;\n\n\n\n let layout = match args.layout {\n\n GPUPipelineLayoutOrGPUAutoLayoutMode::Layout(rid) => {\n\n let pipeline_layout_resource = state.resource_table.get::<WebGpuPipelineLayout>(rid)?;\n\n Some(pipeline_layout_resource.0)\n\n }\n\n GPUPipelineLayoutOrGPUAutoLayoutMode::Auto(GPUAutoLayoutMode::Auto) => None,\n\n };\n\n\n\n let vertex_shader_module_resource = state\n\n .resource_table\n", "file_path": "deno_webgpu/src/pipeline.rs", "rank": 79, "score": 170715.2158048829 }, { "content": "#[op]\n\npub fn op_webgpu_create_shader_module(\n\n state: &mut OpState,\n\n device_rid: ResourceId,\n\n label: Option<String>,\n\n code: String,\n\n) -> Result<WebGpuResult, AnyError> {\n\n let instance = state.borrow::<super::Instance>();\n\n let device_resource = state\n\n .resource_table\n\n .get::<super::WebGpuDevice>(device_rid)?;\n\n let device = device_resource.0;\n\n\n\n let source = wgpu_core::pipeline::ShaderModuleSource::Wgsl(Cow::from(code));\n\n\n\n let descriptor = wgpu_core::pipeline::ShaderModuleDescriptor {\n\n label: label.map(Cow::from),\n\n shader_bound_checks: wgpu_types::ShaderBoundChecks::default(),\n\n };\n\n\n\n gfx_put!(device => instance.device_create_shader_module(\n\n device,\n\n &descriptor,\n\n source,\n\n ()\n\n ) => state, WebGpuShaderModule)\n\n}\n", "file_path": "deno_webgpu/src/shader.rs", "rank": 80, "score": 170715.2158048829 }, { "content": "pub fn 
is_valid_copy_dst_texture_format(\n\n format: wgt::TextureFormat,\n\n aspect: wgt::TextureAspect,\n\n) -> bool {\n\n use wgt::TextureAspect as Ta;\n\n use wgt::TextureFormat as Tf;\n\n match (format, aspect) {\n\n (Tf::Depth24Plus | Tf::Depth32Float, _)\n\n | (Tf::Depth24PlusStencil8 | Tf::Depth32FloatStencil8, Ta::DepthOnly) => false,\n\n _ => true,\n\n }\n\n}\n\n\n\n#[cfg_attr(\n\n any(not(target_arch = \"wasm32\"), feature = \"emscripten\"),\n\n allow(unused)\n\n)]\n", "file_path": "wgpu-core/src/conv.rs", "rank": 81, "score": 170715.2158048829 }, { "content": "#[op]\n\npub fn op_webgpu_create_texture_view(\n\n state: &mut OpState,\n\n args: CreateTextureViewArgs,\n\n) -> Result<WebGpuResult, AnyError> {\n\n let instance = state.borrow::<super::Instance>();\n\n let texture_resource = state\n\n .resource_table\n\n .get::<WebGpuTexture>(args.texture_rid)?;\n\n let texture = texture_resource.0;\n\n\n\n let descriptor = wgpu_core::resource::TextureViewDescriptor {\n\n label: args.label.map(Cow::from),\n\n format: args.format,\n\n dimension: args.dimension,\n\n range: wgpu_types::ImageSubresourceRange {\n\n aspect: args.aspect,\n\n base_mip_level: args.base_mip_level,\n\n mip_level_count: std::num::NonZeroU32::new(args.mip_level_count.unwrap_or(0)),\n\n base_array_layer: args.base_array_layer,\n\n array_layer_count: std::num::NonZeroU32::new(args.array_layer_count.unwrap_or(0)),\n\n },\n\n };\n\n\n\n gfx_put!(texture => instance.texture_create_view(\n\n texture,\n\n &descriptor,\n\n ()\n\n ) => state, WebGpuTextureView)\n\n}\n", "file_path": "deno_webgpu/src/texture.rs", "rank": 82, "score": 170715.2158048829 }, { "content": "pub fn map_buffer_usage_to_state(usage: crate::BufferUses) -> d3d12::D3D12_RESOURCE_STATES {\n\n use crate::BufferUses as Bu;\n\n let mut state = d3d12::D3D12_RESOURCE_STATE_COMMON;\n\n\n\n if usage.intersects(Bu::COPY_SRC) {\n\n state |= d3d12::D3D12_RESOURCE_STATE_COPY_SOURCE;\n\n }\n\n if usage.intersects(Bu::COPY_DST) {\n\n state |= 
d3d12::D3D12_RESOURCE_STATE_COPY_DEST;\n\n }\n\n if usage.intersects(Bu::INDEX) {\n\n state |= d3d12::D3D12_RESOURCE_STATE_INDEX_BUFFER;\n\n }\n\n if usage.intersects(Bu::VERTEX | Bu::UNIFORM) {\n\n state |= d3d12::D3D12_RESOURCE_STATE_VERTEX_AND_CONSTANT_BUFFER;\n\n }\n\n if usage.intersects(Bu::STORAGE_READ_WRITE) {\n\n state |= d3d12::D3D12_RESOURCE_STATE_UNORDERED_ACCESS;\n\n } else if usage.intersects(Bu::STORAGE_READ) {\n\n state |= d3d12::D3D12_RESOURCE_STATE_PIXEL_SHADER_RESOURCE\n\n | d3d12::D3D12_RESOURCE_STATE_NON_PIXEL_SHADER_RESOURCE;\n\n }\n\n if usage.intersects(Bu::INDIRECT) {\n\n state |= d3d12::D3D12_RESOURCE_STATE_INDIRECT_ARGUMENT;\n\n }\n\n state\n\n}\n\n\n", "file_path": "wgpu-hal/src/dx12/conv.rs", "rank": 83, "score": 170124.81792711292 }, { "content": "pub fn map_texture_usage_to_state(usage: crate::TextureUses) -> d3d12::D3D12_RESOURCE_STATES {\n\n use crate::TextureUses as Tu;\n\n let mut state = d3d12::D3D12_RESOURCE_STATE_COMMON;\n\n //Note: `RESOLVE_SOURCE` and `RESOLVE_DEST` are not used here\n\n //Note: `PRESENT` is the same as `COMMON`\n\n if usage == crate::TextureUses::UNINITIALIZED {\n\n return state;\n\n }\n\n\n\n if usage.intersects(Tu::COPY_SRC) {\n\n state |= d3d12::D3D12_RESOURCE_STATE_COPY_SOURCE;\n\n }\n\n if usage.intersects(Tu::COPY_DST) {\n\n state |= d3d12::D3D12_RESOURCE_STATE_COPY_DEST;\n\n }\n\n if usage.intersects(Tu::RESOURCE) {\n\n state |= d3d12::D3D12_RESOURCE_STATE_PIXEL_SHADER_RESOURCE\n\n | d3d12::D3D12_RESOURCE_STATE_NON_PIXEL_SHADER_RESOURCE;\n\n }\n\n if usage.intersects(Tu::COLOR_TARGET) {\n", "file_path": "wgpu-hal/src/dx12/conv.rs", "rank": 84, "score": 170124.81792711292 }, { "content": "pub fn map_acomposite_alpha_mode(_mode: wgt::CompositeAlphaMode) -> native::AlphaMode {\n\n native::AlphaMode::Ignore\n\n}\n", "file_path": "wgpu-hal/src/auxil/dxgi/conv.rs", "rank": 85, "score": 170100.31769272516 }, { "content": "pub fn map_texture_view_dimension(dim: wgt::TextureViewDimension) -> 
mtl::MTLTextureType {\n\n use mtl::MTLTextureType::*;\n\n use wgt::TextureViewDimension as Tvd;\n\n match dim {\n\n Tvd::D1 => D1,\n\n Tvd::D2 => D2,\n\n Tvd::D2Array => D2Array,\n\n Tvd::D3 => D3,\n\n Tvd::Cube => Cube,\n\n Tvd::CubeArray => CubeArray,\n\n }\n\n}\n\n\n", "file_path": "wgpu-hal/src/metal/conv.rs", "rank": 86, "score": 169795.29791038152 }, { "content": "#[op]\n\npub fn op_webgpu_render_bundle_encoder_finish(\n\n state: &mut OpState,\n\n render_bundle_encoder_rid: ResourceId,\n\n label: Option<String>,\n\n) -> Result<WebGpuResult, AnyError> {\n\n let render_bundle_encoder_resource = state\n\n .resource_table\n\n .take::<WebGpuRenderBundleEncoder>(render_bundle_encoder_rid)?;\n\n let render_bundle_encoder = Rc::try_unwrap(render_bundle_encoder_resource)\n\n .ok()\n\n .expect(\"unwrapping render_bundle_encoder_resource should succeed\")\n\n .0\n\n .into_inner();\n\n let instance = state.borrow::<super::Instance>();\n\n\n\n gfx_put!(render_bundle_encoder.parent() => instance.render_bundle_encoder_finish(\n\n render_bundle_encoder,\n\n &wgpu_core::command::RenderBundleDescriptor {\n\n label: label.map(Cow::from),\n\n },\n\n ()\n\n ) => state, WebGpuRenderBundle)\n\n}\n\n\n", "file_path": "deno_webgpu/src/bundle.rs", "rank": 87, "score": 168614.82648662306 }, { "content": "#[op]\n\npub fn op_webgpu_command_encoder_finish(\n\n state: &mut OpState,\n\n command_encoder_rid: ResourceId,\n\n label: Option<String>,\n\n) -> Result<WebGpuResult, AnyError> {\n\n let command_encoder_resource = state\n\n .resource_table\n\n .take::<WebGpuCommandEncoder>(command_encoder_rid)?;\n\n let command_encoder = command_encoder_resource.0;\n\n let instance = state.borrow::<super::Instance>();\n\n\n\n let descriptor = wgpu_types::CommandBufferDescriptor {\n\n label: label.map(Cow::from),\n\n };\n\n\n\n gfx_put!(command_encoder => instance.command_encoder_finish(\n\n command_encoder,\n\n &descriptor\n\n ) => state, WebGpuCommandBuffer)\n\n}\n", "file_path": 
"deno_webgpu/src/command_encoder.rs", "rank": 88, "score": 168614.82648662306 }, { "content": "#[op]\n\npub fn op_webgpu_buffer_get_mapped_range(\n\n state: &mut OpState,\n\n buffer_rid: ResourceId,\n\n offset: u64,\n\n size: Option<u64>,\n\n mut buf: ZeroCopyBuf,\n\n) -> Result<WebGpuResult, AnyError> {\n\n let instance = state.borrow::<super::Instance>();\n\n let buffer_resource = state.resource_table.get::<WebGpuBuffer>(buffer_rid)?;\n\n let buffer = buffer_resource.0;\n\n\n\n let (slice_pointer, range_size) = gfx_select!(buffer => instance.buffer_get_mapped_range(\n\n buffer,\n\n offset,\n\n size\n\n ))\n\n .map_err(|e| DomExceptionOperationError::new(&e.to_string()))?;\n\n\n\n let slice = unsafe { std::slice::from_raw_parts_mut(slice_pointer, range_size as usize) };\n\n buf.copy_from_slice(slice);\n\n\n\n let rid = state\n\n .resource_table\n\n .add(WebGpuBufferMapped(slice_pointer, range_size as usize));\n\n\n\n Ok(WebGpuResult::rid(rid))\n\n}\n\n\n", "file_path": "deno_webgpu/src/buffer.rs", "rank": 89, "score": 168614.82648662306 }, { "content": "#[op]\n\npub fn op_webgpu_create_render_bundle_encoder(\n\n state: &mut OpState,\n\n args: CreateRenderBundleEncoderArgs,\n\n) -> Result<WebGpuResult, AnyError> {\n\n let device_resource = state\n\n .resource_table\n\n .get::<super::WebGpuDevice>(args.device_rid)?;\n\n let device = device_resource.0;\n\n\n\n let depth_stencil =\n\n args.depth_stencil_format\n\n .map(|format| wgpu_types::RenderBundleDepthStencil {\n\n format,\n\n depth_read_only: args.depth_read_only,\n\n stencil_read_only: args.stencil_read_only,\n\n });\n\n\n\n let descriptor = wgpu_core::command::RenderBundleEncoderDescriptor {\n\n label: args.label.map(Cow::from),\n\n color_formats: Cow::from(args.color_formats),\n", "file_path": "deno_webgpu/src/bundle.rs", "rank": 90, "score": 168614.82648662306 }, { "content": "#[op]\n\npub fn op_webgpu_render_pass_end(\n\n state: &mut OpState,\n\n command_encoder_rid: ResourceId,\n\n render_pass_rid: 
ResourceId,\n\n) -> Result<WebGpuResult, AnyError> {\n\n let command_encoder_resource =\n\n state\n\n .resource_table\n\n .get::<super::command_encoder::WebGpuCommandEncoder>(command_encoder_rid)?;\n\n let command_encoder = command_encoder_resource.0;\n\n let render_pass_resource = state\n\n .resource_table\n\n .take::<WebGpuRenderPass>(render_pass_rid)?;\n\n let render_pass = &render_pass_resource.0.borrow();\n\n let instance = state.borrow::<super::Instance>();\n\n\n\n gfx_ok!(command_encoder => instance.command_encoder_run_render_pass(command_encoder, render_pass))\n\n}\n\n\n", "file_path": "deno_webgpu/src/render_pass.rs", "rank": 91, "score": 168614.82648662306 }, { "content": "pub fn map_texture_usage_to_resource_flags(\n\n usage: crate::TextureUses,\n\n) -> d3d12::D3D12_RESOURCE_FLAGS {\n\n let mut flags = 0;\n\n\n\n if usage.contains(crate::TextureUses::COLOR_TARGET) {\n\n flags |= d3d12::D3D12_RESOURCE_FLAG_ALLOW_RENDER_TARGET;\n\n }\n\n if usage.intersects(\n\n crate::TextureUses::DEPTH_STENCIL_READ | crate::TextureUses::DEPTH_STENCIL_WRITE,\n\n ) {\n\n flags |= d3d12::D3D12_RESOURCE_FLAG_ALLOW_DEPTH_STENCIL;\n\n if !usage.contains(crate::TextureUses::RESOURCE) {\n\n flags |= d3d12::D3D12_RESOURCE_FLAG_DENY_SHADER_RESOURCE;\n\n }\n\n }\n\n if usage.contains(crate::TextureUses::STORAGE_READ_WRITE) {\n\n flags |= d3d12::D3D12_RESOURCE_FLAG_ALLOW_UNORDERED_ACCESS;\n\n }\n\n\n\n flags\n\n}\n\n\n", "file_path": "wgpu-hal/src/dx12/conv.rs", "rank": 92, "score": 168614.82648662306 }, { "content": "#[op]\n\npub fn op_webgpu_render_bundle_encoder_draw(\n\n state: &mut OpState,\n\n render_bundle_encoder_rid: ResourceId,\n\n vertex_count: u32,\n\n instance_count: u32,\n\n first_vertex: u32,\n\n first_instance: u32,\n\n) -> Result<WebGpuResult, AnyError> {\n\n let render_bundle_encoder_resource = state\n\n .resource_table\n\n .get::<WebGpuRenderBundleEncoder>(render_bundle_encoder_rid)?;\n\n\n\n wgpu_core::command::bundle_ffi::wgpu_render_bundle_draw(\n\n &mut 
render_bundle_encoder_resource.0.borrow_mut(),\n\n vertex_count,\n\n instance_count,\n\n first_vertex,\n\n first_instance,\n\n );\n\n\n\n Ok(WebGpuResult::empty())\n\n}\n\n\n", "file_path": "deno_webgpu/src/bundle.rs", "rank": 93, "score": 168614.82648662306 }, { "content": "#[op]\n\npub fn op_webgpu_create_command_encoder(\n\n state: &mut OpState,\n\n device_rid: ResourceId,\n\n label: Option<String>,\n\n) -> Result<WebGpuResult, AnyError> {\n\n let instance = state.borrow::<super::Instance>();\n\n let device_resource = state\n\n .resource_table\n\n .get::<super::WebGpuDevice>(device_rid)?;\n\n let device = device_resource.0;\n\n\n\n let descriptor = wgpu_types::CommandEncoderDescriptor {\n\n label: label.map(Cow::from),\n\n };\n\n\n\n gfx_put!(device => instance.device_create_command_encoder(\n\n device,\n\n &descriptor,\n\n ()\n\n ) => state, WebGpuCommandEncoder)\n", "file_path": "deno_webgpu/src/command_encoder.rs", "rank": 94, "score": 168614.82648662306 }, { "content": "#[op]\n\npub fn op_webgpu_surface_get_current_texture(\n\n state: &mut OpState,\n\n device_rid: ResourceId,\n\n surface_rid: ResourceId,\n\n) -> Result<WebGpuResult, AnyError> {\n\n let instance = state.borrow::<super::Instance>();\n\n let device_resource = state\n\n .resource_table\n\n .get::<super::WebGpuDevice>(device_rid)?;\n\n let device = device_resource.0;\n\n let surface_resource = state.resource_table.get::<WebGpuSurface>(surface_rid)?;\n\n let surface = surface_resource.0;\n\n\n\n let output = gfx_select!(device => instance.surface_get_current_texture(surface, ()))?;\n\n\n\n match output.status {\n\n SurfaceStatus::Good | SurfaceStatus::Suboptimal => {\n\n let id = output.texture_id.unwrap();\n\n let rid = state.resource_table.add(crate::texture::WebGpuTexture(id));\n\n Ok(WebGpuResult::rid(rid))\n\n }\n\n _ => Err(AnyError::msg(\"Invalid Surface Status\")),\n\n }\n\n}\n\n\n", "file_path": "deno_webgpu/src/surface.rs", "rank": 95, "score": 168614.82648662306 }, { "content": 
"#[op]\n\npub fn op_webgpu_render_pass_draw(\n\n state: &mut OpState,\n\n render_pass_rid: ResourceId,\n\n vertex_count: u32,\n\n instance_count: u32,\n\n first_vertex: u32,\n\n first_instance: u32,\n\n) -> Result<WebGpuResult, AnyError> {\n\n let render_pass_resource = state\n\n .resource_table\n\n .get::<WebGpuRenderPass>(render_pass_rid)?;\n\n\n\n wgpu_core::command::render_ffi::wgpu_render_pass_draw(\n\n &mut render_pass_resource.0.borrow_mut(),\n\n vertex_count,\n\n instance_count,\n\n first_vertex,\n\n first_instance,\n\n );\n\n\n\n Ok(WebGpuResult::empty())\n\n}\n\n\n", "file_path": "deno_webgpu/src/render_pass.rs", "rank": 96, "score": 168614.82648662306 }, { "content": "#[op]\n\npub fn op_webgpu_compute_pass_end(\n\n state: &mut OpState,\n\n command_encoder_rid: ResourceId,\n\n compute_pass_rid: ResourceId,\n\n) -> Result<WebGpuResult, AnyError> {\n\n let command_encoder_resource =\n\n state\n\n .resource_table\n\n .get::<super::command_encoder::WebGpuCommandEncoder>(command_encoder_rid)?;\n\n let command_encoder = command_encoder_resource.0;\n\n let compute_pass_resource = state\n\n .resource_table\n\n .take::<WebGpuComputePass>(compute_pass_rid)?;\n\n let compute_pass = &compute_pass_resource.0.borrow();\n\n let instance = state.borrow::<super::Instance>();\n\n\n\n gfx_ok!(command_encoder => instance.command_encoder_run_compute_pass(\n\n command_encoder,\n\n compute_pass\n\n ))\n\n}\n\n\n", "file_path": "deno_webgpu/src/compute_pass.rs", "rank": 97, "score": 168614.82648662306 }, { "content": "#[op]\n\npub fn op_webgpu_create_bind_group_layout(\n\n state: &mut OpState,\n\n device_rid: ResourceId,\n\n label: Option<String>,\n\n entries: Vec<GpuBindGroupLayoutEntry>,\n\n) -> Result<WebGpuResult, AnyError> {\n\n let instance = state.borrow::<super::Instance>();\n\n let device_resource = state\n\n .resource_table\n\n .get::<super::WebGpuDevice>(device_rid)?;\n\n let device = device_resource.0;\n\n\n\n let entries = entries\n\n .into_iter()\n\n .map(|entry| 
{\n\n wgpu_types::BindGroupLayoutEntry {\n\n binding: entry.binding,\n\n visibility: wgpu_types::ShaderStages::from_bits(entry.visibility).unwrap(),\n\n ty: entry.binding_type.into(),\n\n count: None, // native-only\n", "file_path": "deno_webgpu/src/binding.rs", "rank": 98, "score": 168614.82648662306 }, { "content": "pub fn map_buffer_usage_to_resource_flags(usage: crate::BufferUses) -> d3d12::D3D12_RESOURCE_FLAGS {\n\n let mut flags = 0;\n\n if usage.contains(crate::BufferUses::STORAGE_READ_WRITE) {\n\n flags |= d3d12::D3D12_RESOURCE_FLAG_ALLOW_UNORDERED_ACCESS;\n\n }\n\n flags\n\n}\n\n\n", "file_path": "wgpu-hal/src/dx12/conv.rs", "rank": 99, "score": 167928.90096209952 } ]
Rust
linked-lists/fp-rust/persistent-list/src/third.rs
ctarrington/try-fp
63559ec6abd451c8a1decad5981a20fee498a171
#![warn(missing_docs)] use std::rc::Rc; pub struct PersistentList<T> { head: Link<T>, } type Link<T> = Option<Rc<Node<T>>>; struct Node<T> { next: Link<T>, element: T, } impl<T> PersistentList<T> { pub fn new() -> Self { Self { head: None } } } impl<T> Default for PersistentList<T> { fn default() -> Self { Self::new() } } impl<T> PersistentList<T> { pub fn prepend(&self, value: T) -> Self { Self { head: Some(Rc::new(Node { element: value, next: self.head.as_ref().map(|rc_node| Rc::clone(&rc_node)), })), } } pub fn tail(&self) -> Self { Self { head: self .head .as_ref() .and_then(|node| node.next.as_ref().map(|rc_node| Rc::clone(&rc_node))), } } pub fn head(&self) -> Option<&T> { self.head.as_ref().map(|node| &node.element) } } pub struct Iter<'a, T> { next: Option<&'a Node<T>>, } impl<T> PersistentList<T> { pub fn iter(&self) -> Iter<'_, T> { Iter { next: self.head.as_deref(), } } } impl<'a, T> Iterator for Iter<'a, T> { type Item = &'a T; fn next(&mut self) -> Option<Self::Item> { self.next.map(|node| { self.next = node.next.as_deref(); &node.element }) } } impl<T> Drop for PersistentList<T> { fn drop(&mut self) { let mut head = self.head.take(); while let Some(node) = head { if let Ok(mut node) = Rc::try_unwrap(node) { head = node.next.take(); } else { break; } } } } #[cfg(test)] mod test { use super::PersistentList; use std::cell::RefCell; #[test] fn simple() { let empty_list = PersistentList::new(); assert_eq!(empty_list.head(), None); assert_eq!(empty_list.tail().head(), None); let list_1 = empty_list.prepend(1); assert_eq!(empty_list.head(), None); assert_eq!(list_1.head(), Some(&1)); let list_321 = list_1.prepend(2).prepend(3); let list_21 = list_321.tail(); assert_eq!(list_321.head(), Some(&3)); assert_eq!(list_21.head(), Some(&2)); } #[test] fn iteration() { let empty_list: PersistentList<i32> = PersistentList::new(); let mut iter = empty_list.iter(); assert_eq!(iter.next(), None); let list = PersistentList::new().prepend(1).prepend(2).prepend(3); let mut 
iter = list.iter(); assert_eq!(iter.next(), Some(&3)); assert_eq!(iter.next(), Some(&2)); assert_eq!(iter.next(), Some(&1)); let list_421 = list.tail().prepend(4); let mut iter_321 = list.iter(); let mut iter_421 = list_421.iter(); assert_eq!(iter_321.next(), Some(&3)); assert_eq!(iter_321.next(), Some(&2)); assert_eq!(iter_321.next(), Some(&1)); assert_eq!(iter_421.next(), Some(&4)); assert_eq!(iter_421.next(), Some(&2)); assert_eq!(iter_421.next(), Some(&1)); } #[test] fn drop() { struct Watcher { events: RefCell<Vec<String>>, } impl Watcher { fn new() -> Self { Self { events: RefCell::new(vec![]), } } fn push(&self, value: String) { self.events.borrow_mut().push(value); } fn list(&self) -> String { self.events.borrow().join(",") } } struct Thing<'a> { value: i32, watcher: &'a Watcher, } impl<'a> Drop for Thing<'a> { fn drop(&mut self) { self.watcher.push(format!("dropping {} ", self.value)); } } let watcher = Watcher::new(); { let list_1: PersistentList<Thing> = PersistentList::default().prepend(Thing { value: 1, watcher: &watcher, }); assert_eq!(list_1.head().map(|thing| thing.value), Some(1)); { let list_321 = list_1 .prepend(Thing { value: 2, watcher: &watcher, }) .prepend(Thing { value: 3, watcher: &watcher, }); assert_eq!(list_321.head().map(|thing| thing.value), Some(3)); watcher.push("done with list_321".to_string()); } watcher.push("still using list_1".to_string()); assert_eq!(list_1.head().map(|thing| thing.value), Some(1)); watcher.push("done with list_1".to_string()); } assert_eq!(watcher.list(), "done with list_321,dropping 3 ,dropping 2 ,still using list_1,done with list_1,dropping 1 ".to_string()); } }
#![warn(missing_docs)] use std::rc::Rc; pub struct PersistentList<T> { head: Link<T>, } type Link<T> = Option<Rc<Node<T>>>; struct Node<T> { next: Link<T>, element: T, } impl<T> PersistentList<T> { pub fn new() -> Self { Self { head: None } } } impl<T> Default for PersistentList<T> { fn default() -> Self { Sel
events: RefCell::new(vec![]), } } fn push(&self, value: String) { self.events.borrow_mut().push(value); } fn list(&self) -> String { self.events.borrow().join(",") } } struct Thing<'a> { value: i32, watcher: &'a Watcher, } impl<'a> Drop for Thing<'a> { fn drop(&mut self) { self.watcher.push(format!("dropping {} ", self.value)); } } let watcher = Watcher::new(); { let list_1: PersistentList<Thing> = PersistentList::default().prepend(Thing { value: 1, watcher: &watcher, }); assert_eq!(list_1.head().map(|thing| thing.value), Some(1)); { let list_321 = list_1 .prepend(Thing { value: 2, watcher: &watcher, }) .prepend(Thing { value: 3, watcher: &watcher, }); assert_eq!(list_321.head().map(|thing| thing.value), Some(3)); watcher.push("done with list_321".to_string()); } watcher.push("still using list_1".to_string()); assert_eq!(list_1.head().map(|thing| thing.value), Some(1)); watcher.push("done with list_1".to_string()); } assert_eq!(watcher.list(), "done with list_321,dropping 3 ,dropping 2 ,still using list_1,done with list_1,dropping 1 ".to_string()); } }
f::new() } } impl<T> PersistentList<T> { pub fn prepend(&self, value: T) -> Self { Self { head: Some(Rc::new(Node { element: value, next: self.head.as_ref().map(|rc_node| Rc::clone(&rc_node)), })), } } pub fn tail(&self) -> Self { Self { head: self .head .as_ref() .and_then(|node| node.next.as_ref().map(|rc_node| Rc::clone(&rc_node))), } } pub fn head(&self) -> Option<&T> { self.head.as_ref().map(|node| &node.element) } } pub struct Iter<'a, T> { next: Option<&'a Node<T>>, } impl<T> PersistentList<T> { pub fn iter(&self) -> Iter<'_, T> { Iter { next: self.head.as_deref(), } } } impl<'a, T> Iterator for Iter<'a, T> { type Item = &'a T; fn next(&mut self) -> Option<Self::Item> { self.next.map(|node| { self.next = node.next.as_deref(); &node.element }) } } impl<T> Drop for PersistentList<T> { fn drop(&mut self) { let mut head = self.head.take(); while let Some(node) = head { if let Ok(mut node) = Rc::try_unwrap(node) { head = node.next.take(); } else { break; } } } } #[cfg(test)] mod test { use super::PersistentList; use std::cell::RefCell; #[test] fn simple() { let empty_list = PersistentList::new(); assert_eq!(empty_list.head(), None); assert_eq!(empty_list.tail().head(), None); let list_1 = empty_list.prepend(1); assert_eq!(empty_list.head(), None); assert_eq!(list_1.head(), Some(&1)); let list_321 = list_1.prepend(2).prepend(3); let list_21 = list_321.tail(); assert_eq!(list_321.head(), Some(&3)); assert_eq!(list_21.head(), Some(&2)); } #[test] fn iteration() { let empty_list: PersistentList<i32> = PersistentList::new(); let mut iter = empty_list.iter(); assert_eq!(iter.next(), None); let list = PersistentList::new().prepend(1).prepend(2).prepend(3); let mut iter = list.iter(); assert_eq!(iter.next(), Some(&3)); assert_eq!(iter.next(), Some(&2)); assert_eq!(iter.next(), Some(&1)); let list_421 = list.tail().prepend(4); let mut iter_321 = list.iter(); let mut iter_421 = list_421.iter(); assert_eq!(iter_321.next(), Some(&3)); assert_eq!(iter_321.next(), Some(&2)); 
assert_eq!(iter_321.next(), Some(&1)); assert_eq!(iter_421.next(), Some(&4)); assert_eq!(iter_421.next(), Some(&2)); assert_eq!(iter_421.next(), Some(&1)); } #[test] fn drop() { struct Watcher { events: RefCell<Vec<String>>, } impl Watcher { fn new() -> Self { Self {
random
[ { "content": "// the crux of the thing\n\nstruct Node {\n\n element: i32,\n\n next: Link,\n\n}\n\n\n\nimpl List {\n\n pub fn new() -> Self {\n\n List { head: Link::None }\n\n }\n\n\n\n pub fn push(&mut self, value: i32) {\n\n let popped_link = mem::replace(&mut self.head, Link::None);\n\n\n\n let new_node = Box::new(Node {\n\n element: value,\n\n next: popped_link,\n\n });\n\n\n\n self.head = Link::Some(new_node);\n\n }\n", "file_path": "linked-lists/non-fp-rust/linked_list/src/first.rs", "rank": 0, "score": 26877.831588960245 }, { "content": "// the crux of the thing\n\nstruct Node<T> {\n\n element: T,\n\n next: Link<T>,\n\n}\n\n\n\nimpl<T> List<T> {\n\n pub fn new() -> Self {\n\n List { head: None }\n\n }\n\n\n\n pub fn push(&mut self, value: T) {\n\n let boxed_node = Box::new(Node {\n\n element: value,\n\n next: self.head.take(),\n\n });\n\n\n\n self.head = Some(boxed_node);\n\n }\n\n\n\n pub fn peek_for_nosy_people(&self) -> Option<&T> {\n", "file_path": "linked-lists/non-fp-rust/linked_list/src/second.rs", "rank": 2, "score": 25328.155640764126 }, { "content": "// something to hold the variation\n\ntype Link<T> = Option<Box<Node<T>>>;\n\n\n", "file_path": "linked-lists/non-fp-rust/linked_list/src/second.rs", "rank": 4, "score": 21744.534327066576 }, { "content": "\n\n pub fn pop(&mut self) -> Option<i32> {\n\n let popped_link = mem::replace(&mut self.head, Link::None);\n\n\n\n let popped_value = match popped_link {\n\n Link::None => Option::None,\n\n Link::Some(boxed_node) => {\n\n let value = boxed_node.element;\n\n self.head = boxed_node.next;\n\n Option::Some(value)\n\n }\n\n };\n\n\n\n popped_value\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::List;\n", "file_path": "linked-lists/non-fp-rust/linked_list/src/first.rs", "rank": 7, "score": 7.717336280562506 }, { "content": " next: self.head.as_deref().map(|node| &*node),\n\n }\n\n }\n\n}\n\n\n\n// we need a lifetime since a reference is used in the creation of the struct\n\npub struct 
ListIter<'a, T> {\n\n next: Option<&'a Node<T>>,\n\n}\n\n\n\nimpl<'a, T> Iterator for ListIter<'a, T> {\n\n type Item = &'a T;\n\n\n\n fn next(&mut self) -> Option<Self::Item> {\n\n self.next.map(|node| {\n\n // deref and then unbox\n\n self.next = node.next.as_deref().map(|node| &*node);\n\n &node.element\n\n })\n\n }\n", "file_path": "linked-lists/non-fp-rust/linked_list/src/second.rs", "rank": 8, "score": 7.453304537006259 }, { "content": " }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::List;\n\n\n\n #[test]\n\n fn simple() {\n\n let mut list = List::new(); // head -> None\n\n assert_eq!(list.pop().is_none(), true);\n\n assert_eq!(list.peek().is_none(), true);\n\n assert_eq!(list.peek_for_nosy_people().is_none(), true);\n\n list.push(1); // head -> Some(1, None)\n\n list.push(2); // head -> Some(2, Some(1, None))\n\n list.push(3);\n\n\n\n assert_eq!(list.peek().unwrap(), &3);\n\n assert_eq!(list.peek_for_nosy_people().unwrap(), &3);\n\n assert_eq!(list.pop().unwrap(), 3);\n", "file_path": "linked-lists/non-fp-rust/linked_list/src/second.rs", "rank": 12, "score": 5.566427228584235 }, { "content": " type Item = &'a mut T;\n\n\n\n fn next(&mut self) -> Option<Self::Item> {\n\n self.next.take().map(|node| {\n\n // deref and then unbox\n\n self.next = node.next.as_deref_mut();\n\n &mut node.element\n\n })\n\n }\n\n}\n\n\n\n// ------------------- Drop ----------------------------------\n\n\n\n// unpack and drop without copying\n\nimpl<T> Drop for List<T> {\n\n fn drop(&mut self) {\n\n let mut popped_link = self.head.take();\n\n while let Some(mut boxed_node) = popped_link {\n\n popped_link = boxed_node.next.take();\n\n }\n", "file_path": "linked-lists/non-fp-rust/linked_list/src/second.rs", "rank": 14, "score": 5.139897929295699 }, { "content": " assert_eq!(Some(\"there\"), iterator.next());\n\n assert_eq!(Some(\"hi\"), iterator.next());\n\n assert_eq!(None, iterator.next());\n\n }\n\n\n\n #[test]\n\n fn iteration_iter() {\n\n let mut list = List::new();\n\n 
list.push(\"hi\");\n\n list.push(\"there\");\n\n\n\n let mut iterator = list.iter();\n\n assert_eq!(Some(&\"there\"), iterator.next());\n\n assert_eq!(Some(&\"hi\"), iterator.next());\n\n assert_eq!(None, iterator.next());\n\n\n\n assert_eq!(Some(\"there\"), list.pop());\n\n }\n\n\n\n #[test]\n", "file_path": "linked-lists/non-fp-rust/linked_list/src/second.rs", "rank": 15, "score": 4.933444149600879 }, { "content": "\n\n pub fn pop(&mut self) -> Option<T> {\n\n let popped_link = self.head.take();\n\n\n\n popped_link.map(|boxed_node| {\n\n self.head = boxed_node.next;\n\n boxed_node.element\n\n })\n\n }\n\n}\n\n\n\n// ------------------- iteration over Ts ----------------------------------\n\n\n\n// into_iter provides an iterator over Ts after the list is moved into the ListIntoIter\n\nimpl<T> List<T> {\n\n // creates an iterator. The list is moved into the ListIntoIter and is no longer available\n\n pub fn into_iter(self) -> ListIntoIter<T> {\n\n ListIntoIter(self)\n\n }\n\n}\n", "file_path": "linked-lists/non-fp-rust/linked_list/src/second.rs", "rank": 16, "score": 4.8160779485953125 }, { "content": "\n\n #[test]\n\n fn simple() {\n\n let mut list = List::new(); // head -> None\n\n assert_eq!(list.pop().is_none(), true);\n\n list.push(1); // head -> Some(1, None)\n\n list.push(2); // head -> Some(2, Some(1, None))\n\n list.push(3);\n\n assert_eq!(list.pop().unwrap(), 3);\n\n assert_eq!(list.pop().unwrap(), 2);\n\n list.push(4);\n\n list.push(5);\n\n assert_eq!(list.pop().unwrap(), 5);\n\n assert_eq!(list.pop().unwrap(), 4);\n\n assert_eq!(list.pop().unwrap(), 1);\n\n }\n\n}\n", "file_path": "linked-lists/non-fp-rust/linked_list/src/first.rs", "rank": 17, "score": 4.670584498671891 }, { "content": " // goal is to avoid the move out of the Box so we need the as_ref\n\n // look what map and the magic dot in node.element did for us!\n\n // Levels of indirection just vanish!\n\n // https://doc.rust-lang.org/std/option/\n\n let boxed_node_option: Option<&Box<Node<T>>> 
= self.head.as_ref();\n\n boxed_node_option.map(|boxed_node: &Box<Node<T>>| {\n\n let ref_to_boxed_node: &Box<Node<T>> = boxed_node;\n\n let ref_to_element: &T = &ref_to_boxed_node.element;\n\n ref_to_element\n\n })\n\n }\n\n\n\n // idiomatic and dense but same as the nosy version\n\n pub fn peek(&self) -> Option<&T> {\n\n self.head.as_ref().map(|boxed_node| &boxed_node.element)\n\n }\n\n\n\n pub fn peek_mut(&mut self) -> Option<&mut T> {\n\n self.head.as_mut().map(|node| &mut node.element)\n\n }\n", "file_path": "linked-lists/non-fp-rust/linked_list/src/second.rs", "rank": 19, "score": 4.442926766398223 }, { "content": "}\n\n\n\n// ------------------- iteration over mutable references to Ts ----------------------------------\n\n\n\n// create an iterator of mutable references to T given a mutable reference to the List\n\n// note the use of the anonymous lifetime\n\n// https://yegeun542.github.io/rust-edition-guide-ko/rust-2018/ownership-and-lifetimes/the-anonymous-lifetime.html\n\nimpl<T> List<T> {\n\n pub fn iter_mut(&mut self) -> ListIterMut<'_, T> {\n\n ListIterMut {\n\n next: self.head.as_deref_mut(),\n\n }\n\n }\n\n}\n\n\n\npub struct ListIterMut<'a, T> {\n\n next: Option<&'a mut Node<T>>,\n\n}\n\n\n\nimpl<'a, T> Iterator for ListIterMut<'a, T> {\n", "file_path": "linked-lists/non-fp-rust/linked_list/src/second.rs", "rank": 20, "score": 4.130054506979444 }, { "content": "// following along with\n\n// https://rust-unofficial.github.io/too-many-lists/first.html\n\nuse std::mem;\n\n\n\n// something small to expose publicly\n\npub struct List {\n\n head: Link,\n\n}\n\n\n\n// something to hold the variation\n", "file_path": "linked-lists/non-fp-rust/linked_list/src/first.rs", "rank": 21, "score": 4.0859755538301 }, { "content": "\n\n// wrap the list so we have a place to put the iteration logic\n\n// no need for a lifetime since the List is moved into it\n\npub struct ListIntoIter<T>(List<T>);\n\n\n\nimpl<T> Iterator for ListIntoIter<T> {\n\n type Item = T;\n\n\n\n 
fn next(&mut self) -> Option<Self::Item> {\n\n self.0.pop()\n\n }\n\n}\n\n\n\n// ------------------- iteration over references to Ts ----------------------------------\n\n\n\n// iter provides an iterator over references to Ts after the ListIter is given a ref to the List\n\nimpl<T> List<T> {\n\n // creates an iterator. The list is not consumed. Iterator provides references to the elements\n\n pub fn iter(&self) -> ListIter<T> {\n\n ListIter {\n", "file_path": "linked-lists/non-fp-rust/linked_list/src/second.rs", "rank": 23, "score": 3.939427758893946 }, { "content": " fn iteration_iter_mut() {\n\n let mut list = List::new();\n\n list.push(1);\n\n list.push(2);\n\n list.push(3);\n\n\n\n let mut iterator = list.iter_mut();\n\n assert_eq!(Some(&mut 3), iterator.next());\n\n assert_eq!(Some(&mut 2), iterator.next());\n\n assert_eq!(Some(&mut 1), iterator.next());\n\n }\n\n\n\n // TODO: need a way to test that drop does not copy\n\n}\n", "file_path": "linked-lists/non-fp-rust/linked_list/src/second.rs", "rank": 24, "score": 3.0933786251683015 }, { "content": " assert_eq!(list.pop().unwrap(), 2);\n\n list.push(4);\n\n list.push(5);\n\n assert_eq!(list.pop().unwrap(), 5);\n\n assert_eq!(list.pop().unwrap(), 4);\n\n assert_eq!(list.pop().unwrap(), 1);\n\n }\n\n\n\n #[test]\n\n fn mutable_peek() {\n\n let mut list = List::new(); // head -> None\n\n list.push(1);\n\n list.push(2);\n\n list.push(3);\n\n\n\n assert_eq!(Some(&3), list.peek());\n\n list.peek_mut().map(|mutable_value| {\n\n *mutable_value = 33;\n\n });\n\n\n", "file_path": "linked-lists/non-fp-rust/linked_list/src/second.rs", "rank": 26, "score": 2.857483166931218 }, { "content": " assert_eq!(Some(&33), list.peek());\n\n assert_eq!(Some(33), list.pop());\n\n }\n\n\n\n #[test]\n\n fn another_type() {\n\n let mut list = List::new();\n\n list.push(\"hi\");\n\n list.push(\"there\");\n\n assert_eq!(\"there\", list.pop().unwrap());\n\n assert_eq!(\"hi\", list.pop().unwrap());\n\n }\n\n\n\n #[test]\n\n fn 
iteration_into_iter() {\n\n let mut list = List::new();\n\n list.push(\"hi\");\n\n list.push(\"there\");\n\n\n\n let mut iterator = list.into_iter();\n", "file_path": "linked-lists/non-fp-rust/linked_list/src/second.rs", "rank": 27, "score": 2.6954839094364926 }, { "content": "pub mod first;\n\npub mod second;\n", "file_path": "linked-lists/non-fp-rust/linked_list/src/lib.rs", "rank": 28, "score": 2.503108182107182 }, { "content": "// following along with\n\n// https://rust-unofficial.github.io/too-many-lists/second.html\n\n\n\n// ------------------- Core data structure and List API ----------------------------------\n\n\n\n// something small to expose publicly\n\npub struct List<T> {\n\n head: Link<T>,\n\n}\n\n\n\n// something to hold the variation\n", "file_path": "linked-lists/non-fp-rust/linked_list/src/second.rs", "rank": 29, "score": 2.46587489076242 }, { "content": "pub mod third;\n", "file_path": "linked-lists/fp-rust/persistent-list/src/lib.rs", "rank": 31, "score": 2.2387356732954036 } ]
Rust
src/message/header/textual.rs
alexwennerberg/lettre
8caa135e33a7e2a79a4f8792f47d40ff2e56263f
use crate::message::utf8_b; use hyperx::{ header::{Formatter as HeaderFormatter, Header, RawLike}, Error as HeaderError, Result as HyperResult, }; use std::{fmt::Result as FmtResult, str::from_utf8}; macro_rules! text_header { ($(#[$attr:meta])* Header($type_name: ident, $header_name: expr )) => { #[derive(Debug, Clone, PartialEq)] $(#[$attr])* pub struct $type_name(String); impl Header for $type_name { fn header_name() -> &'static str { $header_name } fn parse_header<'a, T>(raw: &'a T) -> HyperResult<$type_name> where T: RawLike<'a>, Self: Sized, { raw.one() .ok_or(HeaderError::Header) .and_then(parse_text) .map($type_name) } fn fmt_header(&self, f: &mut HeaderFormatter<'_, '_>) -> FmtResult { fmt_text(&self.0, f) } } impl From<String> for $type_name { #[inline] fn from(text: String) -> Self { Self(text) } } impl AsRef<str> for $type_name { #[inline] fn as_ref(&self) -> &str { &self.0 } } }; } text_header!( Header(Subject, "Subject") ); text_header!( Header(Comments, "Comments") ); text_header!( Header(Keywords, "Keywords") ); text_header!( Header(InReplyTo, "In-Reply-To") ); text_header!( Header(References, "References") ); text_header!( Header(MessageId, "Message-Id") ); text_header!( Header(UserAgent, "User-Agent") ); text_header! 
{ Header(ContentId, "Content-ID") } fn parse_text(raw: &[u8]) -> HyperResult<String> { if let Ok(src) = from_utf8(raw) { if let Some(txt) = utf8_b::decode(src) { return Ok(txt); } } Err(HeaderError::Header) } fn fmt_text(s: &str, f: &mut HeaderFormatter<'_, '_>) -> FmtResult { f.fmt_line(&utf8_b::encode(s)) } #[cfg(test)] mod test { use super::Subject; use hyperx::header::Headers; #[test] fn format_ascii() { let mut headers = Headers::new(); headers.set(Subject("Sample subject".into())); assert_eq!(format!("{}", headers), "Subject: Sample subject\r\n"); } #[test] fn format_utf8() { let mut headers = Headers::new(); headers.set(Subject("Тема сообщения".into())); assert_eq!( format!("{}", headers), "Subject: =?utf-8?b?0KLQtdC80LAg0YHQvtC+0LHRidC10L3QuNGP?=\r\n" ); } #[test] fn parse_ascii() { let mut headers = Headers::new(); headers.set_raw("Subject", "Sample subject"); assert_eq!( headers.get::<Subject>(), Some(&Subject("Sample subject".into())) ); } #[test] fn parse_utf8() { let mut headers = Headers::new(); headers.set_raw( "Subject", "=?utf-8?b?0KLQtdC80LAg0YHQvtC+0LHRidC10L3QuNGP?=", ); assert_eq!( headers.get::<Subject>(), Some(&Subject("Тема сообщения".into())) ); } }
use crate::message::utf8_b; use hyperx::{ header::{Formatter as HeaderFormatter, Header, RawLike}, Error as HeaderError, Result as HyperResult, }; use std::{fmt::Result as FmtResult, str::from_utf8}; macro_rules! text_header { ($(#[$attr:meta])* Header($type_name: ident, $header_name: expr )) => { #[derive(Debug, Clone, PartialEq)] $(#[$attr])* pub struct $type_name(String); impl Header for $type_name { fn header_name() -> &'static str { $header_name } fn parse_header<'a, T>(raw: &'a T) -> HyperResult<$type_name> where T: RawLike<'a>, Self: Sized, { raw.one() .ok_or(HeaderError::Header) .and_then(parse_text) .map($type_name) } fn fmt_header(&self, f: &mut HeaderFormatter<'_, '_>) -> FmtResult { fmt_text(&self.0, f) } } impl From<String> for $type_name { #[inline] fn from(text: String) -> Self { Self(text) } } impl AsRef<str> for $type_name { #[inline] fn as_ref(&self) -> &str { &self.0 } } }; } text_header!( Header(Subject, "Subject") ); text_header!( Header(Comments, "Comments") ); text_header!( Header(Keywords, "Keywords") ); text_header!( Header(InReplyTo, "In-Reply-To") ); text_header!( Header(References, "References") ); text_header!( Header(MessageId, "Message-Id") ); text_header!( Header(UserAgent, "User-Agent") ); text_header! { Header(ContentId, "Content-ID") } fn parse_text(raw: &[u8]) -> HyperResult<String> {
Err(HeaderError::Header) } fn fmt_text(s: &str, f: &mut HeaderFormatter<'_, '_>) -> FmtResult { f.fmt_line(&utf8_b::encode(s)) } #[cfg(test)] mod test { use super::Subject; use hyperx::header::Headers; #[test] fn format_ascii() { let mut headers = Headers::new(); headers.set(Subject("Sample subject".into())); assert_eq!(format!("{}", headers), "Subject: Sample subject\r\n"); } #[test] fn format_utf8() { let mut headers = Headers::new(); headers.set(Subject("Тема сообщения".into())); assert_eq!( format!("{}", headers), "Subject: =?utf-8?b?0KLQtdC80LAg0YHQvtC+0LHRidC10L3QuNGP?=\r\n" ); } #[test] fn parse_ascii() { let mut headers = Headers::new(); headers.set_raw("Subject", "Sample subject"); assert_eq!( headers.get::<Subject>(), Some(&Subject("Sample subject".into())) ); } #[test] fn parse_utf8() { let mut headers = Headers::new(); headers.set_raw( "Subject", "=?utf-8?b?0KLQtdC80LAg0YHQvtC+0LHRidC10L3QuNGP?=", ); assert_eq!( headers.get::<Subject>(), Some(&Subject("Тема сообщения".into())) ); } }
if let Ok(src) = from_utf8(raw) { if let Some(txt) = utf8_b::decode(src) { return Ok(txt); } }
if_condition
[ { "content": "pub fn encode(s: &str) -> String {\n\n if s.chars().all(allowed_char) {\n\n s.into()\n\n } else {\n\n format!(\"=?utf-8?b?{}?=\", base64::encode(s))\n\n }\n\n}\n\n\n", "file_path": "src/message/utf8_b.rs", "rank": 2, "score": 184179.84348529213 }, { "content": "pub fn decode(s: &str) -> Option<String> {\n\n s.strip_prefix(\"=?utf-8?b?\")\n\n .and_then(|stripped| stripped.strip_suffix(\"?=\"))\n\n .map_or_else(\n\n || Some(s.into()),\n\n |stripped| {\n\n let decoded = base64::decode(stripped).ok()?;\n\n let decoded = String::from_utf8(decoded).ok()?;\n\n Some(decoded)\n\n },\n\n )\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::{decode, encode};\n\n\n\n #[test]\n\n fn encode_ascii() {\n\n assert_eq!(&encode(\"Kayo. ?\"), \"Kayo. ?\");\n", "file_path": "src/message/utf8_b.rs", "rank": 3, "score": 175273.61241966137 }, { "content": "fn format_mailboxes<'a>(mbs: Iter<'a, Mailbox>, f: &mut HeaderFormatter<'_, '_>) -> FmtResult {\n\n f.fmt_line(&Mailboxes::from(\n\n mbs.map(|mb| mb.recode_name(utf8_b::encode))\n\n .collect::<Vec<_>>(),\n\n ))\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::{From, Mailbox, Mailboxes};\n\n use hyperx::header::Headers;\n\n\n\n #[test]\n\n fn format_single_without_name() {\n\n let from = Mailboxes::new().with(\"[email protected]\".parse().unwrap());\n\n\n\n let mut headers = Headers::new();\n\n headers.set(From(from));\n\n\n\n assert_eq!(format!(\"{}\", headers), \"From: [email protected]\\r\\n\");\n", "file_path": "src/message/header/mailbox.rs", "rank": 4, "score": 163709.20247267562 }, { "content": "fn parse_mailboxes(raw: &[u8]) -> HyperResult<Mailboxes> {\n\n if let Ok(src) = from_utf8(raw) {\n\n if let Ok(mbs) = src.parse() {\n\n return Ok(mbs);\n\n }\n\n }\n\n Err(HeaderError::Header)\n\n}\n\n\n", "file_path": "src/message/header/mailbox.rs", "rank": 5, "score": 153070.444426129 }, { "content": "/// In place conversion to CRLF line endings\n\nfn in_place_crlf_line_endings(string: &mut String) {\n\n let 
indices = find_all_lf_char_indices(&string);\n\n\n\n for i in indices {\n\n // this relies on `indices` being in reverse order\n\n string.insert(i, '\\r');\n\n }\n\n}\n\n\n", "file_path": "src/message/body.rs", "rank": 6, "score": 148805.74968868116 }, { "content": "fn parse_category(i: &str) -> IResult<&str, Category> {\n\n alt((\n\n map(tag(\"0\"), |_| Category::Syntax),\n\n map(tag(\"1\"), |_| Category::Information),\n\n map(tag(\"2\"), |_| Category::Connections),\n\n map(tag(\"3\"), |_| Category::Unspecified3),\n\n map(tag(\"4\"), |_| Category::Unspecified4),\n\n map(tag(\"5\"), |_| Category::MailSystem),\n\n ))(i)\n\n}\n\n\n", "file_path": "src/transport/smtp/response.rs", "rank": 7, "score": 128025.09891268885 }, { "content": "fn parse_detail(i: &str) -> IResult<&str, Detail> {\n\n alt((\n\n map(tag(\"0\"), |_| Detail::Zero),\n\n map(tag(\"1\"), |_| Detail::One),\n\n map(tag(\"2\"), |_| Detail::Two),\n\n map(tag(\"3\"), |_| Detail::Three),\n\n map(tag(\"4\"), |_| Detail::Four),\n\n map(tag(\"5\"), |_| Detail::Five),\n\n map(tag(\"6\"), |_| Detail::Six),\n\n map(tag(\"7\"), |_| Detail::Seven),\n\n map(tag(\"8\"), |_| Detail::Eight),\n\n map(tag(\"9\"), |_| Detail::Nine),\n\n ))(i)\n\n}\n\n\n\npub(crate) fn parse_response(i: &str) -> IResult<&str, Response> {\n\n let (i, lines) = many0(tuple((\n\n parse_code,\n\n preceded(tag(\"-\"), take_until(\"\\r\\n\")),\n\n tag(\"\\r\\n\"),\n", "file_path": "src/transport/smtp/response.rs", "rank": 8, "score": 128025.09891268885 }, { "content": "fn parse_code(i: &str) -> IResult<&str, Code> {\n\n let (i, severity) = parse_severity(i)?;\n\n let (i, category) = parse_category(i)?;\n\n let (i, detail) = parse_detail(i)?;\n\n Ok((\n\n i,\n\n Code {\n\n severity,\n\n category,\n\n detail,\n\n },\n\n ))\n\n}\n\n\n", "file_path": "src/transport/smtp/response.rs", "rank": 9, "score": 128025.09891268885 }, { "content": "fn parse_severity(i: &str) -> IResult<&str, Severity> {\n\n alt((\n\n map(tag(\"2\"), |_| 
Severity::PositiveCompletion),\n\n map(tag(\"3\"), |_| Severity::PositiveIntermediate),\n\n map(tag(\"4\"), |_| Severity::TransientNegativeCompletion),\n\n map(tag(\"5\"), |_| Severity::PermanentNegativeCompletion),\n\n ))(i)\n\n}\n\n\n", "file_path": "src/transport/smtp/response.rs", "rank": 10, "score": 128025.09891268885 }, { "content": "/// Create a random MIME boundary.\n\n/// (Not cryptographically random)\n\nfn make_boundary() -> String {\n\n repeat_with(fastrand::alphanumeric).take(40).collect()\n\n}\n\n\n\nimpl MultiPartKind {\n\n fn to_mime<S: Into<String>>(&self, boundary: Option<S>) -> Mime {\n\n let boundary = boundary.map_or_else(make_boundary, Into::into);\n\n\n\n format!(\n\n \"multipart/{}; boundary=\\\"{}\\\"{}\",\n\n match self {\n\n Self::Mixed => \"mixed\",\n\n Self::Alternative => \"alternative\",\n\n Self::Related => \"related\",\n\n Self::Encrypted { .. } => \"encrypted\",\n\n Self::Signed { .. } => \"signed\",\n\n },\n\n boundary,\n\n match self {\n\n Self::Encrypted { protocol } => format!(\"; protocol=\\\"{}\\\"\", protocol),\n", "file_path": "src/message/mimebody.rs", "rank": 11, "score": 102042.39782329851 }, { "content": "/// Header which can contains multiple mailboxes\n\npub trait MailboxesHeader {\n\n fn join_mailboxes(&mut self, other: Self);\n\n}\n\n\n\nmacro_rules! 
mailbox_header {\n\n ($(#[$doc:meta])*($type_name: ident, $header_name: expr)) => {\n\n $(#[$doc])*\n\n #[derive(Debug, Clone, PartialEq)]\n\n pub struct $type_name(Mailbox);\n\n\n\n impl Header for $type_name {\n\n fn header_name() -> &'static str {\n\n $header_name\n\n }\n\n\n\n fn parse_header<'a, T>(raw: &'a T) -> HyperResult<Self> where\n\n T: RawLike<'a>,\n\n Self: Sized {\n\n raw.one()\n\n .ok_or(HeaderError::Header)\n", "file_path": "src/message/header/mailbox.rs", "rank": 12, "score": 93653.05604910369 }, { "content": "/// Checks whether it contains only US-ASCII characters,\n\n/// and no lines are longer than 1000 characters including the `\\n` character.\n\n///\n\n/// Most efficient content encoding available\n\nfn is_7bit_encoded(buf: &[u8]) -> bool {\n\n buf.is_ascii() && !contains_too_long_lines(buf)\n\n}\n\n\n", "file_path": "src/message/body.rs", "rank": 13, "score": 91034.95952774634 }, { "content": "/// Checks that no lines are longer than 1000 characters,\n\n/// including the `\\n` character.\n\n/// NOTE: 8bit isn't supported by all SMTP servers.\n\nfn is_8bit_encoded(buf: &[u8]) -> bool {\n\n !contains_too_long_lines(buf)\n\n}\n\n\n", "file_path": "src/message/body.rs", "rank": 14, "score": 91034.95952774634 }, { "content": "fn bench_simple_send(c: &mut Criterion) {\n\n let sender = SmtpTransport::builder_dangerous(\"127.0.0.1\")\n\n .port(2525)\n\n .build();\n\n\n\n c.bench_function(\"send email\", move |b| {\n\n b.iter(|| {\n\n let email = Message::builder()\n\n .from(\"NoBody <[email protected]>\".parse().unwrap())\n\n .reply_to(\"Yuin <[email protected]>\".parse().unwrap())\n\n .to(\"Hei <[email protected]>\".parse().unwrap())\n\n .subject(\"Happy new year\")\n\n .body(String::from(\"Be happy!\"))\n\n .unwrap();\n\n let result = black_box(sender.send(&email));\n\n assert!(result.is_ok());\n\n })\n\n });\n\n}\n\n\n", "file_path": "benches/transport_smtp.rs", "rank": 15, "score": 91034.95952774634 }, { "content": "fn bench_reuse_send(c: &mut 
Criterion) {\n\n let sender = SmtpTransport::builder_dangerous(\"127.0.0.1\")\n\n .port(2525)\n\n .build();\n\n c.bench_function(\"send email with connection reuse\", move |b| {\n\n b.iter(|| {\n\n let email = Message::builder()\n\n .from(\"NoBody <[email protected]>\".parse().unwrap())\n\n .reply_to(\"Yuin <[email protected]>\".parse().unwrap())\n\n .to(\"Hei <[email protected]>\".parse().unwrap())\n\n .subject(\"Happy new year\")\n\n .body(String::from(\"Be happy!\"))\n\n .unwrap();\n\n let result = black_box(sender.send(&email));\n\n assert!(result.is_ok());\n\n })\n\n });\n\n}\n\n\n\ncriterion_group!(benches, bench_simple_send, bench_reuse_send);\n\ncriterion_main!(benches);\n", "file_path": "benches/transport_smtp.rs", "rank": 16, "score": 91034.95952774634 }, { "content": "struct Inner {\n\n kind: Kind,\n\n source: Option<BoxError>,\n\n}\n\n\n\nimpl Error {\n\n pub(crate) fn new<E>(kind: Kind, source: Option<E>) -> Error\n\n where\n\n E: Into<BoxError>,\n\n {\n\n Error {\n\n inner: Box::new(Inner {\n\n kind,\n\n source: source.map(Into::into),\n\n }),\n\n }\n\n }\n\n\n\n /// Returns true if the error is from response\n\n pub fn is_response(&self) -> bool {\n", "file_path": "src/transport/smtp/error.rs", "rank": 17, "score": 90849.94100271421 }, { "content": "struct Inner {\n\n kind: Kind,\n\n source: Option<BoxError>,\n\n}\n\n\n\nimpl Error {\n\n pub(crate) fn new<E>(kind: Kind, source: Option<E>) -> Error\n\n where\n\n E: Into<BoxError>,\n\n {\n\n Error {\n\n inner: Box::new(Inner {\n\n kind,\n\n source: source.map(Into::into),\n\n }),\n\n }\n\n }\n\n\n\n /// Returns true if the error is from client\n\n pub fn is_client(&self) -> bool {\n", "file_path": "src/transport/sendmail/error.rs", "rank": 18, "score": 90849.94100271421 }, { "content": "struct Inner {\n\n kind: Kind,\n\n source: Option<BoxError>,\n\n}\n\n\n\nimpl Error {\n\n pub(crate) fn new<E>(kind: Kind, source: Option<E>) -> Error\n\n where\n\n E: Into<BoxError>,\n\n {\n\n Error {\n\n inner: 
Box::new(Inner {\n\n kind,\n\n source: source.map(Into::into),\n\n }),\n\n }\n\n }\n\n\n\n /// Returns true if the error is a file I/O error\n\n pub fn is_io(&self) -> bool {\n", "file_path": "src/transport/file/error.rs", "rank": 19, "score": 90849.94100271421 }, { "content": "/// Checks if there are lines that are longer than 1000 characters,\n\n/// including the `\\n` character.\n\nfn contains_too_long_lines(buf: &[u8]) -> bool {\n\n buf.len() > 1000 && buf.split(|&b| b == b'\\n').any(|line| line.len() > 999)\n\n}\n\n\n\nconst LINE_SEPARATOR: &[u8] = b\"\\r\\n\";\n\nconst LINE_MAX_LENGTH: usize = 78 - LINE_SEPARATOR.len();\n\n\n", "file_path": "src/message/body.rs", "rank": 20, "score": 88829.60464486887 }, { "content": "/// Find indices to all places where `\\r` should be inserted\n\n/// in order to make `s` have CRLF line endings\n\n///\n\n/// The list is reversed, which is more efficient.\n\nfn find_all_lf_char_indices(s: &str) -> Vec<usize> {\n\n let mut indices = Vec::new();\n\n\n\n let mut found_lf = false;\n\n for (i, c) in s.char_indices().rev() {\n\n if mem::take(&mut found_lf) && c != '\\r' {\n\n // the previous character was `\\n`, but this isn't a `\\r`\n\n indices.push(i + c.len_utf8());\n\n }\n\n\n\n found_lf = c == '\\n';\n\n }\n\n\n\n if found_lf {\n\n // the first character is `\\n`\n\n indices.push(0);\n\n }\n\n\n\n indices\n\n}\n", "file_path": "src/message/body.rs", "rank": 21, "score": 84094.3760676677 }, { "content": "fn main() {\n\n tracing_subscriber::fmt::init();\n\n\n\n let email = Message::builder()\n\n .from(\"NoBody <[email protected]>\".parse().unwrap())\n\n .reply_to(\"Yuin <[email protected]>\".parse().unwrap())\n\n .to(\"Hei <[email protected]>\".parse().unwrap())\n\n .subject(\"Happy new year\")\n\n .body(String::from(\"Be happy!\"))\n\n .unwrap();\n\n\n\n // Open a local connection on port 25\n\n let mailer = SmtpTransport::unencrypted_localhost();\n\n\n\n // Send the email\n\n match mailer.send(&email) {\n\n Ok(_) => 
println!(\"Email sent successfully!\"),\n\n Err(e) => panic!(\"Could not send email: {:?}\", e),\n\n }\n\n}\n", "file_path": "examples/smtp.rs", "rank": 22, "score": 55674.58437604927 }, { "content": " pub trait Sealed {}\n\n\n\n #[cfg(feature = \"tokio02\")]\n\n impl Sealed for Tokio02Executor {}\n\n\n\n #[cfg(feature = \"tokio1\")]\n\n impl Sealed for Tokio1Executor {}\n\n\n\n #[cfg(feature = \"async-std1\")]\n\n impl Sealed for AsyncStd1Executor {}\n\n}\n", "file_path": "src/executor.rs", "rank": 23, "score": 55413.4325439363 }, { "content": "#[derive(Debug, Clone)]\n\nstruct SmtpInfo {\n\n /// Name sent during EHLO\n\n hello_name: ClientId,\n\n /// Server we are connecting to\n\n server: String,\n\n /// Port to connect to\n\n port: u16,\n\n /// TLS security configuration\n\n tls: Tls,\n\n /// Optional enforced authentication mechanism\n\n authentication: Vec<Mechanism>,\n\n /// Credentials\n\n credentials: Option<Credentials>,\n\n /// Define network timeout\n\n /// It can be changed later for specific needs (like a different timeout for each SMTP command)\n\n timeout: Option<Duration>,\n\n}\n\n\n\nimpl Default for SmtpInfo {\n\n fn default() -> Self {\n", "file_path": "src/transport/smtp/mod.rs", "rank": 24, "score": 54779.53391170186 }, { "content": "fn main() {\n\n // The html we want to send.\n\n let html = r#\"<!DOCTYPE html>\n\n<html lang=\"en\">\n\n<head>\n\n <meta charset=\"UTF-8\">\n\n <meta name=\"viewport\" content=\"width=device-width, initial-scale=1.0\">\n\n <title>Hello from Lettre!</title>\n\n</head>\n\n<body>\n\n <div style=\"display: flex; flex-direction: column; align-items: center;\">\n\n <h2 style=\"font-family: Arial, Helvetica, sans-serif;\">Hello from Lettre!</h2>\n\n <h4 style=\"font-family: Arial, Helvetica, sans-serif;\">A mailer library for Rust</h4>\n\n </div>\n\n</body>\n\n</html>\"#;\n\n\n\n // Build the message.\n\n let email = Message::builder()\n\n .from(\"NoBody <[email protected]>\".parse().unwrap())\n", "file_path": 
"examples/basic_html.rs", "rank": 25, "score": 54032.0840308521 }, { "content": "fn main() {\n\n tracing_subscriber::fmt::init();\n\n\n\n let email = Message::builder()\n\n .from(\"NoBody <[email protected]>\".parse().unwrap())\n\n .reply_to(\"Yuin <[email protected]>\".parse().unwrap())\n\n .to(\"Hei <[email protected]>\".parse().unwrap())\n\n .subject(\"Happy new year\")\n\n .body(String::from(\"Be happy!\"))\n\n .unwrap();\n\n\n\n let creds = Credentials::new(\"smtp_username\".to_string(), \"smtp_password\".to_string());\n\n\n\n // Open a remote connection to gmail using STARTTLS\n\n let mailer = SmtpTransport::starttls_relay(\"smtp.gmail.com\")\n\n .unwrap()\n\n .credentials(creds)\n\n .build();\n\n\n\n // Send the email\n\n match mailer.send(&email) {\n\n Ok(_) => println!(\"Email sent successfully!\"),\n\n Err(e) => panic!(\"Could not send email: {:?}\", e),\n\n }\n\n}\n", "file_path": "examples/smtp_starttls.rs", "rank": 26, "score": 54032.0840308521 }, { "content": "fn main() {\n\n tracing_subscriber::fmt::init();\n\n\n\n let email = Message::builder()\n\n .from(\"NoBody <[email protected]>\".parse().unwrap())\n\n .reply_to(\"Yuin <[email protected]>\".parse().unwrap())\n\n .to(\"Hei <[email protected]>\".parse().unwrap())\n\n .subject(\"Happy new year\")\n\n .body(String::from(\"Be happy!\"))\n\n .unwrap();\n\n\n\n // Use a custom certificate stored on disk to securely verify the server's certificate\n\n let pem_cert = fs::read(\"certificate.pem\").unwrap();\n\n let cert = Certificate::from_pem(&pem_cert).unwrap();\n\n let tls = TlsParameters::builder(\"smtp.server.com\".to_string())\n\n .add_root_certificate(cert)\n\n .build()\n\n .unwrap();\n\n\n\n let creds = Credentials::new(\"smtp_username\".to_string(), \"smtp_password\".to_string());\n", "file_path": "examples/smtp_selfsigned.rs", "rank": 27, "score": 54032.0840308521 }, { "content": "fn main() {\n\n tracing_subscriber::fmt::init();\n\n\n\n let email = Message::builder()\n\n .from(\"NoBody <[email 
protected]>\".parse().unwrap())\n\n .reply_to(\"Yuin <[email protected]>\".parse().unwrap())\n\n .to(\"Hei <[email protected]>\".parse().unwrap())\n\n .subject(\"Happy new year\")\n\n .body(String::from(\"Be happy!\"))\n\n .unwrap();\n\n\n\n let creds = Credentials::new(\"smtp_username\".to_string(), \"smtp_password\".to_string());\n\n\n\n // Open a remote connection to gmail\n\n let mailer = SmtpTransport::relay(\"smtp.gmail.com\")\n\n .unwrap()\n\n .credentials(creds)\n\n .build();\n\n\n\n // Send the email\n\n match mailer.send(&email) {\n\n Ok(_) => println!(\"Email sent successfully!\"),\n\n Err(e) => panic!(\"Could not send email: {:?}\", e),\n\n }\n\n}\n", "file_path": "examples/smtp_tls.rs", "rank": 28, "score": 54032.0840308521 }, { "content": "fn main() {\n\n // The recipient's name. We might obtain this from a form or their email address.\n\n let recipient = \"Hei\";\n\n\n\n // Create the html we want to send.\n\n let html = html! {\n\n head {\n\n title { \"Hello from Lettre!\" }\n\n style type=\"text/css\" {\n\n \"h2, h4 { font-family: Arial, Helvetica, sans-serif; }\"\n\n }\n\n }\n\n div style=\"display: flex; flex-direction: column; align-items: center;\" {\n\n h2 { \"Hello from Lettre!\" }\n\n // Substitute in the name of our recipient.\n\n p { \"Dear \" (recipient) \",\" }\n\n p { \"This email was sent with Lettre, a mailer library for Rust!\"}\n\n p {\n\n \"This example uses \"\n\n a href=\"https://crates.io/crates/maud\" { \"maud\" }\n", "file_path": "examples/maud_html.rs", "rank": 29, "score": 54032.0840308521 }, { "content": "/// A trait for something that takes an encoded [`Body`].\n\n///\n\n/// Used by [`MessageBuilder::body`][super::MessageBuilder::body] and\n\n/// [`SinglePartBuilder::body`][super::SinglePartBuilder::body],\n\n/// which can either take something that can be encoded into [`Body`]\n\n/// or a pre-encoded [`Body`].\n\n///\n\n/// If `encoding` is `None` the best encoding between `7bit`, `quoted-printable`\n\n/// and `base64` is 
chosen based on the input body. **Best option.**\n\n///\n\n/// If `encoding` is `Some` the supplied encoding is used.\n\n/// **NOTE:** if using the specified `encoding` would result into a malformed\n\n/// body, this will panic!\n\npub trait IntoBody {\n\n fn into_body(self, encoding: Option<ContentTransferEncoding>) -> Body;\n\n}\n\n\n\nimpl<T> IntoBody for T\n\nwhere\n\n T: Into<MaybeString>,\n\n{\n\n fn into_body(self, encoding: Option<ContentTransferEncoding>) -> Body {\n\n match encoding {\n\n Some(encoding) => Body::new_with_encoding(self, encoding).expect(\"invalid encoding\"),\n\n None => Body::new(self),\n\n }\n\n }\n\n}\n\n\n\nimpl IntoBody for Body {\n\n fn into_body(self, encoding: Option<ContentTransferEncoding>) -> Body {\n\n let _ = encoding;\n\n\n", "file_path": "src/message/body.rs", "rank": 30, "score": 53907.450030087326 }, { "content": "/// Blocking Transport method for emails\n\npub trait Transport {\n\n /// Response produced by the Transport\n\n type Ok;\n\n /// Error produced by the Transport\n\n type Error;\n\n\n\n /// Sends the email\n\n #[cfg(feature = \"builder\")]\n\n #[cfg_attr(docsrs, doc(cfg(feature = \"builder\")))]\n\n fn send(&self, message: &Message) -> Result<Self::Ok, Self::Error> {\n\n let raw = message.formatted();\n\n self.send_raw(message.envelope(), &raw)\n\n }\n\n\n\n fn send_raw(&self, envelope: &Envelope, email: &[u8]) -> Result<Self::Ok, Self::Error>;\n\n}\n\n\n\n/// Async Transport method for emails\n\n#[cfg(any(feature = \"tokio02\", feature = \"tokio1\", feature = \"async-std1\"))]\n\n#[cfg_attr(\n\n docsrs,\n\n doc(cfg(any(feature = \"tokio02\", feature = \"tokio1\", feature = \"async-std1\")))\n\n)]\n", "file_path": "src/transport/mod.rs", "rank": 31, "score": 53900.80655572223 }, { "content": "#[derive(Default, Clone, Copy, Debug)]\n\n#[cfg_attr(feature = \"serde\", derive(serde::Serialize, serde::Deserialize))]\n\nstruct ClientCodec {\n\n escape_count: u8,\n\n}\n\n\n\nimpl ClientCodec {\n\n /// Creates a new 
client codec\n\n pub fn new() -> Self {\n\n ClientCodec::default()\n\n }\n\n\n\n /// Adds transparency\n\n fn encode(&mut self, frame: &[u8], buf: &mut Vec<u8>) {\n\n match frame.len() {\n\n 0 => {\n\n match self.escape_count {\n\n 0 => buf.extend_from_slice(b\"\\r\\n.\\r\\n\"),\n\n 1 => buf.extend_from_slice(b\"\\n.\\r\\n\"),\n\n 2 => buf.extend_from_slice(b\".\\r\\n\"),\n\n _ => unreachable!(),\n\n }\n", "file_path": "src/transport/smtp/client/mod.rs", "rank": 32, "score": 53498.299725556426 }, { "content": "#[derive(Copy, Clone, Debug)]\n\nstruct SmtpConnectionQuitter;\n\n\n\nimpl CustomizeConnection<SmtpConnection, Error> for SmtpConnectionQuitter {\n\n fn on_release(&self, conn: SmtpConnection) {\n\n let mut conn = conn;\n\n if !conn.has_broken() {\n\n let _quit = conn.quit();\n\n }\n\n }\n\n}\n", "file_path": "src/transport/smtp/pool.rs", "rank": 33, "score": 53494.7247266069 }, { "content": "type Id = String;\n\n\n\n/// Writes the content and the envelope information to a file\n\n#[derive(Debug)]\n\n#[cfg_attr(feature = \"serde\", derive(serde::Serialize, serde::Deserialize))]\n\n#[cfg_attr(docsrs, doc(cfg(feature = \"file-transport\")))]\n\npub struct FileTransport {\n\n path: PathBuf,\n\n #[cfg(feature = \"file-transport-envelope\")]\n\n save_envelope: bool,\n\n}\n\n\n\n/// Asynchronously writes the content and the envelope information to a file\n\n#[derive(Debug)]\n\n#[cfg_attr(feature = \"serde\", derive(serde::Serialize, serde::Deserialize))]\n\n#[cfg_attr(\n\n docsrs,\n\n doc(cfg(any(feature = \"tokio02\", feature = \"tokio1\", feature = \"async-std1\")))\n\n)]\n\n#[cfg(any(feature = \"async-std1\", feature = \"tokio02\", feature = \"tokio1\"))]\n", "file_path": "src/transport/file/mod.rs", "rank": 34, "score": 52797.67023223432 }, { "content": "#[async_trait]\n\npub trait AsyncTransport {\n\n /// Response produced by the Transport\n\n type Ok;\n\n /// Error produced by the Transport\n\n type Error;\n\n\n\n /// Sends the email\n\n #[cfg(feature = 
\"builder\")]\n\n #[cfg_attr(docsrs, doc(cfg(feature = \"builder\")))]\n\n // TODO take &Message\n\n async fn send(&self, message: Message) -> Result<Self::Ok, Self::Error> {\n\n let raw = message.formatted();\n\n let envelope = message.envelope();\n\n self.send_raw(&envelope, &raw).await\n\n }\n\n\n\n async fn send_raw(&self, envelope: &Envelope, email: &[u8]) -> Result<Self::Ok, Self::Error>;\n\n}\n", "file_path": "src/transport/mod.rs", "rank": 35, "score": 52509.087101656 }, { "content": "#[cfg(feature = \"rustls-tls\")]\n\nstruct InvalidCertsVerifier;\n\n\n\n#[cfg(feature = \"rustls-tls\")]\n\nimpl ServerCertVerifier for InvalidCertsVerifier {\n\n fn verify_server_cert(\n\n &self,\n\n _roots: &RootCertStore,\n\n _presented_certs: &[rustls::Certificate],\n\n _dns_name: DNSNameRef<'_>,\n\n _ocsp_response: &[u8],\n\n ) -> Result<ServerCertVerified, TLSError> {\n\n Ok(ServerCertVerified::assertion())\n\n }\n\n}\n", "file_path": "src/transport/smtp/client/tls.rs", "rank": 36, "score": 52300.53907698642 }, { "content": "/// A `Write`r that inserts a line separator `\\r\\n` every `max_line_length` bytes.\n\nstruct LineWrappingWriter<'a, W> {\n\n writer: &'a mut W,\n\n current_line_length: usize,\n\n max_line_length: usize,\n\n}\n\n\n\nimpl<'a, W> LineWrappingWriter<'a, W> {\n\n pub fn new(writer: &'a mut W, max_line_length: usize) -> Self {\n\n Self {\n\n writer,\n\n current_line_length: 0,\n\n max_line_length,\n\n }\n\n }\n\n}\n\n\n\nimpl<'a, W> Write for LineWrappingWriter<'a, W>\n\nwhere\n\n W: Write,\n\n{\n", "file_path": "src/message/body.rs", "rank": 37, "score": 49393.38716954474 }, { "content": "fn allowed_char(c: char) -> bool {\n\n c >= 1 as char && c <= 9 as char\n\n || c == 11 as char\n\n || c == 12 as char\n\n || c >= 14 as char && c <= 127 as char\n\n}\n\n\n", "file_path": "src/message/utf8_b.rs", "rank": 38, "score": 45169.66422513813 }, { "content": "#[async_trait]\n\npub trait Executor: Debug + Send + Sync + private::Sealed {\n\n #[doc(hidden)]\n\n 
#[cfg(feature = \"smtp-transport\")]\n\n async fn connect(\n\n hostname: &str,\n\n port: u16,\n\n hello_name: &ClientId,\n\n tls: &Tls,\n\n ) -> Result<AsyncSmtpConnection, Error>;\n\n\n\n #[doc(hidden)]\n\n #[cfg(feature = \"file-transport-envelope\")]\n\n async fn fs_read(path: &Path) -> IoResult<Vec<u8>>;\n\n\n\n #[doc(hidden)]\n\n #[cfg(feature = \"file-transport\")]\n\n async fn fs_write(path: &Path, contents: &[u8]) -> IoResult<()>;\n\n}\n\n\n\n/// Async [`Executor`] using `tokio` `0.2.x`\n", "file_path": "src/executor.rs", "rank": 39, "score": 42314.68495351299 }, { "content": " EmailMissingLocalPart,\n\n /// Invalid email: missing domain\n\n EmailMissingDomain,\n\n /// Cannot parse filename for attachment\n\n CannotParseFilename,\n\n /// IO error\n\n Io(std::io::Error),\n\n /// Non-ASCII chars\n\n NonAsciiChars,\n\n}\n\n\n\nimpl Display for Error {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), fmt::Error> {\n\n match self {\n\n Error::MissingFrom => f.write_str(\"missing source address, invalid envelope\"),\n\n Error::MissingTo => f.write_str(\"missing destination address, invalid envelope\"),\n\n Error::TooManyFrom => f.write_str(\"there can only be one source address\"),\n\n Error::EmailMissingAt => f.write_str(\"missing @ in email address\"),\n\n Error::EmailMissingLocalPart => f.write_str(\"missing local part in email address\"),\n\n Error::EmailMissingDomain => f.write_str(\"missing domain in email address\"),\n", "file_path": "src/error.rs", "rank": 40, "score": 39648.81128343343 }, { "content": " Error::CannotParseFilename => f.write_str(\"could not parse attachment filename\"),\n\n Error::NonAsciiChars => f.write_str(\"contains non-ASCII chars\"),\n\n Error::Io(e) => e.fmt(f),\n\n }\n\n }\n\n}\n\n\n\nimpl From<std::io::Error> for Error {\n\n fn from(err: std::io::Error) -> Error {\n\n Error::Io(err)\n\n }\n\n}\n\n\n\nimpl StdError for Error {}\n", "file_path": "src/error.rs", "rank": 41, "score": 39646.682007456795 }, { "content": "//! 
Error type for email messages\n\n\n\nuse std::{\n\n error::Error as StdError,\n\n fmt::{self, Display, Formatter},\n\n};\n\n\n\n// FIXME message-specific errors\n\n/// Error type for email content\n\n#[derive(Debug)]\n\npub enum Error {\n\n /// Missing from in envelope\n\n MissingFrom,\n\n /// Missing to in envelope\n\n MissingTo,\n\n /// Can only be one from in envelope\n\n TooManyFrom,\n\n /// Invalid email: missing at\n\n EmailMissingAt,\n\n /// Invalid email: missing local part\n", "file_path": "src/error.rs", "rank": 42, "score": 39643.92580722915 }, { "content": " fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n let mut builder = f.debug_struct(\"lettre::transport::file::Error\");\n\n\n\n builder.field(\"kind\", &self.inner.kind);\n\n\n\n if let Some(ref source) = self.inner.source {\n\n builder.field(\"source\", source);\n\n }\n\n\n\n builder.finish()\n\n }\n\n}\n\n\n\nimpl fmt::Display for Error {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n match self.inner.kind {\n\n Kind::Io => f.write_str(\"response error\")?,\n\n #[cfg(feature = \"file-transport-envelope\")]\n\n Kind::Envelope => f.write_str(\"internal client error\")?,\n\n };\n", "file_path": "src/transport/file/error.rs", "rank": 43, "score": 36211.037700465626 }, { "content": " matches!(self.inner.kind, Kind::Client)\n\n }\n\n\n\n /// Returns true if the error comes from the response\n\n pub fn is_response(&self) -> bool {\n\n matches!(self.inner.kind, Kind::Response)\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n\npub(crate) enum Kind {\n\n /// Error parsing a response\n\n Response,\n\n /// Internal client error\n\n Client,\n\n}\n\n\n\nimpl fmt::Debug for Error {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n let mut builder = f.debug_struct(\"lettre::transport::sendmail::Error\");\n", "file_path": "src/transport/sendmail/error.rs", "rank": 44, "score": 36210.8294615328 }, { "content": " Response,\n\n /// Internal client error\n\n Client,\n\n /// 
Connection error\n\n Connection,\n\n /// Underlying network i/o error\n\n Network,\n\n /// TLS error\n\n #[cfg_attr(docsrs, doc(cfg(any(feature = \"native-tls\", feature = \"rustls-tls\"))))]\n\n #[cfg(any(feature = \"native-tls\", feature = \"rustls-tls\"))]\n\n Tls,\n\n}\n\n\n\nimpl fmt::Debug for Error {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n let mut builder = f.debug_struct(\"lettre::transport::smtp::Error\");\n\n\n\n builder.field(\"kind\", &self.inner.kind);\n\n\n\n if let Some(ref source) = self.inner.source {\n", "file_path": "src/transport/smtp/error.rs", "rank": 45, "score": 36208.966317259285 }, { "content": "\n\n builder.field(\"kind\", &self.inner.kind);\n\n\n\n if let Some(ref source) = self.inner.source {\n\n builder.field(\"source\", source);\n\n }\n\n\n\n builder.finish()\n\n }\n\n}\n\n\n\nimpl fmt::Display for Error {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n match self.inner.kind {\n\n Kind::Response => f.write_str(\"response error\")?,\n\n Kind::Client => f.write_str(\"internal client error\")?,\n\n };\n\n\n\n if let Some(ref e) = self.inner.source {\n\n write!(f, \": {}\", e)?;\n", "file_path": "src/transport/sendmail/error.rs", "rank": 46, "score": 36206.901872195696 }, { "content": " builder.field(\"source\", source);\n\n }\n\n\n\n builder.finish()\n\n }\n\n}\n\n\n\nimpl fmt::Display for Error {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n match self.inner.kind {\n\n Kind::Response => f.write_str(\"response error\")?,\n\n Kind::Client => f.write_str(\"internal client error\")?,\n\n Kind::Network => f.write_str(\"network error\")?,\n\n Kind::Connection => f.write_str(\"Connection error\")?,\n\n #[cfg(any(feature = \"native-tls\", feature = \"rustls-tls\"))]\n\n Kind::Tls => f.write_str(\"tls error\")?,\n\n Kind::Transient(ref code) => {\n\n write!(f, \"transient error ({})\", code)?;\n\n }\n\n Kind::Permanent(ref code) => {\n", "file_path": 
"src/transport/smtp/error.rs", "rank": 47, "score": 36206.72909872647 }, { "content": " }\n\n\n\n Ok(())\n\n }\n\n}\n\n\n\nimpl StdError for Error {\n\n fn source(&self) -> Option<&(dyn StdError + 'static)> {\n\n self.inner.source.as_ref().map(|e| {\n\n let r: &(dyn std::error::Error + 'static) = &**e;\n\n r\n\n })\n\n }\n\n}\n\n\n\npub(crate) fn response<E: Into<BoxError>>(e: E) -> Error {\n\n Error::new(Kind::Response, Some(e))\n\n}\n\n\n\npub(crate) fn client<E: Into<BoxError>>(e: E) -> Error {\n\n Error::new(Kind::Client, Some(e))\n\n}\n", "file_path": "src/transport/sendmail/error.rs", "rank": 48, "score": 36206.24527847202 }, { "content": "//! Error and result type for sendmail transport\n\n\n\nuse crate::BoxError;\n\nuse std::{error::Error as StdError, fmt};\n\n\n\n/// The Errors that may occur when sending an email over sendmail\n\npub struct Error {\n\n inner: Box<Inner>,\n\n}\n\n\n", "file_path": "src/transport/sendmail/error.rs", "rank": 49, "score": 36206.22898337697 }, { "content": "//! 
Error and result type for file transport\n\n\n\nuse crate::BoxError;\n\nuse std::{error::Error as StdError, fmt};\n\n\n\n/// The Errors that may occur when sending an email over SMTP\n\npub struct Error {\n\n inner: Box<Inner>,\n\n}\n\n\n", "file_path": "src/transport/file/error.rs", "rank": 50, "score": 36206.22898337697 }, { "content": "\n\n if let Some(ref e) = self.inner.source {\n\n write!(f, \": {}\", e)?;\n\n }\n\n\n\n Ok(())\n\n }\n\n}\n\n\n\nimpl StdError for Error {\n\n fn source(&self) -> Option<&(dyn StdError + 'static)> {\n\n self.inner.source.as_ref().map(|e| {\n\n let r: &(dyn std::error::Error + 'static) = &**e;\n\n r\n\n })\n\n }\n\n}\n\n\n\npub(crate) fn io<E: Into<BoxError>>(e: E) -> Error {\n\n Error::new(Kind::Io, Some(e))\n\n}\n\n\n\n#[cfg(feature = \"file-transport-envelope\")]\n\npub(crate) fn envelope<E: Into<BoxError>>(e: E) -> Error {\n\n Error::new(Kind::Envelope, Some(e))\n\n}\n", "file_path": "src/transport/file/error.rs", "rank": 51, "score": 36205.585771898746 }, { "content": "//! 
Error and result type for SMTP clients\n\n\n\nuse crate::{\n\n transport::smtp::response::{Code, Severity},\n\n BoxError,\n\n};\n\nuse std::{error::Error as StdError, fmt};\n\n\n\n// Inspired by https://github.com/seanmonstar/reqwest/blob/a8566383168c0ef06c21f38cbc9213af6ff6db31/src/error.rs\n\n\n\n/// The Errors that may occur when sending an email over SMTP\n\npub struct Error {\n\n inner: Box<Inner>,\n\n}\n\n\n", "file_path": "src/transport/smtp/error.rs", "rank": 52, "score": 36204.827384670214 }, { "content": " write!(f, \"permanent error ({})\", code)?;\n\n }\n\n };\n\n\n\n if let Some(ref e) = self.inner.source {\n\n write!(f, \": {}\", e)?;\n\n }\n\n\n\n Ok(())\n\n }\n\n}\n\n\n\nimpl StdError for Error {\n\n fn source(&self) -> Option<&(dyn StdError + 'static)> {\n\n self.inner.source.as_ref().map(|e| {\n\n let r: &(dyn std::error::Error + 'static) = &**e;\n\n r\n\n })\n\n }\n\n}\n", "file_path": "src/transport/smtp/error.rs", "rank": 53, "score": 36203.04924195756 }, { "content": " let mut source = self.source();\n\n\n\n while let Some(err) = source {\n\n if let Some(io_err) = err.downcast_ref::<std::io::Error>() {\n\n return io_err.kind() == std::io::ErrorKind::TimedOut;\n\n }\n\n\n\n source = err.source();\n\n }\n\n\n\n false\n\n }\n\n\n\n /// Returns true if the error is from TLS\n\n #[cfg(any(feature = \"native-tls\", feature = \"rustls-tls\"))]\n\n #[cfg_attr(docsrs, doc(cfg(any(feature = \"native-tls\", feature = \"rustls-tls\"))))]\n\n pub fn is_tls(&self) -> bool {\n\n matches!(self.inner.kind, Kind::Tls)\n\n }\n\n\n", "file_path": "src/transport/smtp/error.rs", "rank": 54, "score": 36202.224966381764 }, { "content": " matches!(self.inner.kind, Kind::Io)\n\n }\n\n\n\n /// Returns true if the error is an envelope serialization or deserialization error\n\n #[cfg(feature = \"file-transport-envelope\")]\n\n pub fn is_envelope(&self) -> bool {\n\n matches!(self.inner.kind, Kind::Envelope)\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n\npub(crate) enum Kind {\n\n 
/// File I/O error\n\n Io,\n\n /// Envelope serialization/deserialization error\n\n #[cfg(feature = \"file-transport-envelope\")]\n\n Envelope,\n\n}\n\n\n\nimpl fmt::Debug for Error {\n", "file_path": "src/transport/file/error.rs", "rank": 55, "score": 36201.337569969946 }, { "content": "\n\npub(crate) fn code(c: Code) -> Error {\n\n match c.severity {\n\n Severity::TransientNegativeCompletion => Error::new::<Error>(Kind::Transient(c), None),\n\n Severity::PermanentNegativeCompletion => Error::new::<Error>(Kind::Permanent(c), None),\n\n _ => client(\"Unknown error code\"),\n\n }\n\n}\n\n\n\npub(crate) fn response<E: Into<BoxError>>(e: E) -> Error {\n\n Error::new(Kind::Response, Some(e))\n\n}\n\n\n\npub(crate) fn client<E: Into<BoxError>>(e: E) -> Error {\n\n Error::new(Kind::Client, Some(e))\n\n}\n\n\n\npub(crate) fn network<E: Into<BoxError>>(e: E) -> Error {\n\n Error::new(Kind::Network, Some(e))\n\n}\n", "file_path": "src/transport/smtp/error.rs", "rank": 56, "score": 36199.58111898888 }, { "content": " matches!(self.inner.kind, Kind::Response)\n\n }\n\n\n\n /// Returns true if the error is from client\n\n pub fn is_client(&self) -> bool {\n\n matches!(self.inner.kind, Kind::Client)\n\n }\n\n\n\n /// Returns true if the error is a transient SMTP error\n\n pub fn is_transient(&self) -> bool {\n\n matches!(self.inner.kind, Kind::Transient(_))\n\n }\n\n\n\n /// Returns true if the error is a permanent SMTP error\n\n pub fn is_permanent(&self) -> bool {\n\n matches!(self.inner.kind, Kind::Permanent(_))\n\n }\n\n\n\n /// Returns true if the error is caused by a timeout\n\n pub fn is_timeout(&self) -> bool {\n", "file_path": "src/transport/smtp/error.rs", "rank": 57, "score": 36199.31380256773 }, { "content": "\n\npub(crate) fn connection<E: Into<BoxError>>(e: E) -> Error {\n\n Error::new(Kind::Connection, Some(e))\n\n}\n\n\n\n#[cfg(any(feature = \"native-tls\", feature = \"rustls-tls\"))]\n\npub(crate) fn tls<E: Into<BoxError>>(e: E) -> Error {\n\n 
Error::new(Kind::Tls, Some(e))\n\n}\n", "file_path": "src/transport/smtp/error.rs", "rank": 58, "score": 36199.217039762654 }, { "content": " /// Returns the status code, if the error was generated from a response.\n\n pub fn status(&self) -> Option<Code> {\n\n match self.inner.kind {\n\n Kind::Transient(code) | Kind::Permanent(code) => Some(code),\n\n _ => None,\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n\npub(crate) enum Kind {\n\n /// Transient SMTP error, 4xx reply code\n\n ///\n\n /// [RFC 5321, section 4.2.1](https://tools.ietf.org/html/rfc5321#section-4.2.1)\n\n Transient(Code),\n\n /// Permanent SMTP error, 5xx reply code\n\n ///\n\n /// [RFC 5321, section 4.2.1](https://tools.ietf.org/html/rfc5321#section-4.2.1)\n\n Permanent(Code),\n\n /// Error parsing a response\n", "file_path": "src/transport/smtp/error.rs", "rank": 59, "score": 36197.74751569059 }, { "content": "use hyperx::{\n\n header::{Formatter as HeaderFormatter, Header, RawLike},\n\n Error as HeaderError, Result as HyperResult,\n\n};\n\nuse std::{fmt::Result as FmtResult, str::from_utf8};\n\n\n\n#[derive(Debug, Copy, Clone, PartialEq)]\n\n/// Message format version, defined in [RFC2045](https://tools.ietf.org/html/rfc2045#section-4)\n\npub struct MimeVersion {\n\n major: u8,\n\n minor: u8,\n\n}\n\n\n\npub const MIME_VERSION_1_0: MimeVersion = MimeVersion::new(1, 0);\n\n\n\nimpl MimeVersion {\n\n pub const fn new(major: u8, minor: u8) -> Self {\n\n MimeVersion { major, minor }\n\n }\n\n\n", "file_path": "src/message/header/special.rs", "rank": 61, "score": 36077.18942792344 }, { "content": " #[inline]\n\n pub const fn major(self) -> u8 {\n\n self.major\n\n }\n\n\n\n #[inline]\n\n pub const fn minor(self) -> u8 {\n\n self.minor\n\n }\n\n}\n\n\n\nimpl Default for MimeVersion {\n\n fn default() -> Self {\n\n MIME_VERSION_1_0\n\n }\n\n}\n\n\n\nimpl Header for MimeVersion {\n\n fn header_name() -> &'static str {\n\n \"MIME-Version\"\n", "file_path": "src/message/header/special.rs", "rank": 63, "score": 
36073.398925510184 }, { "content": " fn from(this: $type_name) -> Mailbox {\n\n this.0\n\n }\n\n }\n\n };\n\n}\n\n\n\nmacro_rules! mailboxes_header {\n\n ($(#[$doc:meta])*($type_name: ident, $header_name: expr)) => {\n\n $(#[$doc])*\n\n #[derive(Debug, Clone, PartialEq)]\n\n pub struct $type_name(pub(crate) Mailboxes);\n\n\n\n impl MailboxesHeader for $type_name {\n\n fn join_mailboxes(&mut self, other: Self) {\n\n self.0.extend(other.0);\n\n }\n\n }\n\n\n\n impl Header for $type_name {\n", "file_path": "src/message/header/mailbox.rs", "rank": 64, "score": 36070.63883059619 }, { "content": " fn header_name() -> &'static str {\n\n $header_name\n\n }\n\n\n\n fn parse_header<'a, T>(raw: &'a T) -> HyperResult<$type_name>\n\n where\n\n T: RawLike<'a>,\n\n Self: Sized,\n\n {\n\n raw.one()\n\n .ok_or(HeaderError::Header)\n\n .and_then(parse_mailboxes)\n\n .map($type_name)\n\n }\n\n\n\n fn fmt_header(&self, f: &mut HeaderFormatter<'_, '_>) -> FmtResult {\n\n format_mailboxes(self.0.iter(), f)\n\n }\n\n }\n\n\n", "file_path": "src/message/header/mailbox.rs", "rank": 65, "score": 36068.883530015555 }, { "content": "use hyperx::{\n\n header::{Formatter as HeaderFormatter, Header, RawLike},\n\n Error as HeaderError, Result as HyperResult,\n\n};\n\nuse std::{\n\n fmt::{Display, Formatter as FmtFormatter, Result as FmtResult},\n\n str::{from_utf8, FromStr},\n\n};\n\n\n\n/// `Content-Transfer-Encoding` of the body\n\n///\n\n/// The `Message` builder takes care of choosing the most\n\n/// efficient encoding based on the chosen body, so in most\n\n/// use-caches this header shouldn't be set manually.\n\n#[derive(Debug, Clone, Copy, PartialEq)]\n\npub enum ContentTransferEncoding {\n\n SevenBit,\n\n QuotedPrintable,\n\n Base64,\n\n // 8BITMIME\n", "file_path": "src/message/header/content.rs", "rank": 66, "score": 36068.79355328515 }, { "content": " .and_then(parse_mailboxes)\n\n .and_then(|mbs| {\n\n mbs.into_single().ok_or(HeaderError::Header)\n\n }).map($type_name)\n\n }\n\n\n\n 
fn fmt_header(&self, f: &mut HeaderFormatter<'_, '_>) -> FmtResult {\n\n f.fmt_line(&self.0.recode_name(utf8_b::encode))\n\n }\n\n }\n\n\n\n impl std::convert::From<Mailbox> for $type_name {\n\n #[inline]\n\n fn from(mailbox: Mailbox) -> Self {\n\n Self(mailbox)\n\n }\n\n }\n\n\n\n impl std::convert::From<$type_name> for Mailbox {\n\n #[inline]\n", "file_path": "src/message/header/mailbox.rs", "rank": 68, "score": 36066.91737503027 }, { "content": "}\n\n\n\nimpl FromStr for ContentTransferEncoding {\n\n type Err = String;\n\n fn from_str(s: &str) -> Result<Self, Self::Err> {\n\n match s {\n\n \"7bit\" => Ok(Self::SevenBit),\n\n \"quoted-printable\" => Ok(Self::QuotedPrintable),\n\n \"base64\" => Ok(Self::Base64),\n\n \"8bit\" => Ok(Self::EightBit),\n\n \"binary\" => Ok(Self::Binary),\n\n _ => Err(s.into()),\n\n }\n\n }\n\n}\n\n\n\nimpl Header for ContentTransferEncoding {\n\n fn header_name() -> &'static str {\n\n \"Content-Transfer-Encoding\"\n\n }\n", "file_path": "src/message/header/content.rs", "rank": 69, "score": 36066.87379684017 }, { "content": "use crate::message::{\n\n mailbox::{Mailbox, Mailboxes},\n\n utf8_b,\n\n};\n\nuse hyperx::{\n\n header::{Formatter as HeaderFormatter, Header, RawLike},\n\n Error as HeaderError, Result as HyperResult,\n\n};\n\nuse std::{fmt::Result as FmtResult, slice::Iter, str::from_utf8};\n\n\n\n/// Header which can contains multiple mailboxes\n", "file_path": "src/message/header/mailbox.rs", "rank": 70, "score": 36065.69948142509 }, { "content": "\n\n // FIXME HeaderError->HeaderError, same for result\n\n fn parse_header<'a, T>(raw: &'a T) -> HyperResult<Self>\n\n where\n\n T: RawLike<'a>,\n\n Self: Sized,\n\n {\n\n raw.one()\n\n .ok_or(HeaderError::Header)\n\n .and_then(|r| from_utf8(r).map_err(|_| HeaderError::Header))\n\n .and_then(|s| {\n\n s.parse::<ContentTransferEncoding>()\n\n .map_err(|_| HeaderError::Header)\n\n })\n\n }\n\n\n\n fn fmt_header(&self, f: &mut HeaderFormatter<'_, '_>) -> FmtResult {\n\n 
f.fmt_line(&format!(\"{}\", self))\n\n }\n\n}\n", "file_path": "src/message/header/content.rs", "rank": 71, "score": 36064.9609002403 }, { "content": " }\n\n\n\n fn parse_header<'a, T>(raw: &'a T) -> HyperResult<Self>\n\n where\n\n T: RawLike<'a>,\n\n Self: Sized,\n\n {\n\n raw.one().ok_or(HeaderError::Header).and_then(|r| {\n\n let mut s = from_utf8(r).map_err(|_| HeaderError::Header)?.split('.');\n\n\n\n let major = s.next().ok_or(HeaderError::Header)?;\n\n let minor = s.next().ok_or(HeaderError::Header)?;\n\n let major = major.parse().map_err(|_| HeaderError::Header)?;\n\n let minor = minor.parse().map_err(|_| HeaderError::Header)?;\n\n Ok(MimeVersion::new(major, minor))\n\n })\n\n }\n\n\n\n fn fmt_header(&self, f: &mut HeaderFormatter<'_, '_>) -> FmtResult {\n\n f.fmt_line(&format!(\"{}.{}\", self.major, self.minor))\n", "file_path": "src/message/header/special.rs", "rank": 72, "score": 36063.65228020348 }, { "content": " impl std::convert::From<Mailboxes> for $type_name {\n\n #[inline]\n\n fn from(mailboxes: Mailboxes) -> Self {\n\n Self(mailboxes)\n\n }\n\n }\n\n\n\n impl std::convert::From<$type_name> for Mailboxes {\n\n #[inline]\n\n fn from(this: $type_name) -> Mailboxes {\n\n this.0\n\n }\n\n }\n\n };\n\n}\n\n\n\nmailbox_header! {\n\n /**\n\n\n\n `Sender` header\n", "file_path": "src/message/header/mailbox.rs", "rank": 73, "score": 36059.08266078293 }, { "content": " EightBit,\n\n Binary,\n\n}\n\n\n\nimpl Default for ContentTransferEncoding {\n\n fn default() -> Self {\n\n ContentTransferEncoding::Base64\n\n }\n\n}\n\n\n\nimpl Display for ContentTransferEncoding {\n\n fn fmt(&self, f: &mut FmtFormatter<'_>) -> FmtResult {\n\n f.write_str(match *self {\n\n Self::SevenBit => \"7bit\",\n\n Self::QuotedPrintable => \"quoted-printable\",\n\n Self::Base64 => \"base64\",\n\n Self::EightBit => \"8bit\",\n\n Self::Binary => \"binary\",\n\n })\n\n }\n", "file_path": "src/message/header/content.rs", "rank": 74, "score": 36058.42305299376 }, { "content": "//! 
Headers widely used in email messages\n\n\n\nmod content;\n\nmod mailbox;\n\nmod special;\n\nmod textual;\n\n\n\npub use self::{content::*, mailbox::*, special::*, textual::*};\n\n\n\npub use hyperx::header::{\n\n Charset, ContentDisposition, ContentLocation, ContentType, Date, DispositionParam,\n\n DispositionType, Header, Headers, HttpDate as EmailDate,\n\n};\n", "file_path": "src/message/header/mod.rs", "rank": 75, "score": 36057.21169156983 }, { "content": " }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::{MimeVersion, MIME_VERSION_1_0};\n\n use hyperx::header::Headers;\n\n\n\n #[test]\n\n fn format_mime_version() {\n\n let mut headers = Headers::new();\n\n\n\n headers.set(MIME_VERSION_1_0);\n\n\n\n assert_eq!(format!(\"{}\", headers), \"MIME-Version: 1.0\\r\\n\");\n\n\n\n headers.set(MimeVersion::new(0, 1));\n\n\n\n assert_eq!(format!(\"{}\", headers), \"MIME-Version: 0.1\\r\\n\");\n\n }\n", "file_path": "src/message/header/special.rs", "rank": 76, "score": 36056.13322467881 }, { "content": "\n\n#[cfg(test)]\n\nmod test {\n\n use super::ContentTransferEncoding;\n\n use hyperx::header::Headers;\n\n\n\n #[test]\n\n fn format_content_transfer_encoding() {\n\n let mut headers = Headers::new();\n\n\n\n headers.set(ContentTransferEncoding::SevenBit);\n\n\n\n assert_eq!(\n\n format!(\"{}\", headers),\n\n \"Content-Transfer-Encoding: 7bit\\r\\n\"\n\n );\n\n\n\n headers.set(ContentTransferEncoding::Base64);\n\n\n\n assert_eq!(\n", "file_path": "src/message/header/content.rs", "rank": 77, "score": 36055.879908945724 }, { "content": " }\n\n\n\n #[test]\n\n fn format_single_with_name() {\n\n let from = Mailboxes::new().with(\"K. <[email protected]>\".parse().unwrap());\n\n\n\n let mut headers = Headers::new();\n\n headers.set(From(from));\n\n\n\n assert_eq!(format!(\"{}\", headers), \"From: K. 
<[email protected]>\\r\\n\");\n\n }\n\n\n\n #[test]\n\n fn format_multi_without_name() {\n\n let from = Mailboxes::new()\n\n .with(\"[email protected]\".parse().unwrap())\n\n .with(\"[email protected]\".parse().unwrap());\n\n\n\n let mut headers = Headers::new();\n\n headers.set(From(from));\n", "file_path": "src/message/header/mailbox.rs", "rank": 81, "score": 36049.85520270367 }, { "content": " let mut headers = Headers::new();\n\n headers.set_raw(\"From\", \"[email protected]\");\n\n\n\n assert_eq!(headers.get::<From>(), Some(&From(from)));\n\n }\n\n\n\n #[test]\n\n fn parse_single_with_name() {\n\n let from = vec![\"K. <[email protected]>\".parse().unwrap()].into();\n\n\n\n let mut headers = Headers::new();\n\n headers.set_raw(\"From\", \"K. <[email protected]>\");\n\n\n\n assert_eq!(headers.get::<From>(), Some(&From(from)));\n\n }\n\n\n\n #[test]\n\n fn parse_multi_without_name() {\n\n let from: Vec<Mailbox> = vec![\n\n \"[email protected]\".parse().unwrap(),\n", "file_path": "src/message/header/mailbox.rs", "rank": 82, "score": 36049.828201841745 }, { "content": " \"[email protected]\".parse().unwrap(),\n\n ];\n\n\n\n let mut headers = Headers::new();\n\n headers.set_raw(\"From\", \"[email protected], [email protected]\");\n\n\n\n assert_eq!(headers.get::<From>(), Some(&From(from.into())));\n\n }\n\n\n\n #[test]\n\n fn parse_multi_with_name() {\n\n let from: Vec<Mailbox> = vec![\n\n \"K. <[email protected]>\".parse().unwrap(),\n\n \"Pony P. <[email protected]>\".parse().unwrap(),\n\n ];\n\n\n\n let mut headers = Headers::new();\n\n headers.set_raw(\"From\", \"K. <[email protected]>, Pony P. 
<[email protected]>\");\n\n\n\n assert_eq!(headers.get::<From>(), Some(&From(from.into())));\n", "file_path": "src/message/header/mailbox.rs", "rank": 83, "score": 36049.5699070762 }, { "content": "\n\n #[test]\n\n fn parse_mime_version() {\n\n let mut headers = Headers::new();\n\n\n\n headers.set_raw(\"MIME-Version\", \"1.0\");\n\n\n\n assert_eq!(headers.get::<MimeVersion>(), Some(&MIME_VERSION_1_0));\n\n\n\n headers.set_raw(\"MIME-Version\", \"0.1\");\n\n\n\n assert_eq!(headers.get::<MimeVersion>(), Some(&MimeVersion::new(0, 1)));\n\n }\n\n}\n", "file_path": "src/message/header/special.rs", "rank": 84, "score": 36049.251991250145 }, { "content": " }\n\n\n\n #[test]\n\n fn parse_single_with_utf8_name() {\n\n let from: Vec<Mailbox> = vec![\"Кайо <[email protected]>\".parse().unwrap()];\n\n\n\n let mut headers = Headers::new();\n\n headers.set_raw(\"From\", \"=?utf-8?b?0JrQsNC50L4=?= <[email protected]>\");\n\n\n\n assert_eq!(headers.get::<From>(), Some(&From(from.into())));\n\n }\n\n}\n", "file_path": "src/message/header/mailbox.rs", "rank": 85, "score": 36049.113414387204 }, { "content": " );\n\n }\n\n\n\n #[test]\n\n fn format_single_with_utf8_name() {\n\n let from = vec![\"Кайо <[email protected]>\".parse().unwrap()];\n\n\n\n let mut headers = Headers::new();\n\n headers.set(From(from.into()));\n\n\n\n assert_eq!(\n\n format!(\"{}\", headers),\n\n \"From: =?utf-8?b?0JrQsNC50L4=?= <[email protected]>\\r\\n\"\n\n );\n\n }\n\n\n\n #[test]\n\n fn parse_single_without_name() {\n\n let from = vec![\"[email protected]\".parse().unwrap()].into();\n\n\n", "file_path": "src/message/header/mailbox.rs", "rank": 86, "score": 36048.74640783129 }, { "content": " format!(\"{}\", headers),\n\n \"Content-Transfer-Encoding: base64\\r\\n\"\n\n );\n\n }\n\n\n\n #[test]\n\n fn parse_content_transfer_encoding() {\n\n let mut headers = Headers::new();\n\n\n\n headers.set_raw(\"Content-Transfer-Encoding\", \"7bit\");\n\n\n\n assert_eq!(\n\n headers.get::<ContentTransferEncoding>(),\n\n 
Some(&ContentTransferEncoding::SevenBit)\n\n );\n\n\n\n headers.set_raw(\"Content-Transfer-Encoding\", \"base64\");\n\n\n\n assert_eq!(\n\n headers.get::<ContentTransferEncoding>(),\n\n Some(&ContentTransferEncoding::Base64)\n\n );\n\n }\n\n}\n", "file_path": "src/message/header/content.rs", "rank": 87, "score": 36048.74243239019 }, { "content": "\n\n assert_eq!(\n\n format!(\"{}\", headers),\n\n \"From: [email protected], [email protected]\\r\\n\"\n\n );\n\n }\n\n\n\n #[test]\n\n fn format_multi_with_name() {\n\n let from = vec![\n\n \"K. <[email protected]>\".parse().unwrap(),\n\n \"Pony P. <[email protected]>\".parse().unwrap(),\n\n ];\n\n\n\n let mut headers = Headers::new();\n\n headers.set(From(from.into()));\n\n\n\n assert_eq!(\n\n format!(\"{}\", headers),\n\n \"From: K. <[email protected]>, Pony P. <[email protected]>\\r\\n\"\n", "file_path": "src/message/header/mailbox.rs", "rank": 88, "score": 36048.63427216864 }, { "content": "\n\nmailboxes_header! {\n\n /**\n\n\n\n `Reply-To` header\n\n\n\n This header contains [`Mailboxes`][self::Mailboxes].\n\n\n\n */\n\n (ReplyTo, \"Reply-To\")\n\n}\n\n\n\nmailboxes_header! {\n\n /**\n\n\n\n `To` header\n\n\n\n This header contains [`Mailboxes`][self::Mailboxes].\n\n\n\n */\n", "file_path": "src/message/header/mailbox.rs", "rank": 89, "score": 36046.345876120475 }, { "content": " (To, \"To\")\n\n}\n\n\n\nmailboxes_header! {\n\n /**\n\n\n\n `Cc` header\n\n\n\n This header contains [`Mailboxes`][self::Mailboxes].\n\n\n\n */\n\n (Cc, \"Cc\")\n\n}\n\n\n\nmailboxes_header! {\n\n /**\n\n\n\n `Bcc` header\n\n\n\n This header contains [`Mailboxes`][self::Mailboxes].\n\n\n\n */\n\n (Bcc, \"Bcc\")\n\n}\n\n\n", "file_path": "src/message/header/mailbox.rs", "rank": 90, "score": 36046.302717288 }, { "content": "\n\n This header contains [`Mailbox`][self::Mailbox] associated with sender.\n\n\n\n ```no_test\n\n header::Sender(\"Mr. 
Sender <[email protected]>\".parse().unwrap())\n\n ```\n\n */\n\n (Sender, \"Sender\")\n\n}\n\n\n\nmailboxes_header! {\n\n /**\n\n\n\n `From` header\n\n\n\n This header contains [`Mailboxes`][self::Mailboxes].\n\n\n\n */\n\n (From, \"From\")\n\n}\n", "file_path": "src/message/header/mailbox.rs", "rank": 91, "score": 36046.11609211583 }, { "content": " /// let address = Address::new(\"example\", \"email.com\")?;\n\n /// let mailbox = Mailbox::new(None, address);\n\n /// # Ok(())\n\n /// # }\n\n /// ```\n\n pub fn new(name: Option<String>, email: Address) -> Self {\n\n Mailbox { name, email }\n\n }\n\n\n\n /// Encode addressee name using function\n\n pub(crate) fn recode_name<F>(&self, f: F) -> Self\n\n where\n\n F: FnOnce(&str) -> String,\n\n {\n\n Mailbox::new(self.name.clone().map(|s| f(&s)), self.email.clone())\n\n }\n\n}\n\n\n\nimpl Display for Mailbox {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> FmtResult {\n", "file_path": "src/message/mailbox/types.rs", "rank": 93, "score": 35.92789615612592 }, { "content": "//! Utils for string manipulation\n\n\n\nuse std::fmt::{Display, Formatter, Result as FmtResult};\n\n\n\n/// Encode a string as xtext\n\n#[derive(Debug)]\n\n#[cfg_attr(feature = \"serde\", derive(serde::Serialize, serde::Deserialize))]\n\npub struct XText<'a>(pub &'a str);\n\n\n\nimpl<'a> Display for XText<'a> {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> FmtResult {\n\n let mut rest = self.0;\n\n while let Some(idx) = rest.find(|c| c < '!' 
|| c == '+' || c == '=') {\n\n let (start, end) = rest.split_at(idx);\n\n f.write_str(start)?;\n\n\n\n let mut end_iter = end.char_indices();\n\n let (_, c) = end_iter.next().expect(\"char\");\n\n write!(f, \"+{:X}\", c as u8)?;\n\n\n", "file_path": "src/transport/smtp/util.rs", "rank": 94, "score": 29.33003313732365 }, { "content": "\n\n pub fn builder(domain: String) -> TlsParametersBuilder {\n\n TlsParametersBuilder::new(domain)\n\n }\n\n\n\n /// Creates a new `TlsParameters` using native-tls\n\n #[cfg(feature = \"native-tls\")]\n\n #[cfg_attr(docsrs, doc(cfg(feature = \"native-tls\")))]\n\n pub fn new_native(domain: String) -> Result<Self, Error> {\n\n TlsParametersBuilder::new(domain).build_native()\n\n }\n\n\n\n /// Creates a new `TlsParameters` using rustls\n\n #[cfg(feature = \"rustls-tls\")]\n\n #[cfg_attr(docsrs, doc(cfg(feature = \"rustls-tls\")))]\n\n pub fn new_rustls(domain: String) -> Result<Self, Error> {\n\n TlsParametersBuilder::new(domain).build_rustls()\n\n }\n\n\n\n pub fn domain(&self) -> &str {\n", "file_path": "src/transport/smtp/client/tls.rs", "rank": 95, "score": 26.747391166903213 }, { "content": " self.reverse_path\n\n .iter()\n\n .chain(self.forward_path.iter())\n\n .any(|a| !a.is_ascii())\n\n }\n\n}\n\n\n\n#[cfg(feature = \"builder\")]\n\nimpl TryFrom<&Headers> for Envelope {\n\n type Error = Error;\n\n\n\n fn try_from(headers: &Headers) -> Result<Self, Self::Error> {\n\n let from = match headers.get::<header::Sender>() {\n\n // If there is a Sender, use it\n\n Some(sender) => Some(Mailbox::from(sender.clone()).email),\n\n // ... 
else try From\n\n None => match headers.get::<header::From>() {\n\n Some(header::From(a)) => {\n\n let from: Vec<Mailbox> = a.clone().into();\n\n if from.len() > 1 {\n", "file_path": "src/address/envelope.rs", "rank": 96, "score": 26.45523307093175 }, { "content": "\n\n impl<'de> Visitor<'de> for MailboxVisitor {\n\n type Value = Mailbox;\n\n\n\n fn expecting(&self, formatter: &mut Formatter<'_>) -> FmtResult {\n\n formatter.write_str(\"mailbox string or object\")\n\n }\n\n\n\n fn visit_str<E>(self, s: &str) -> Result<Self::Value, E>\n\n where\n\n E: DeError,\n\n {\n\n s.parse().map_err(DeError::custom)\n\n }\n\n\n\n fn visit_map<V>(self, mut map: V) -> Result<Self::Value, V::Error>\n\n where\n\n V: MapAccess<'de>,\n\n {\n\n let mut name = None;\n", "file_path": "src/message/mailbox/serde.rs", "rank": 97, "score": 26.241286747812925 }, { "content": "#[cfg_attr(feature = \"serde\", derive(serde::Serialize, serde::Deserialize))]\n\npub struct Rset;\n\n\n\nimpl Display for Rset {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {\n\n f.write_str(\"RSET\\r\\n\")\n\n }\n\n}\n\n\n\n/// AUTH command\n\n#[derive(PartialEq, Clone, Debug)]\n\n#[cfg_attr(feature = \"serde\", derive(serde::Serialize, serde::Deserialize))]\n\npub struct Auth {\n\n mechanism: Mechanism,\n\n credentials: Credentials,\n\n challenge: Option<String>,\n\n response: Option<String>,\n\n}\n\n\n\nimpl Display for Auth {\n", "file_path": "src/transport/smtp/commands.rs", "rank": 98, "score": 26.136724348693928 }, { "content": " pub fn multipart(mut self, part: MultiPart) -> Self {\n\n self.parts.push(Part::Multi(part));\n\n self\n\n }\n\n\n\n /// Get the boundary of multipart contents\n\n pub fn boundary(&self) -> String {\n\n let content_type = &self.headers.get::<ContentType>().unwrap().0;\n\n content_type.get_param(\"boundary\").unwrap().as_str().into()\n\n }\n\n\n\n /// Get the headers from the multipart\n\n pub fn headers(&self) -> &Headers {\n\n &self.headers\n\n }\n\n\n\n /// Get a mutable 
reference to the headers\n\n pub fn headers_mut(&mut self) -> &mut Headers {\n\n &mut self.headers\n\n }\n", "file_path": "src/message/mimebody.rs", "rank": 99, "score": 26.120784171124015 } ]
Rust
src/io/pts.rs
I3ck/rust-3d
5139ab5ebdab7ad2a1f49422bc852d751fbfc4c1
/*
Copyright 2020 Martin Buck
Permission is hereby granted, free of charge, to any person obtaining a copy of this software
and associated documentation files (the "Software"), to deal in the Software without restriction,
including without limitation the rights to use, copy, modify, merge, publish, distribute,
sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or
substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING
BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/

//! Module for IO of the .pts file format: blocks consisting of a vertex count
//! line followed by that many `x y z` vertex lines, possibly repeated.

use crate::*;

use std::{
    fmt,
    io::{BufRead, Error as ioError},
    iter::FusedIterator,
    marker::PhantomData,
};

use super::{types::*, utils::*};

//------------------------------------------------------------------------------

/// Iterator over the .pts format, yielding a capacity hint
/// ([`DataReserve::Reserve`]) for every count line read and a parsed vertex
/// ([`DataReserve::Data`]) for each following point line.
pub struct PtsIterator<P, R>
where
    P: IsBuildable3D,
    R: BufRead,
{
    read: R,
    // set once an error occurred or the input is exhausted; the iterator
    // then stays fused and keeps returning None
    is_done: bool,
    // 1-based line counter used for error reporting
    i_line: usize,
    // reused across lines to avoid an allocation per line
    line_buffer: Vec<u8>,
    // vertex count of the current block, None before the first count line
    n_vertices: Option<usize>,
    // number of vertices already emitted for the current block
    n_vertices_added: usize,
    phantom_p: PhantomData<P>,
}

impl<P, R> PtsIterator<P, R>
where
    P: IsBuildable3D,
    R: BufRead,
{
    /// Creates a new iterator reading the .pts format from `read`
    pub fn new(read: R) -> Self {
        Self {
            read,
            is_done: false,
            i_line: 0,
            line_buffer: Vec::new(),
            n_vertices: None,
            n_vertices_added: 0,
            phantom_p: PhantomData,
        }
    }

    /// Parses a single `x y z` vertex from `line`
    ///
    /// # Errors
    /// [`PtsError::Vertex`] if fewer than three parsable coordinates are found
    #[inline(always)]
    pub fn fetch_one(line: &[u8]) -> PtsResult<P> {
        let mut words = to_words_skip_empty(line);

        let x = words
            .next()
            .and_then(|word| from_ascii(word))
            .ok_or(PtsError::Vertex)?;

        let y = words
            .next()
            .and_then(|word| from_ascii(word))
            .ok_or(PtsError::Vertex)?;

        let z = words
            .next()
            .and_then(|word| from_ascii(word))
            .ok_or(PtsError::Vertex)?;

        Ok(P::new(x, y, z))
    }

    /// Parses a vertex count line, attaching line information to any error.
    /// Shared by both places within next() that expect a count line.
    #[inline(always)]
    fn fetch_count(line: &[u8], i_line: usize) -> PtsIOResult<usize> {
        to_words_skip_empty(line)
            .next()
            .and_then(|word| from_ascii(word))
            .ok_or(PtsError::VertexCount)
            .line(i_line, line)
    }
}

impl<P, R> Iterator for PtsIterator<P, R>
where
    P: IsBuildable3D,
    R: BufRead,
{
    type Item = PtsIOResult<DataReserve<P>>;

    #[inline(always)]
    fn next(&mut self) -> Option<Self::Item> {
        if self.is_done {
            return None;
        }

        while let Ok(line) = fetch_line(&mut self.read, &mut self.line_buffer) {
            self.i_line += 1;

            if line.is_empty() {
                continue;
            }

            match self.n_vertices {
                // within a block that still expects vertices
                Some(n) if self.n_vertices_added < n => {
                    self.n_vertices_added += 1;
                    return Some(
                        Self::fetch_one(line)
                            .map(DataReserve::Data)
                            .line(self.i_line, line)
                            .map_err(|e| {
                                self.is_done = true;
                                e
                            }),
                    );
                }
                // either the very first line or the previous block is
                // complete: this line must be a new vertex count
                _ => {
                    self.n_vertices_added = 0;
                    match Self::fetch_count(line, self.i_line) {
                        Ok(n) => {
                            self.n_vertices = Some(n);
                            return Some(Ok(DataReserve::Reserve(n)));
                        }
                        Err(e) => {
                            self.is_done = true;
                            return Some(Err(e));
                        }
                    }
                }
            }
        }

        self.is_done = true;
        None
    }
}

impl<P, R> FusedIterator for PtsIterator<P, R>
where
    P: IsBuildable3D,
    R: BufRead,
{
}

/// Loads points from .pts format data in `read` into `ip`
///
/// # Errors
/// Forwards the first parse or IO error produced by the underlying
/// [`PtsIterator`]
pub fn load_pts<IP, P, R>(read: R, ip: &mut IP) -> PtsIOResult<()>
where
    IP: IsPushable<P>,
    P: IsBuildable3D,
    R: BufRead,
{
    let iterator = PtsIterator::new(read);

    for rd in iterator {
        match rd? {
            // count lines become capacity hints, data lines become points
            DataReserve::Reserve(x) => ip.reserve(x),
            DataReserve::Data(x) => ip.push(x),
        }
    }

    Ok(())
}

/// Error type for .pts loading
pub enum PtsError {
    AccessFile,
    VertexCount,
    Vertex,
}

/// Result type for .pts loading that carries line information on errors
pub type PtsIOResult<T> = IOResult<T, PtsError>;

type PtsResult<T> = std::result::Result<T, PtsError>;

impl fmt::Debug for PtsError {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            Self::AccessFile => write!(f, "Unable to access file"),
            Self::VertexCount => write!(f, "Unable to parse vertex count"),
            Self::Vertex => write!(f, "Unable to parse vertex"),
        }
    }
}

impl fmt::Display for PtsError {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{:?}", self)
    }
}

impl From<ioError> for PtsError {
    fn from(_error: ioError) -> Self {
        PtsError::AccessFile
    }
}
/* Copyright 2020 Martin Buck Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
*/ use crate::*; use std::{ fmt, io::{BufRead, Error as ioError}, iter::FusedIterator, marker::PhantomData, }; use super::{types::*, utils::*}; pub struct PtsIterator<P, R> where P: IsBuildable3D, R: BufRead, { read: R, is_done: bool, i_line: usize, line_buffer: Vec<u8>, n_vertices: Option<usize>, n_vertices_added: usize, phantom_p: PhantomData<P>, } impl<P, R> PtsIterator<P, R> where P: IsBuildable3D, R: BufRead, { pub fn new(read: R) -> Self { Self { read, is_done: false, i_line: 0, line_buffer: Vec::new(), n_vertices: None, n_vertices_added: 0, phantom_p: PhantomData, } } #[inline(always)] pub fn fetch_one(line: &[u8]) -> PtsResult<P> { let mut words = to_words_skip_empty(line); let x = words .next() .and_then(|word| from_ascii(word)) .ok_or(PtsError::Vertex)?; let y = words .next() .and_then(|word| from_ascii(word)) .ok_or(PtsError::Vertex)?; let z = words .next() .and_then(|word| from_ascii(word)) .ok_or(PtsError::Vertex)?; Ok(P::new(x, y, z)) } } impl<P, R> Iterator for PtsIterator<P, R> where P: IsBuildable3D, R: BufRead, { type Item = PtsIOResult<DataReserve<P>>; #[inline(always)] fn next(&mut self) -> Option<Self::Item> { if self.is_done { return None; } while let Ok(line) = fetch_line(&mut self.read, &mut self.line_buffer) { self.i_line += 1; if line.is_empty() { continue; } match self.n_vertices { None => { let mut words = to_words_skip_empty(line); self.n_vertices = match words .next() .and_then(|word| from_ascii(word)) .ok_or(PtsError::VertexCount) .line(self.i_line, line) { Ok(n) => Some(n), Err(e) => { self.is_done = true; return Some(Err(e)); } }; return Some(Ok(DataReserve::Reserve(self.n_vertices.unwrap()))); } Some(n) => { if self.n_vertices_added < n { self.n_vertices_added += 1; return Some( Self::fetch_one(line) .map(|x| DataReserve::Data(x)) .line(self.i_line, line) .map_err(|e| { self.is_done = true; e }), ); } else { self.n_vertices_added = 0; let mut words = to_words_skip_empty(line); self.n_vertices = match words .next() .and_then(|word| 
from_ascii(word)) .ok_or(PtsError::VertexCount) .line(self.i_line, line) { Ok(n) => Some(n), Err(e) => { self.is_done = true; return Some(Err(e)); } }; return Some(Ok(DataReserve::Reserve(self.n_vertices.unwrap()))); } } } } self.is_done = true; None } } impl<P, R> FusedIterator for PtsIterator<P, R> where P: IsBuildable3D, R: BufRead, { } pub fn load_pts<IP, P, R>(read: R, ip: &mut IP) -> PtsIOResult<()> where IP: IsPushable<P>, P: IsBuildable3D, R: BufRead, { let iterator = PtsIterator::new(read); for rd in iterator { match rd? { DataReserve::Reserve(x) => ip.reserve(x), DataReserve::Data(x) => ip.push(x), } } Ok(()) } pub enum PtsError { AccessFile, VertexCount, Vertex, } pub type PtsIOResult<T> = IOResult<T, PtsError>; type PtsResult<T> = std::result::Result<T, PtsError>; impl fmt::Debug for PtsError {
} impl fmt::Display for PtsError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{:?}", self) } } impl From<ioError> for PtsError { fn from(_error: ioError) -> Self { PtsError::AccessFile } }
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { Self::AccessFile => write!(f, "Unable to access file"), Self::VertexCount => write!(f, "Unable to parse vertex count"), Self::Vertex => write!(f, "Unable to parse vertex"), } }
function_block-full_function
[ { "content": "/// Splits an ASCII line into its words, skipping empty elements\n\npub fn to_words_skip_empty(line: &[u8]) -> impl Iterator<Item = &[u8]> {\n\n line.split(|x| *x == b' ' || *x == b'\\t').skip_empty()\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 0, "score": 505942.53733688314 }, { "content": "#[inline(always)]\n\npub fn fetch_line<'a, R>(read: &mut R, line_buffer: &'a mut Vec<u8>) -> FetchLineResult<&'a [u8]>\n\nwhere\n\n R: BufRead,\n\n{\n\n line_buffer.clear();\n\n let n_read = read.read_until(b'\\n', line_buffer)?;\n\n if n_read == 0 {\n\n return Err(FetchLineError);\n\n }\n\n\n\n // We must drop the '\\n' we read_until for sure\n\n // And might also have to drop additional whitespace\n\n let mut ignore_end = 1;\n\n for i in 1..line_buffer.len() {\n\n if (line_buffer[line_buffer.len() - i - 1] as char).is_whitespace() {\n\n ignore_end += 1;\n\n } else {\n\n break;\n\n }\n\n }\n", "file_path": "src/io/utils.rs", "rank": 2, "score": 454910.1478119269 }, { "content": "/// Loads IsPushable<Is3D> from the .off file format\n\npub fn load_off_points<IP, P, R>(read: R, ip: &mut IP) -> OffIOResult<()>\n\nwhere\n\n IP: IsPushable<P>,\n\n P: IsBuildable3D,\n\n R: BufRead,\n\n{\n\n let iterator = OffPointsIterator::new(read);\n\n\n\n for rd in iterator {\n\n match rd? 
{\n\n DataReserve::Reserve(x) => ip.reserve(x),\n\n DataReserve::Data(x) => ip.push(x),\n\n }\n\n }\n\n\n\n Ok(())\n\n}\n\n\n\n//------------------------------------------------------------------------------\n\n\n", "file_path": "src/io/off.rs", "rank": 3, "score": 453295.99670701846 }, { "content": "/// Loads a IsPushable<Is3D> as x y z coordinates from .psl files\n\npub fn load_psl<IP, P, R>(read: R, ip: &mut IP) -> PslResult<()>\n\nwhere\n\n IP: IsPushable<P>,\n\n P: IsBuildable3D,\n\n R: Read,\n\n{\n\n let iterator = PslIterator::new(read);\n\n\n\n for p in iterator {\n\n ip.push(p?)\n\n }\n\n\n\n Ok(())\n\n}\n\n\n\n//------------------------------------------------------------------------------\n\n\n", "file_path": "src/io/psl.rs", "rank": 4, "score": 448367.719197443 }, { "content": "/// Loads a IsPushable<Is2D> as x y coordinates. E.g. used to load the .xy file format or .csv files\n\npub fn load_xy<IP, P, R>(read: R, ip: &mut IP) -> XyIOResult<()>\n\nwhere\n\n IP: IsPushable<P>,\n\n P: IsBuildable2D,\n\n R: BufRead,\n\n{\n\n let iterator = XyIterator::new(read);\n\n\n\n for p in iterator {\n\n ip.push(p?)\n\n }\n\n\n\n Ok(())\n\n}\n\n\n\n//------------------------------------------------------------------------------\n\n\n\n/// Error type for .xy file operations\n\npub enum XyError {\n\n EstimateDelimiter,\n", "file_path": "src/io/xy.rs", "rank": 5, "score": 443637.6950092979 }, { "content": "/// Loads a IsPushable<Is3D> as x y z coordinates. E.g. 
used to load the .xyz file format or .csv file\n\npub fn load_xyz<IP, P, R>(read: R, ip: &mut IP) -> XyzIOResult<()>\n\nwhere\n\n IP: IsPushable<P>,\n\n P: IsBuildable3D,\n\n R: BufRead,\n\n{\n\n let iterator = XyzIterator::new(read);\n\n\n\n for p in iterator {\n\n ip.push(p?)\n\n }\n\n\n\n Ok(())\n\n}\n\n\n\n//------------------------------------------------------------------------------\n\n\n\n/// Error type for .xyz file operations\n\npub enum XyzError {\n\n EstimateDelimiter,\n", "file_path": "src/io/xyz.rs", "rank": 6, "score": 443637.6950092979 }, { "content": "/// Loads points from .ptx file into IsPushable<Is3D>\n\npub fn load_ptx<IP, P, R>(read: R, ip: &mut IP) -> PtxIOResult<()>\n\nwhere\n\n IP: IsPushable<P>,\n\n P: IsBuildable3D + IsMatrix4Transformable,\n\n R: BufRead,\n\n{\n\n let iterator = PtxIterator::new(read);\n\n\n\n for rd in iterator {\n\n match rd? {\n\n DataReserve::Reserve(x) => ip.reserve(x),\n\n DataReserve::Data(x) => ip.push(x),\n\n }\n\n }\n\n\n\n Ok(())\n\n}\n\n\n\n//------------------------------------------------------------------------------\n\n\n", "file_path": "src/io/ptx.rs", "rank": 7, "score": 443627.4240706493 }, { "content": "/// Loads points from .las file into IsPushable<IsBuildable3D>\n\npub fn load_las<IP, P, R>(read: R, ip: &mut IP) -> LasResult<()>\n\nwhere\n\n IP: IsPushable<P>,\n\n P: IsBuildable3D,\n\n R: BufRead + Seek,\n\n{\n\n let iterator = LasIterator::new(read)?;\n\n\n\n for rd in iterator {\n\n match rd? 
{\n\n DataReserve::Reserve(x) => ip.reserve(x),\n\n DataReserve::Data(x) => ip.push(x),\n\n }\n\n }\n\n\n\n Ok(())\n\n}\n\n\n\n//------------------------------------------------------------------------------\n\n\n", "file_path": "src/io/las/load.rs", "rank": 8, "score": 443627.4240706494 }, { "content": "/// Loads a IsPushable<Is3D> as x y z coordinates from gcode\n\npub fn load_gcode_points<IP, P, R>(read: R, ip: &mut IP) -> GcodeResult<()>\n\nwhere\n\n IP: IsPushable<P>,\n\n P: IsBuildable3D,\n\n R: BufRead,\n\n{\n\n let iterator = GcodeIterator::new(read);\n\n\n\n for p in iterator {\n\n ip.push(p?)\n\n }\n\n\n\n Ok(())\n\n}\n\n\n\n//------------------------------------------------------------------------------\n\n\n", "file_path": "src/io/gcode.rs", "rank": 9, "score": 443622.2750499957 }, { "content": "/// Loads IsPushable<Is3D> from the .obj file format\n\npub fn load_obj_points<IP, P, R>(read: R, ip: &mut IP) -> ObjIOResult<()>\n\nwhere\n\n IP: IsPushable<P>,\n\n P: IsBuildable3D,\n\n R: BufRead,\n\n{\n\n let iterator = ObjPointsIterator::new(read);\n\n\n\n for p in iterator {\n\n ip.push(p?)\n\n }\n\n\n\n Ok(())\n\n}\n\n\n\n//------------------------------------------------------------------------------\n\n\n\n/// Error type for .obj file operations\n\npub enum ObjError {\n\n AccessFile,\n\n InvalidMeshIndices,\n\n Face,\n\n Vertex,\n\n}\n\n\n\n/// Result type for .obj file operations\n\npub type ObjIOResult<T> = IOResult<T, ObjError>;\n", "file_path": "src/io/obj.rs", "rank": 10, "score": 439064.13382031594 }, { "content": "/// Loads the points from the .ply file into IsPushable<Is3D>\n\npub fn load_ply_points<IP, P, R>(read: R, ip: &mut IP) -> PlyIOResult<()>\n\nwhere\n\n IP: IsPushable<P>,\n\n P: IsBuildable3D,\n\n R: BufRead,\n\n{\n\n let iterator = PlyPointsIterator::new(read)?;\n\n\n\n for rp in iterator {\n\n match rp? 
{\n\n DataReserve::Reserve(x) => ip.reserve(x),\n\n DataReserve::Data(x) => ip.push(x),\n\n }\n\n }\n\n\n\n Ok(())\n\n}\n\n\n\n//------------------------------------------------------------------------------\n\n//------------------------------------------------------------------------------\n\n//------------------------------------------------------------------------------\n\n\n\n//------------------------------------------------------------------------------\n\n\n", "file_path": "src/io/ply/load.rs", "rank": 11, "score": 434667.7622542421 }, { "content": "#[inline(always)]\n\npub fn skip_bytes<R>(read: &mut R, n_bytes: usize) -> std::io::Result<()>\n\nwhere\n\n R: Read,\n\n{\n\n let mut buffer = [0u8; 1];\n\n for _ in 0..n_bytes {\n\n let _ = read.read_exact(&mut buffer)?;\n\n }\n\n\n\n Ok(())\n\n}\n\n\n\n/// Skip number of elements\n", "file_path": "src/io/utils.rs", "rank": 12, "score": 432402.12427900016 }, { "content": "#[inline(always)]\n\npub fn read_vertex_type<BR, R>(read: &mut R, t: VertexType) -> PlyResult<f64>\n\nwhere\n\n BR: IsByteReader,\n\n R: Read,\n\n{\n\n Ok(match t {\n\n VertexType::Float => BR::read_f32(read)? as f64,\n\n VertexType::Double => BR::read_f64(read)?,\n\n })\n\n}\n\n\n\n//------------------------------------------------------------------------------\n\n\n", "file_path": "src/io/ply/utils.rs", "rank": 13, "score": 426942.33833343704 }, { "content": "#[inline(always)]\n\npub fn read_face_type<BR, R>(read: &mut R, t: FaceType) -> PlyResult<usize>\n\nwhere\n\n BR: IsByteReader,\n\n R: Read,\n\n{\n\n Ok(match t {\n\n FaceType::Char => BR::read_i8(read)? as usize,\n\n FaceType::UChar => BR::read_u8(read)? as usize,\n\n FaceType::Short => BR::read_i16(read)? as usize,\n\n FaceType::UShort => BR::read_u16(read)? as usize,\n\n FaceType::Int => BR::read_i32(read)? as usize,\n\n FaceType::UInt => BR::read_u32(read)? 
as usize,\n\n })\n\n}\n\n\n\n//------------------------------------------------------------------------------\n\n\n", "file_path": "src/io/ply/utils.rs", "rank": 14, "score": 414742.85543160856 }, { "content": "/// Loads an IsMesh3D from the off file format\n\npub fn load_off_mesh<EM, P, R>(read: R, mesh: &mut EM) -> OffIOResult<()>\n\nwhere\n\n EM: IsFaceEditableMesh<P, Face3> + IsVertexEditableMesh<P, Face3>,\n\n P: IsBuildable3D + Clone,\n\n R: BufRead,\n\n{\n\n let iterator = OffMeshIterator::<P, R>::new(read);\n\n\n\n for rd in iterator {\n\n match rd? {\n\n FaceDataReserve::Face([a, b, c]) => {\n\n mesh.try_add_connection(VId(a), VId(b), VId(c))\n\n .map_err(|_| OffError::InvalidMeshIndices)\n\n .simple()?;\n\n }\n\n FaceDataReserve::ReserveDataFaces(n_vertices, n_faces) => {\n\n mesh.reserve_vertices(n_vertices);\n\n mesh.reserve_faces(n_faces);\n\n }\n\n FaceDataReserve::Data(x) => {\n\n mesh.add_vertex(x);\n\n }\n\n }\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/io/off.rs", "rank": 15, "score": 361905.75759236934 }, { "content": "fn read_stl_vertex<P>(line: &[u8]) -> Option<P>\n\nwhere\n\n P: IsBuildable3D,\n\n{\n\n let mut words = to_words_skip_empty(line);\n\n\n\n // skip \"vertex\"\n\n words.next()?;\n\n\n\n let x = from_ascii(words.next()?)?;\n\n let y = from_ascii(words.next()?)?;\n\n let z = from_ascii(words.next()?)?;\n\n\n\n Some(P::new(x, y, z))\n\n}\n\n\n\n//------------------------------------------------------------------------------\n\n\n", "file_path": "src/io/stl.rs", "rank": 16, "score": 356367.1531659133 }, { "content": "/// Loads an IsMesh3D from the .obj file format\n\npub fn load_obj_mesh<EM, P, R>(read: R, mesh: &mut EM) -> ObjIOResult<()>\n\nwhere\n\n EM: IsFaceEditableMesh<P, Face3> + IsVertexEditableMesh<P, Face3>,\n\n P: IsBuildable3D + Clone,\n\n R: BufRead,\n\n{\n\n let iterator = ObjMeshIterator::new(read);\n\n\n\n for rd in iterator {\n\n match rd? 
{\n\n FaceData::Data(x) => {\n\n mesh.add_vertex(x);\n\n }\n\n FaceData::Face([a, b, c]) => {\n\n mesh.try_add_connection(VId(a), VId(b), VId(c))\n\n .map_err(|_| ObjError::InvalidMeshIndices)\n\n .simple()?;\n\n }\n\n }\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/io/obj.rs", "rank": 17, "score": 349388.85670455854 }, { "content": "#[inline(always)]\n\nfn fetch_point<R, P>(read: &mut R) -> PslResult<P>\n\nwhere\n\n R: Read,\n\n P: IsBuildable3D,\n\n{\n\n let x = LittleReader::read_f32(read)?;\n\n let y = LittleReader::read_f32(read)?;\n\n let z = LittleReader::read_f32(read)?;\n\n\n\n Ok(P::new(x as f64, y as f64, z as f64))\n\n}\n\n\n\n//------------------------------------------------------------------------------\n\n\n\n/// Error type for .psl file operations\n\npub enum PslError {\n\n AccessFile,\n\n}\n\n\n\n/// Result type for .psl file operations\n", "file_path": "src/io/psl.rs", "rank": 18, "score": 348864.1607604684 }, { "content": "/// Loads an IsMesh3D from the .ply file format\n\npub fn load_ply_mesh<EM, P, R>(read: R, mesh: &mut EM) -> PlyIOResult<()>\n\nwhere\n\n EM: IsFaceEditableMesh<P, Face3> + IsVertexEditableMesh<P, Face3>,\n\n P: IsBuildable3D,\n\n R: BufRead,\n\n{\n\n let iterator = PlyMeshIterator::new(read)?;\n\n\n\n for fr in iterator {\n\n match fr? 
{\n\n FaceDataReserve::Data(p) => {\n\n mesh.add_vertex(p);\n\n }\n\n FaceDataReserve::ReserveDataFaces(n_d, n_f) => {\n\n mesh.reserve_vertices(n_d);\n\n mesh.reserve_faces(n_f);\n\n }\n\n FaceDataReserve::Face([a, b, c]) => {\n\n mesh.try_add_connection(VId(a), VId(b), VId(c))\n\n .or(Err(PlyError::InvalidMeshIndices))\n\n .simple()?;\n\n }\n\n }\n\n }\n\n\n\n Ok(())\n\n}\n\n\n\n//------------------------------------------------------------------------------\n\n\n", "file_path": "src/io/ply/load.rs", "rank": 19, "score": 345539.43552791886 }, { "content": "/// Estimates the used delimiter within a string\n\npub fn estimate_delimiter(minimum_count: usize, line: &[u8]) -> Option<u8> {\n\n for candidate in [b' ', b';', b',', b'\\t'].iter() {\n\n if line.iter().filter(|c| **c == *candidate).count() >= minimum_count {\n\n return Some(*candidate);\n\n }\n\n }\n\n None\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 20, "score": 340331.5190633712 }, { "content": "#[inline(always)]\n\npub fn collect_index_line(line: &[u8]) -> Option<[usize; 3]> {\n\n let mut words = to_words_skip_empty(line);\n\n if words.next()? 
!= b\"3\" {\n\n return None;\n\n }\n\n\n\n let a = from_ascii(words.next()?)?;\n\n let b = from_ascii(words.next()?)?;\n\n let c = from_ascii(words.next()?)?;\n\n\n\n Some([a, b, c])\n\n}\n\n\n\n//------------------------------------------------------------------------------\n\n\n", "file_path": "src/io/ply/utils.rs", "rank": 21, "score": 335237.2262327421 }, { "content": "#[inline(always)]\n\nfn fetch_pass_header_return_n_lines<R>(read: &mut R) -> PslResult<i32>\n\nwhere\n\n R: Read,\n\n{\n\n let n_lines = LittleReader::read_i32(read)?;\n\n let _scanner_id = LittleReader::read_i32(read)?;\n\n\n\n // reserved 14*i32\n\n {\n\n let mut buffer = [0u8; 56];\n\n read.read_exact(&mut buffer)?;\n\n }\n\n\n\n Ok(n_lines)\n\n}\n\n\n\n//------------------------------------------------------------------------------\n\n\n", "file_path": "src/io/psl.rs", "rank": 22, "score": 333605.15988392953 }, { "content": "#[inline(always)]\n\nfn fetch_line_header_return_n_points<R>(read: &mut R) -> PslResult<i32>\n\nwhere\n\n R: Read,\n\n{\n\n let n_points = LittleReader::read_i32(read)?;\n\n\n\n // ijk 3*f32\n\n {\n\n let mut buffer = [0u8; 12];\n\n read.read_exact(&mut buffer)?;\n\n }\n\n\n\n // reserved 12*i32\n\n {\n\n let mut buffer = [0u8; 48];\n\n read.read_exact(&mut buffer)?;\n\n }\n\n\n\n Ok(n_points)\n\n}\n\n\n\n//------------------------------------------------------------------------------\n\n\n", "file_path": "src/io/psl.rs", "rank": 23, "score": 333605.15988392953 }, { "content": "#[inline(always)]\n\nfn fetch_vertex<P>(line: &[u8]) -> OffResult<P>\n\nwhere\n\n P: IsBuildable3D,\n\n{\n\n let mut words = to_words_skip_empty(line);\n\n\n\n let x = words\n\n .next()\n\n .and_then(|word| from_ascii(word))\n\n .ok_or(OffError::Vertex)?;\n\n\n\n let y = words\n\n .next()\n\n .and_then(|word| from_ascii(word))\n\n .ok_or(OffError::Vertex)?;\n\n\n\n let z = words\n\n .next()\n\n .and_then(|word| from_ascii(word))\n\n .ok_or(OffError::Vertex)?;\n\n\n\n Ok(P::new(x, y, z))\n\n}\n", 
"file_path": "src/io/off.rs", "rank": 24, "score": 328141.05738992675 }, { "content": "/// Loads points from .stl file as triplets into IsPushable<IsBuildable3D>\n\npub fn load_stl_triplets<IP, P, R, IPN>(\n\n read: R,\n\n format: StlFormat,\n\n ip: &mut IP,\n\n face_normals: &mut IPN,\n\n) -> StlIOResult<()>\n\nwhere\n\n IP: IsPushable<P>,\n\n P: IsBuildable3D,\n\n R: BufRead,\n\n IPN: IsPushable<P>,\n\n{\n\n let iterator = StlIterator::new(read, format)?;\n\n\n\n for fr in iterator {\n\n match fr? {\n\n DataReserve::Reserve(n) => {\n\n ip.reserve(3 * n);\n\n face_normals.reserve(n);\n\n }\n", "file_path": "src/io/stl.rs", "rank": 25, "score": 322708.06984125916 }, { "content": "fn is_ascii<R>(read: &mut R, format: StlFormat) -> StlResult<bool>\n\nwhere\n\n R: BufRead,\n\n{\n\n let solid = \"solid\".as_bytes();\n\n let mut buffer = [0u8; 5];\n\n\n\n let mut result = true;\n\n read.read_exact(&mut buffer)?;\n\n\n\n for i in 0..5 {\n\n if buffer[i] != solid[i] {\n\n result = false\n\n }\n\n }\n\n\n\n // It is important to always consume the bytes above, even if format defines the result\n\n Ok(match format {\n\n StlFormat::Ascii => true,\n\n StlFormat::Binary => false,\n\n StlFormat::Auto => result,\n\n })\n\n}\n\n\n\n//------------------------------------------------------------------------------\n\n\n", "file_path": "src/io/stl.rs", "rank": 26, "score": 322401.16152213514 }, { "content": "/// Returns all until delimiter\n\npub fn until_bytes<'a>(line: &'a [u8], delimiter: u8) -> &'a [u8] {\n\n line.split(|x| *x == delimiter).next().unwrap_or(&[])\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 27, "score": 320378.7848063078 }, { "content": "#[inline(always)]\n\nfn fetch_vertex<P>(line: &[u8]) -> ObjResult<P>\n\nwhere\n\n P: IsBuildable3D,\n\n{\n\n let mut words = to_words_skip_empty(line);\n\n\n\n // skip \"v\"\n\n words.next().ok_or(ObjError::Vertex)?;\n\n\n\n let x = words\n\n .next()\n\n .and_then(|w| from_ascii(w))\n\n .ok_or(ObjError::Vertex)?;\n\n\n\n let 
y = words\n\n .next()\n\n .and_then(|w| from_ascii(w))\n\n .ok_or(ObjError::Vertex)?;\n\n\n\n let z = words\n\n .next()\n\n .and_then(|w| from_ascii(w))\n\n .ok_or(ObjError::Vertex)?;\n\n\n\n Ok(P::new(x, y, z))\n\n}\n\n\n", "file_path": "src/io/obj.rs", "rank": 28, "score": 318919.3596079021 }, { "content": "fn read_stl_normal<P>(line: &[u8]) -> Option<P>\n\nwhere\n\n P: IsBuildable3D,\n\n{\n\n let mut words = to_words_skip_empty(line);\n\n\n\n // skip \"facet\"\n\n words.next()?;\n\n\n\n // skip \"normal\"\n\n words.next()?;\n\n\n\n let i = from_ascii(words.next()?)?;\n\n let j = from_ascii(words.next()?)?;\n\n let k = from_ascii(words.next()?)?;\n\n\n\n Some(P::new(i, j, k))\n\n}\n\n\n\n//------------------------------------------------------------------------------\n\n\n", "file_path": "src/io/stl.rs", "rank": 29, "score": 318829.91596483224 }, { "content": "/// Loads an IsMesh3D from the .ply file format if possible, otherwise tries loading point data. Returning which of the two was possible\n\npub fn load_ply_either<EM, IP, P, R>(\n\n mut read: R,\n\n mesh: &mut EM,\n\n ip: &mut IP,\n\n) -> PlyIOResult<MeshOrPoints>\n\nwhere\n\n EM: IsFaceEditableMesh<P, Face3> + IsVertexEditableMesh<P, Face3>,\n\n IP: IsPushable<P>,\n\n P: IsBuildable3D,\n\n R: BufRead,\n\n{\n\n let mut line_buffer = Vec::new();\n\n let mut i_line = 0;\n\n\n\n match load_header(&mut read, &mut line_buffer, &mut i_line)? 
{\n\n Header::Full(header) => {\n\n mesh.reserve_vertices(header.vertex.count);\n\n mesh.reserve_faces(header.face.count);\n\n\n\n match header.format {\n", "file_path": "src/io/ply/load.rs", "rank": 30, "score": 318308.2387814213 }, { "content": "#[inline(always)]\n\npub fn skip_n<I>(i: &mut I, n: usize)\n\nwhere\n\n I: Iterator,\n\n{\n\n for _ in 0..n {\n\n i.next();\n\n }\n\n}\n\n\n\n//------------------------------------------------------------------------------\n\n\n", "file_path": "src/io/utils.rs", "rank": 31, "score": 306697.36858314165 }, { "content": "#[inline(always)]\n\nfn fetch_header_return_n_passes<R>(read: &mut R) -> PslResult<i32>\n\nwhere\n\n R: Read,\n\n{\n\n // header\n\n {\n\n let mut buffer = [0u8; 4];\n\n read.read_exact(&mut buffer)?;\n\n }\n\n\n\n let _version = LittleReader::read_i32(read)?;\n\n\n\n // comments\n\n {\n\n let mut buffer = [0u8; 128];\n\n read.read_exact(&mut buffer)?;\n\n }\n\n\n\n let n_passes = LittleReader::read_i32(read)?;\n\n\n", "file_path": "src/io/psl.rs", "rank": 32, "score": 303265.4457650775 }, { "content": "fn load_mesh_binary<BR, EM, P, R>(read: &mut R, mesh: &mut EM, header: FullHeader) -> PlyResult<()>\n\nwhere\n\n EM: IsFaceEditableMesh<P, Face3> + IsVertexEditableMesh<P, Face3>,\n\n P: IsBuildable3D,\n\n R: Read,\n\n BR: IsByteReader,\n\n{\n\n let iterator = PlyBinaryMeshIterator::<BR, _, _>::new(read, header);\n\n\n\n for fd in iterator {\n\n match fd? 
{\n\n io::types::FaceData::Data(p) => {\n\n mesh.add_vertex(p);\n\n }\n\n io::types::FaceData::Face([a, b, c]) => {\n\n mesh.try_add_connection(VId(a), VId(b), VId(c))\n\n .or(Err(PlyError::InvalidMeshIndices))?;\n\n }\n\n }\n\n }\n\n\n\n Ok(())\n\n}\n\n\n\n//------------------------------------------------------------------------------\n\n\n", "file_path": "src/io/ply/load.rs", "rank": 33, "score": 295703.7619093994 }, { "content": "#[inline(always)]\n\nfn read_stl_triangle<R>(read: &mut R) -> StlResult<StlTriangle>\n\nwhere\n\n R: Read,\n\n{\n\n // size for StlTriangle + u16 garbage\n\n let mut buffer = [0u8; 50];\n\n read.read_exact(&mut buffer)?;\n\n\n\n Ok(StlTriangle {\n\n n: array_from_bytes_le!(f32, 3, &buffer[0..12])?,\n\n x: array_from_bytes_le!(f32, 3, &buffer[12..24])?,\n\n y: array_from_bytes_le!(f32, 3, &buffer[24..36])?,\n\n z: array_from_bytes_le!(f32, 3, &buffer[36..48])?,\n\n })\n\n}\n\n\n\n//------------------------------------------------------------------------------\n\n\n", "file_path": "src/io/stl.rs", "rank": 34, "score": 280181.54473207967 }, { "content": "fn load_points_ascii<IP, P, R>(\n\n read: &mut R,\n\n ip: &mut IP,\n\n header: PartialHeader,\n\n i_line: usize,\n\n) -> PlyIOResult<()>\n\nwhere\n\n IP: IsPushable<P>,\n\n P: IsBuildable3D,\n\n R: BufRead,\n\n{\n\n let iterator = PlyAsciiPointsIterator::new(read, header, i_line);\n\n\n\n for p in iterator {\n\n ip.push(p?)\n\n }\n\n\n\n Ok(())\n\n}\n\n\n\n//------------------------------------------------------------------------------\n\n\n", "file_path": "src/io/ply/load.rs", "rank": 35, "score": 274882.39578801487 }, { "content": "/// Helper function to sort a Vec of Is3D by y\n\npub fn sort_vec_3d_y<P>(xs: &mut Vec<P>)\n\nwhere\n\n P: Is3D,\n\n{\n\n xs.sort_by(|a, b| {\n\n a.y()\n\n .partial_cmp(&b.y())\n\n .or_else(|| a.z().partial_cmp(&b.z()))\n\n .or_else(|| a.x().partial_cmp(&b.x()))\n\n .unwrap_or(Ordering::Equal)\n\n });\n\n}\n\n\n", "file_path": "src/functions.rs", "rank": 36, 
"score": 274208.9240347279 }, { "content": "/// Helper function to sort a Vec of Is2D by x\n\npub fn sort_vec_2d_x<P>(xs: &mut Vec<P>)\n\nwhere\n\n P: Is2D,\n\n{\n\n xs.sort_by(|a, b| {\n\n a.x()\n\n .partial_cmp(&b.x())\n\n .or_else(|| a.y().partial_cmp(&b.y()))\n\n .unwrap_or(Ordering::Equal)\n\n });\n\n}\n\n\n", "file_path": "src/functions.rs", "rank": 37, "score": 274208.9240347279 }, { "content": "/// Helper function to sort a Vec of Is3D by x\n\npub fn sort_vec_3d_x<P>(xs: &mut Vec<P>)\n\nwhere\n\n P: Is3D,\n\n{\n\n xs.sort_by(|a, b| {\n\n a.x()\n\n .partial_cmp(&b.x())\n\n .or_else(|| a.y().partial_cmp(&b.y()))\n\n .or_else(|| a.z().partial_cmp(&b.z()))\n\n .unwrap_or(Ordering::Equal)\n\n });\n\n}\n\n\n", "file_path": "src/functions.rs", "rank": 38, "score": 274208.9240347279 }, { "content": "/// Helper function to sort a Vec of Is2D by y\n\npub fn sort_vec_2d_y<P>(xs: &mut Vec<P>)\n\nwhere\n\n P: Is2D,\n\n{\n\n xs.sort_by(|a, b| {\n\n a.y()\n\n .partial_cmp(&b.y())\n\n .or_else(|| a.x().partial_cmp(&b.x()))\n\n .unwrap_or(Ordering::Equal)\n\n });\n\n}\n\n\n", "file_path": "src/functions.rs", "rank": 39, "score": 274208.9240347279 }, { "content": "/// Helper function to sort a Vec of Is3D by z\n\npub fn sort_vec_3d_z<P>(xs: &mut Vec<P>)\n\nwhere\n\n P: Is3D,\n\n{\n\n xs.sort_by(|a, b| {\n\n a.z()\n\n .partial_cmp(&b.z())\n\n .or_else(|| a.x().partial_cmp(&b.x()))\n\n .or_else(|| a.y().partial_cmp(&b.y()))\n\n .unwrap_or(Ordering::Equal)\n\n });\n\n}\n\n\n", "file_path": "src/functions.rs", "rank": 40, "score": 274208.9240347279 }, { "content": "fn load_header<R>(mut read: R) -> LasResult<HeaderRaw>\n\nwhere\n\n R: Read,\n\n{\n\n let mut buffer = [0u8; 375];\n\n read.read_exact(&mut buffer)?;\n\n\n\n Ok(HeaderRaw {\n\n file_signature: array_from_bytes_le!(u8, 4, &buffer[0..4])?, //4 4\n\n file_source_id: u16::from_le_bytes(buffer[4..6].try_into()?), //2 6\n\n global_encoding: u16::from_le_bytes(buffer[6..8].try_into()?), //2 8\n\n guid1: 
u32::from_le_bytes(buffer[8..12].try_into()?), //4 12\n\n guid2: u16::from_le_bytes(buffer[12..14].try_into()?), //2 14\n\n guid3: u16::from_le_bytes(buffer[14..16].try_into()?), //2 16\n\n guid4: buffer[16..24].try_into()?, //8 24\n\n version_major: u8::from_le_bytes(buffer[24..25].try_into()?), //1 25\n\n version_minor: u8::from_le_bytes(buffer[25..26].try_into()?), //1 26\n\n system_identifier: array_from_bytes_le!(u8, 32, &buffer[26..58])?, //32 58\n\n generating_software: array_from_bytes_le!(u8, 32, &buffer[58..90])?, //32 90\n\n file_creation_day: u16::from_le_bytes(buffer[90..92].try_into()?), //2 92\n", "file_path": "src/io/las/load.rs", "rank": 41, "score": 268256.5811298876 }, { "content": "fn load_points_binary<BR, IP, P, R>(\n\n read: &mut R,\n\n ip: &mut IP,\n\n header: PartialHeader,\n\n) -> PlyResult<()>\n\nwhere\n\n IP: IsPushable<P>,\n\n P: IsBuildable3D,\n\n R: Read,\n\n BR: IsByteReader,\n\n{\n\n let iterator = PlyBinaryPointsIterator::<BR, _, _>::new(read, header);\n\n\n\n for p in iterator {\n\n ip.push(p?)\n\n }\n\n\n\n Ok(())\n\n}\n\n\n\n//------------------------------------------------------------------------------\n\n\n", "file_path": "src/io/ply/load.rs", "rank": 42, "score": 265906.12586406 }, { "content": "struct StlAsciiIterator<P, R>\n\nwhere\n\n P: IsBuildable3D,\n\n R: Read,\n\n{\n\n read: R,\n\n is_done: bool,\n\n header_read: bool,\n\n i_line: usize,\n\n line_buffer: Vec<u8>,\n\n phantom: PhantomData<P>,\n\n}\n\n\n\nimpl<P, R> StlAsciiIterator<P, R>\n\nwhere\n\n P: IsBuildable3D,\n\n R: BufRead,\n\n{\n\n pub fn new(read: R) -> Self {\n\n Self {\n", "file_path": "src/io/stl.rs", "rank": 43, "score": 261065.8890955507 }, { "content": "struct StlBinaryIterator<P, R>\n\nwhere\n\n P: IsBuildable3D,\n\n R: Read,\n\n{\n\n read: R,\n\n is_done: bool,\n\n header_read: bool,\n\n n_triangles: usize,\n\n current: usize,\n\n phantom: PhantomData<P>, //@todo others name this phantom_p, unecessary there in most cases\n\n}\n\n\n\nimpl<P, R> 
StlBinaryIterator<P, R>\n\nwhere\n\n P: IsBuildable3D,\n\n R: Read,\n\n{\n\n pub fn new(read: R) -> Self {\n\n Self {\n", "file_path": "src/io/stl.rs", "rank": 44, "score": 261065.8890955507 }, { "content": "enum BinaryOrAsciiIterator<P, R>\n\nwhere\n\n P: IsBuildable3D,\n\n R: BufRead,\n\n{\n\n Binary(StlBinaryIterator<P, R>),\n\n Ascii(StlAsciiIterator<P, R>),\n\n}\n\n\n\n//------------------------------------------------------------------------------\n\n\n", "file_path": "src/io/stl.rs", "rank": 45, "score": 261026.56504025002 }, { "content": "/// Trims white space at the start of the input\n\npub fn trim_start(text: &[u8]) -> &[u8] {\n\n let mut to_drop = 0;\n\n for c in text {\n\n if !(*c == b' ' || *c == b'\\t') {\n\n break;\n\n }\n\n to_drop += 1;\n\n }\n\n\n\n &text[to_drop..]\n\n}\n\n\n\n//------------------------------------------------------------------------------\n\n\n\n/// Reads a FromStr from ASCII bytes\n", "file_path": "src/io/utils.rs", "rank": 46, "score": 259125.6240304574 }, { "content": "/// Adds two Is3D values\n\npub fn add<P, Q>(p: &P, q: &Q) -> P\n\nwhere\n\n P: IsBuildable3D,\n\n Q: Is3D,\n\n{\n\n P::new(p.x() + q.x(), p.y() + q.y(), p.z() + q.z())\n\n}\n", "file_path": "src/utils.rs", "rank": 47, "score": 257091.73384088365 }, { "content": "fn read_stl_facet<P, R>(\n\n read: &mut R,\n\n line_buffer: &mut Vec<u8>,\n\n i_line: &mut usize,\n\n) -> StlIOResult<[P; 4]>\n\nwhere\n\n P: IsBuildable3D,\n\n R: BufRead,\n\n{\n\n let mut line: &[u8];\n\n\n\n line = trim_start(fetch_line(read, line_buffer).index(*i_line)?);\n\n *i_line += 1;\n\n\n\n if line.starts_with(b\"endsolid\") {\n\n return Err(StlError::LoadFileEndReached).line(*i_line, line);\n\n }\n\n\n\n if !line.starts_with(b\"facet\") {\n\n return Err(StlError::Facet).line(*i_line, line);\n", "file_path": "src/io/stl.rs", "rank": 48, "score": 254400.6075362278 }, { "content": "#[inline(always)]\n\npub fn hash_f64<H>(x: f64, state: &mut H)\n\nwhere\n\n H: Hasher,\n\n{\n\n 
x.to_bits().hash(state);\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 49, "score": 252921.63756142085 }, { "content": "/// Loads a Mesh from .stl file with duplicate vertices\n\npub fn load_stl_mesh_duped<EM, P, R, IPN>(\n\n read: R,\n\n format: StlFormat,\n\n mesh: &mut EM,\n\n face_normals: &mut IPN,\n\n) -> StlIOResult<()>\n\nwhere\n\n EM: IsFaceEditableMesh<P, Face3> + IsVertexEditableMesh<P, Face3>,\n\n P: IsBuildable3D + Clone,\n\n R: BufRead,\n\n IPN: IsPushable<P>,\n\n{\n\n let iterator = StlIterator::new(read, format)?;\n\n\n\n for fr in iterator {\n\n match fr? {\n\n DataReserve::Reserve(n) => {\n\n mesh.reserve_vertices(3 * n);\n\n mesh.reserve_faces(n);\n\n }\n", "file_path": "src/io/stl.rs", "rank": 50, "score": 251744.0086134454 }, { "content": "/// Loads a Mesh from .stl file with unique vertices, dropping invalid triangles\n\npub fn load_stl_mesh_unique<EM, P, R, IPN>(\n\n read: R,\n\n format: StlFormat,\n\n mesh: &mut EM,\n\n face_normals: &mut IPN,\n\n) -> StlIOResult<()>\n\nwhere\n\n EM: IsFaceEditableMesh<P, Face3> + IsVertexEditableMesh<P, Face3>,\n\n P: IsBuildable3D + Clone,\n\n R: BufRead,\n\n IPN: IsPushable<P>,\n\n{\n\n let mut map = FnvHashMap::default();\n\n let iterator = StlIterator::<P, R>::new(read, format)?;\n\n\n\n for fr in iterator {\n\n match fr? 
{\n\n DataReserve::Reserve(n) => {\n\n //Can't reserve vertices since not sure how many are unique\n\n mesh.reserve_faces(n);\n", "file_path": "src/io/stl.rs", "rank": 51, "score": 251743.89331782213 }, { "content": "fn distance_point_line<P, Q, R>(p: &P, l1: &Q, l2: &R) -> f64\n\nwhere\n\n P: Is2D,\n\n Q: Is2D,\n\n R: Is2D,\n\n{\n\n let a1 = l1.x();\n\n let a2 = l1.y();\n\n\n\n let b1 = l2.x();\n\n let b2 = l2.y();\n\n\n\n let c1 = p.x();\n\n let c2 = p.y();\n\n\n\n let x = (a1 * a1 * c1 - a1 * a2 * b2 + a1 * a2 * c2 - 2.0 * a1 * b1 * c1 + a1 * b2 * b2\n\n - a1 * b2 * c2\n\n + a2 * a2 * b1\n\n - a2 * b1 * b2\n\n - a2 * b1 * c2\n\n + b1 * b1 * c1\n\n + b1 * b2 * c2)\n\n / (a1 * a1 - 2.0 * a1 * b1 + a2 * a2 - 2.0 * a2 * b2 + b1 * b1 + b2 * b2);\n\n\n\n let y = ((a2 - b2) * x + a1 * b2 - a2 * b1) / (a1 - b1);\n\n\n\n ((x - p.x()).powi(2) + (y - p.y()).powi(2)).sqrt()\n\n}\n", "file_path": "src/douglas_peucker_2d.rs", "rank": 52, "score": 250135.2881488454 }, { "content": "/// Douglas Peucker algorithm for 2D https://en.wikipedia.org/wiki/Ramer%E2%80%93Douglas%E2%80%93Peucker_algorithm\n\npub fn douglas_peucker_2d<P>(mut pc: PointCloud2D<P>, epsilon: f64) -> PointCloud2D<P>\n\nwhere\n\n P: Is2D + Clone,\n\n{\n\n if pc.len() < 1 {\n\n return pc;\n\n }\n\n\n\n let mut dmax = 0.0;\n\n let mut index = 0;\n\n let end = pc.len() - 1;\n\n\n\n for i in 1..end {\n\n let d = distance_point_line(&pc.data[i], &pc.data[0], &pc.data[end]);\n\n if d > dmax {\n\n index = i;\n\n dmax = d;\n\n }\n\n }\n\n\n", "file_path": "src/douglas_peucker_2d.rs", "rank": 53, "score": 244394.73379389278 }, { "content": "#[inline(always)]\n\npub fn point_with_order<P>(fst: f64, snd: f64, third: f64, order: VertexOrder) -> P\n\nwhere\n\n P: IsBuildable3D,\n\n{\n\n match order {\n\n VertexOrder::Xyz => P::new(fst, snd, third),\n\n VertexOrder::Xzy => P::new(fst, third, snd),\n\n VertexOrder::Yxz => P::new(snd, fst, third),\n\n VertexOrder::Yzx => P::new(snd, third, fst),\n\n VertexOrder::Zxy => 
P::new(third, fst, snd),\n\n VertexOrder::Zyx => P::new(third, snd, fst),\n\n }\n\n}\n", "file_path": "src/io/ply/utils.rs", "rank": 54, "score": 241856.86833937862 }, { "content": "#[inline(always)]\n\nfn fetch_face(line: &[u8]) -> ObjResult<[usize; 3]> {\n\n let mut words = to_words_skip_empty(line);\n\n\n\n // skip \"f\"\n\n words.next().ok_or(ObjError::Face)?;\n\n\n\n let mut tmp = words.next().ok_or(ObjError::Face)?;\n\n let a: usize = from_ascii(until_bytes(tmp, b'/')).ok_or(ObjError::Face)?;\n\n\n\n tmp = words.next().ok_or(ObjError::Face)?;\n\n let b: usize = from_ascii(until_bytes(tmp, b'/')).ok_or(ObjError::Face)?;\n\n\n\n tmp = words.next().ok_or(ObjError::Face)?;\n\n let c: usize = from_ascii(until_bytes(tmp, b'/')).ok_or(ObjError::Face)?;\n\n\n\n //@todo could fail if 0 in file\n\n //obj indexing starts at 1\n\n Ok([a - 1, b - 1, c - 1])\n\n}\n", "file_path": "src/io/obj.rs", "rank": 55, "score": 237782.3425608208 }, { "content": "#[inline(always)]\n\nfn read_matrix_row(line: &[u8]) -> Option<[f64; 4]> {\n\n let mut words = to_words_skip_empty(line);\n\n\n\n let a = from_ascii(words.next()?)?;\n\n let b = from_ascii(words.next()?)?;\n\n let c = from_ascii(words.next()?)?;\n\n let d = from_ascii(words.next()?)?;\n\n\n\n Some([a, b, c, d])\n\n}\n\n\n\n//------------------------------------------------------------------------------\n\n\n\n/// Error type for .ptx file operations\n\npub enum PtxError {\n\n LoadFileEndReached,\n\n AccessFile,\n\n Columns,\n\n Rows,\n\n Matrix,\n\n Point,\n\n}\n\n\n\n/// Result type for .ptx file operations\n\npub type PtxIOResult<T> = IOResult<T, PtxError>;\n", "file_path": "src/io/ptx.rs", "rank": 56, "score": 237725.46626126062 }, { "content": "#[inline(always)]\n\n#[allow(unused)]\n\npub fn slice_from_bytes_be<FB>(bytes: &[u8], target: &mut [FB]) -> FromBytesResult<()>\n\nwhere\n\n FB: FromBytes,\n\n{\n\n let size = std::mem::size_of::<FB>();\n\n\n\n if size * bytes.len() != target.len() {\n\n return 
Err(FromBytesError::SizeMismatch);\n\n }\n\n\n\n for i in 0..target.len() {\n\n target[i] = FB::from_be_slice(&bytes[i * size..(i + 1) * size])?\n\n }\n\n\n\n Ok(())\n\n}\n\n\n\n//------------------------------------------------------------------------------\n\n\n", "file_path": "src/io/from_bytes.rs", "rank": 57, "score": 235220.41247900052 }, { "content": "/// Collects all intersections between a ray and mesh\n\npub fn collect_intersections_ray_mesh<P, M>(ray: &Ray3D, mesh: &M, intersections: &mut Vec<P>)\n\nwhere\n\n M: IsMesh<P, Face3>,\n\n P: IsBuildable3D + Sub<Output = P> + Clone,\n\n{\n\n let nf = mesh.num_faces();\n\n\n\n for i in 0..nf {\n\n let [v1, v2, v3] = mesh.face_vertices(FId(i)).unwrap(); // safe\n\n //println!(\"face_vertices\");\n\n if let Some(intersection) = intersection_ray_triangle(ray, &v1, &v2, &v3) {\n\n intersections.push(intersection);\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/functions.rs", "rank": 58, "score": 233055.64222283062 }, { "content": "#[inline(always)]\n\n#[allow(unused)]\n\npub fn slice_from_bytes_le<FB>(bytes: &[u8], target: &mut [FB]) -> FromBytesResult<()>\n\nwhere\n\n FB: FromBytes,\n\n{\n\n let size = std::mem::size_of::<FB>();\n\n\n\n if bytes.len() != size * target.len() {\n\n return Err(FromBytesError::SizeMismatch);\n\n }\n\n\n\n for i in 0..target.len() {\n\n target[i] = FB::from_le_slice(&bytes[i * size..(i + 1) * size])?\n\n }\n\n\n\n Ok(())\n\n}\n\n\n\n//------------------------------------------------------------------------------\n\n\n", "file_path": "src/io/from_bytes.rs", "rank": 59, "score": 232155.56027293648 }, { "content": "/// Positions the object in such a way that its center is at origin\n\npub fn center<T>(x: &mut T)\n\nwhere\n\n T: HasBoundingBox3DMaybe + IsMovable3D,\n\n{\n\n if let Some(bb) = x.bounding_box_maybe() {\n\n let center = bb.center_bb();\n\n x.move_by(-center.x(), -center.y(), -center.z());\n\n }\n\n}\n\n\n", "file_path": "src/functions.rs", "rank": 60, "score": 232041.38646701287 
}, { "content": "#[inline(always)]\n\npub fn from_ascii<T>(bytes: &[u8]) -> Option<T>\n\nwhere\n\n T: FromStr,\n\n{\n\n if bytes.is_ascii() {\n\n unsafe { T::from_str(std::str::from_utf8_unchecked(bytes)).ok() }\n\n } else {\n\n None\n\n }\n\n}\n\n\n\n//------------------------------------------------------------------------------\n\n\n\n/// Fetch a single line\n", "file_path": "src/io/utils.rs", "rank": 61, "score": 228778.94348467496 }, { "content": "/// Returns all until delimiter\n\npub fn until<'a>(line: &'a str, delimiter: &str) -> &'a str {\n\n line.split(delimiter).next().unwrap_or(\"\")\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 62, "score": 228633.49810685936 }, { "content": "/// Checks whether haystack contains needle\n\npub fn contains<T>(haystack: &[T], needle: &[T]) -> bool\n\nwhere\n\n T: PartialEq,\n\n{\n\n haystack.windows(needle.len()).any(|x| x == needle)\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 63, "score": 226221.23138502502 }, { "content": "#[inline(always)]\n\npub fn conn<P>(p_from: &P, p_to: &P) -> P\n\nwhere\n\n P: IsBuildable3D,\n\n{\n\n P::new(\n\n p_to.x() - p_from.x(),\n\n p_to.y() - p_from.y(),\n\n p_to.z() - p_from.z(),\n\n )\n\n}\n\n\n", "file_path": "src/functions.rs", "rank": 64, "score": 225738.1559624875 }, { "content": "/// Creates an ellipse with the given center, a, b and resolution\n\npub fn ellipse<P>(center: &P, n_points: usize, ap: Positive, bp: Positive) -> PointCloud2D<P>\n\nwhere\n\n P: IsBuildable2D,\n\n{\n\n let mut pc = PointCloud2D::with_capacity(n_points);\n\n let p_dist = PI / (n_points - 1) as f64;\n\n let a = *ap;\n\n let b = *bp;\n\n let angle: f64 = 0.0;\n\n\n\n for i in 0..n_points {\n\n let radians = (i as f64) * p_dist;\n\n pc.push(P::new(\n\n center.x() + a * radians.cos() * angle.cos() - b * radians.sin() * angle.sin(),\n\n center.y() + a * radians.cos() * angle.sin() + b * radians.sin() * angle.cos(),\n\n ));\n\n }\n\n pc\n\n}\n", "file_path": "src/factory_2d.rs", "rank": 65, "score": 
221489.60027831994 }, { "content": "#[inline(always)]\n\npub fn center_3d<P>(p1: &P, p2: &P) -> P\n\nwhere\n\n P: IsBuildable3D,\n\n{\n\n P::new(\n\n p1.x() + (p2.x() - p1.x()) / 2.0,\n\n p1.y() + (p2.y() - p1.y()) / 2.0,\n\n p1.z() + (p2.z() - p1.z()) / 2.0,\n\n )\n\n}\n\n\n\n/// Returns the cross product between a Is3D and a IsBuildable3D\n", "file_path": "src/functions.rs", "rank": 66, "score": 220653.872220514 }, { "content": "#[inline(always)]\n\npub fn center_2d<P>(p1: &P, p2: &P) -> P\n\nwhere\n\n P: IsBuildable2D,\n\n{\n\n P::new(\n\n p1.x() + (p2.x() - p1.x()) / 2.0,\n\n p1.y() + (p2.y() - p1.y()) / 2.0,\n\n )\n\n}\n\n\n\n/// Returns the center of two IsBuildable3D\n", "file_path": "src/functions.rs", "rank": 67, "score": 220653.872220514 }, { "content": "/// Creates an arc with the given center, diameter, resolution and start and end angles in radians\n\npub fn arc<P>(\n\n center: &P,\n\n n_points: usize,\n\n diameter: Positive,\n\n start: Rad,\n\n end: Rad,\n\n) -> PointCloud2D<P>\n\nwhere\n\n P: IsBuildable2D,\n\n{\n\n let mut pc = PointCloud2D::with_capacity(n_points);\n\n let d = *diameter;\n\n let p_dist = (end.0 - start.0).abs() / (n_points - 1) as f64;\n\n\n\n for i in 0..n_points {\n\n let radians = start.0 + (i as f64) * p_dist;\n\n pc.push(P::new(\n\n center.x() + d / 2.0 * radians.cos(),\n\n center.y() + d / 2.0 * radians.sin(),\n\n ));\n\n }\n\n pc\n\n}\n\n\n", "file_path": "src/factory_2d.rs", "rank": 68, "score": 218379.99170508952 }, { "content": "/// Saves an IsMesh3D in the ASCII .stl file format\n\npub fn save_stl_ascii<M, P, W>(write: &mut W, mesh: &M) -> StlResult<()>\n\nwhere\n\n M: IsMesh3D<P>,\n\n P: IsBuildable3D,\n\n W: Write,\n\n{\n\n write.write_all(b\"solid STL generated by rust-3d\\n\")?;\n\n\n\n for i in 0..mesh.num_faces() {\n\n let [v1, v2, v3] = mesh.face_vertices(FId(i)).unwrap(); // safe since iterating num_faces\n\n let n = mesh.face_normal(FId(i)).unwrap(); // safe since iterating num_faces\n\n let buffer = \"facet 
normal \".to_string()\n\n + &str_exp(&n)\n\n + \"\\n\"\n\n + \" outer loop\\n\"\n\n + \" vertex \"\n\n + &str_exp(&v1)\n\n + \"\\n\"\n\n + \" vertex \"\n\n + &str_exp(&v2)\n", "file_path": "src/io/stl.rs", "rank": 69, "score": 217878.4039391568 }, { "content": "/// Calculates the normals of a mesh\n\npub fn normals_of_mesh<P, M>(mesh: &M) -> Vec<Norm3D>\n\nwhere\n\n M: IsMesh<P, Face3>,\n\n P: IsBuildable3D + Default + Clone,\n\n{\n\n let n = mesh.num_vertices();\n\n let nf = mesh.num_faces();\n\n let mut ns = vec![P::default(); n];\n\n\n\n for i in 0..nf {\n\n let face = mesh.face_vertex_ids(FId(i)).unwrap(); // safe\n\n let [v1, v2, v3] = mesh.face_vertices(FId(i)).unwrap(); // safe\n\n let v12 = conn(&v1, &v2);\n\n let v13 = conn(&v1, &v3);\n\n let n = Norm3D::new(cross(&v12, &v13)).unwrap_or(Norm3D::norm_z());\n\n for j in 0..3 {\n\n let new = add(&ns[face.vid(j).unwrap().0], &n); // safe since iterating 0..3\n\n ns[face.vid(j).unwrap().0] = new; // safe since iterating 0..3\n\n }\n\n }\n\n\n\n ns.into_iter()\n\n .map(|x| Norm3D::new(x).unwrap_or(Norm3D::norm_z()))\n\n .collect()\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 70, "score": 216502.44957759554 }, { "content": "/// Saves an IsMesh3D in the ASCII .ply file format\n\npub fn save_ply_ascii<M, P, W>(write: &mut W, mesh: &M) -> PlyResult<()>\n\nwhere\n\n M: IsMesh<P, Face3>,\n\n P: IsBuildable3D,\n\n W: Write,\n\n{\n\n let header = \"ply\\n\".to_string()\n\n + \"format ascii 1.0\\n\"\n\n + \"comment Created by rust-3d\\n\"\n\n + \"element vertex \"\n\n + &mesh.num_vertices().to_string()\n\n + \"\\n\"\n\n + \"property float x\\n\"\n\n + \"property float y\\n\"\n\n + \"property float z\\n\"\n\n + \"element face \"\n\n + &mesh.num_faces().to_string()\n\n + \"\\n\"\n\n + \"property list uchar uint vertex_indices\\n\"\n\n + \"end_header\\n\";\n", "file_path": "src/io/ply/save.rs", "rank": 71, "score": 215192.75631531051 }, { "content": "/// Returns the Cosine interpolation of the given base points\n\npub 
fn interpolate_cosine<P>(\n\n base_points: &PointCloud2D<P>,\n\n n_points: usize,\n\n) -> Result<PointCloud2D<P>>\n\nwhere\n\n P: IsBuildable2D,\n\n{\n\n if base_points.len() < 2 {\n\n return Err(ErrorKind::TooFewPoints);\n\n }\n\n\n\n let mut pc = PointCloud2D::with_capacity(n_points);\n\n let p_dist = base_points.length() / (n_points - 1) as f64;\n\n\n\n for i in 0..n_points {\n\n let mut traveled: f64 = 0.0;\n\n let mut traveled_before: f64 = 0.0;\n\n\n\n for j in 1..base_points.len() {\n\n let ref p_prev = base_points.data[j - 1];\n", "file_path": "src/interpolate_2d.rs", "rank": 72, "score": 214798.2152816311 }, { "content": "/// Returns the linear interpolation of the given base points\n\npub fn interpolation_linear<P>(\n\n base_points: &PointCloud2D<P>,\n\n n_points: usize,\n\n) -> Result<PointCloud2D<P>>\n\nwhere\n\n P: IsBuildable2D,\n\n{\n\n if base_points.len() < 2 {\n\n return Err(ErrorKind::TooFewPoints);\n\n }\n\n\n\n let mut pc = PointCloud2D::with_capacity(n_points);\n\n let p_dist = base_points.length() / (n_points - 1) as f64;\n\n\n\n for i in 0..n_points {\n\n let mut traveled: f64 = 0.0;\n\n let mut traveled_before: f64 = 0.0;\n\n\n\n for j in 1..base_points.len() {\n\n //@todo fails if path too small, handle this\n", "file_path": "src/interpolate_2d.rs", "rank": 73, "score": 214798.2152816311 }, { "content": "/// Returns the Bezier interpolation of the given base points\n\npub fn interpolate_bezier<P>(\n\n base_points: &PointCloud2D<P>,\n\n n_points: usize,\n\n) -> Result<PointCloud2D<P>>\n\nwhere\n\n P: IsBuildable2D,\n\n{\n\n if base_points.len() < 2 {\n\n return Err(ErrorKind::TooFewPoints);\n\n }\n\n\n\n let mut pc = PointCloud2D::with_capacity(n_points);\n\n let p_dist = 1.0 / (n_points as f64);\n\n\n\n for i in 0..n_points {\n\n pc.push(control_polygon(\n\n base_points,\n\n base_points.len() - 1,\n\n (i as f64) * p_dist,\n\n ));\n\n }\n\n Ok(pc)\n\n}\n\n\n", "file_path": "src/interpolate_2d.rs", "rank": 74, "score": 214798.2152816311 }, { 
"content": "/// Creates a involut circle with the given center, diameter, resolution and start and end angles in radians\n\npub fn involut_circle<P>(\n\n center: &P,\n\n n_points: usize,\n\n diameter: Positive,\n\n start: Rad,\n\n end: Rad,\n\n) -> PointCloud2D<P>\n\nwhere\n\n P: IsBuildable2D,\n\n{\n\n let mut pc = PointCloud2D::with_capacity(n_points);\n\n let d = *diameter;\n\n let p_dist = (end.0 - start.0).abs() / (n_points - 1) as f64;\n\n\n\n for i in 0..n_points {\n\n let current = (i as f64) * p_dist;\n\n pc.push(P::new(\n\n center.x() + d / 2.0 * (current.cos() + current * current.sin()),\n\n center.y() + d / 2.0 * (current.sin() - current * current.cos()),\n\n ));\n\n }\n\n pc\n\n}\n\n\n", "file_path": "src/factory_2d.rs", "rank": 75, "score": 214793.02787856373 }, { "content": "/// Scales the object to the required size\n\npub fn set_size<T>(x: &mut T, size: [Positive; 3])\n\nwhere\n\n T: HasBoundingBox3DMaybe + IsMatrix4Transformable,\n\n{\n\n if let Some(bb) = x.bounding_box_maybe() {\n\n let m = Matrix4::scale(\n\n *size[0] / *bb.size_x(),\n\n *size[1] / *bb.size_y(),\n\n *size[2] / *bb.size_z(),\n\n );\n\n x.transform(&m);\n\n }\n\n}\n\n\n", "file_path": "src/functions.rs", "rank": 76, "score": 210830.40790593706 }, { "content": "/// Trait for adding line information to (error) types\n\npub trait LineInfoResult<T, E>: Sized {\n\n fn simple(self) -> Result<T, WithLineInfo<E>>;\n\n fn index(self, i: usize) -> Result<T, WithLineInfo<E>>;\n\n fn line(self, i: usize, line: &[u8]) -> Result<T, WithLineInfo<E>>;\n\n}\n\n\n\n//------------------------------------------------------------------------------\n\n\n\npub enum DataReserve<T> {\n\n Data(T),\n\n Reserve(usize),\n\n}\n\n//@todo implement From<T> and use\n\n\n\n//------------------------------------------------------------------------------\n\n\n\npub enum FaceData<T> {\n\n Face([usize; 3]), //@todo VId or usize?\n\n Data(T),\n\n}\n", "file_path": "src/io/types.rs", "rank": 77, "score": 
210337.10443021427 }, { "content": "/// Loading a .ply header\n\npub fn load_header<R>(\n\n read: &mut R,\n\n line_buffer: &mut Vec<u8>,\n\n i_line: &mut usize,\n\n) -> PlyIOResult<Header>\n\nwhere\n\n R: BufRead,\n\n{\n\n let mut vertex_order = [Xyz::X, Xyz::X, Xyz::X];\n\n let mut i_vertex_order = 0;\n\n\n\n let mut ply_found = false;\n\n let mut read_state = HeaderReadState::Meta;\n\n let mut opt_format = None;\n\n let mut opt_n_vertices: Option<usize> = None;\n\n let mut opt_n_faces: Option<usize> = None;\n\n\n\n let mut opt_fst_type = None;\n\n let mut opt_snd_type = None;\n\n let mut opt_third_type = None;\n", "file_path": "src/io/ply/header.rs", "rank": 78, "score": 210072.3886225773 }, { "content": "/// Returns the distance between two Is2D\n\npub fn dist_2d<P, Q>(p: &P, q: &Q) -> f64\n\nwhere\n\n P: Is2D,\n\n Q: Is2D,\n\n{\n\n sqr_dist_2d(p, q).sqrt()\n\n}\n\n\n", "file_path": "src/distances_2d.rs", "rank": 79, "score": 209764.1413516858 }, { "content": "/// Returns the distance between two Is3D\n\npub fn dist_3d<P, Q>(p: &P, q: &Q) -> f64\n\nwhere\n\n P: Is3D,\n\n Q: Is3D,\n\n{\n\n sqr_dist_3d(p, q).sqrt()\n\n}\n\n\n", "file_path": "src/distances_3d.rs", "rank": 80, "score": 209764.1413516858 }, { "content": "/// Calculates the normal of a face given by three vertices\n\npub fn normal_of_face<P>(v1: &P, v2: &P, v3: &P) -> Norm3D\n\nwhere\n\n P: IsBuildable3D,\n\n{\n\n let v12 = conn(v1, v2);\n\n let v23 = conn(v2, v3);\n\n Norm3D::new(cross(&v12, &v23)).unwrap_or(Norm3D::norm_z())\n\n}\n\n\n", "file_path": "src/functions.rs", "rank": 81, "score": 207220.14884561658 }, { "content": "/// Returns the squared distance between two Is2D\n\npub fn sqr_dist_2d<P, Q>(p: &P, q: &Q) -> f64\n\nwhere\n\n P: Is2D,\n\n Q: Is2D,\n\n{\n\n (p.x() - q.x()).powi(2) + (p.y() - q.y()).powi(2)\n\n}\n", "file_path": "src/distances_2d.rs", "rank": 82, "score": 207022.98030792325 }, { "content": "/// Returns the squared distance between two Is3D\n\npub fn sqr_dist_3d<P, Q>(p: &P, 
q: &Q) -> f64\n\nwhere\n\n P: Is3D,\n\n Q: Is3D,\n\n{\n\n (p.x() - q.x()).powi(2) + (p.y() - q.y()).powi(2) + (p.z() - q.z()).powi(2)\n\n}\n", "file_path": "src/distances_3d.rs", "rank": 83, "score": 207022.98030792325 }, { "content": "/// IsPlane3D is a trait used for planes within 3D space\n\npub trait IsPlane3D<P, N>: Sized\n\nwhere\n\n P: Is3D,\n\n N: IsNormalized3D,\n\n{\n\n /// Should return a new plane with the given origin, u and v vectors\n\n fn new(origin: P, u: N, v: N) -> Self;\n\n /// Should return the origin of the plane\n\n fn origin(&self) -> P;\n\n /// Should return the u vector of the plane\n\n fn u(&self) -> N;\n\n /// Should return the v vector of the plane\n\n fn v(&self) -> N;\n\n}\n", "file_path": "src/is_plane_3d.rs", "rank": 84, "score": 206777.20551939146 }, { "content": "/// Saves an IsMesh3D in the binary .ply file format\n\npub fn save_ply_binary<M, P, W>(write: &mut W, mesh: &M, precision: &Precision) -> PlyResult<()>\n\nwhere\n\n M: IsMesh<P, Face3>,\n\n P: IsBuildable3D,\n\n W: Write,\n\n{\n\n let header = match precision {\n\n Precision::P32 => {\n\n \"ply\\n\".to_string()\n\n + \"format binary_big_endian 1.0\\n\"\n\n + \"comment Created by rust-3d\\n\"\n\n + \"element vertex \"\n\n + &mesh.num_vertices().to_string()\n\n + \"\\n\"\n\n + \"property float x\\n\"\n\n + \"property float y\\n\"\n\n + \"property float z\\n\"\n\n + \"element face \"\n\n + &mesh.num_faces().to_string()\n\n + \"\\n\"\n", "file_path": "src/io/ply/save.rs", "rank": 85, "score": 204470.8494997604 }, { "content": "fn load_mesh_ascii<EM, P, R>(\n\n read: &mut R,\n\n mesh: &mut EM,\n\n header: FullHeader,\n\n i_line: &mut usize,\n\n) -> PlyIOResult<()>\n\nwhere\n\n EM: IsFaceEditableMesh<P, Face3> + IsVertexEditableMesh<P, Face3>,\n\n P: IsBuildable3D,\n\n R: BufRead,\n\n{\n\n let iterator = PlyAsciiMeshIterator::new(read, header, *i_line);\n\n\n\n for fd in iterator {\n\n match fd? 
{\n\n io::types::FaceData::Data(p) => {\n\n mesh.add_vertex(p);\n\n }\n\n io::types::FaceData::Face([a, b, c]) => {\n\n mesh.try_add_connection(VId(a), VId(b), VId(c))\n\n .or(Err(PlyError::InvalidMeshIndices))\n\n .simple()?;\n\n }\n\n }\n\n }\n\n\n\n Ok(())\n\n}\n", "file_path": "src/io/ply/load.rs", "rank": 86, "score": 203802.0103755397 }, { "content": "#[inline(always)]\n\npub fn cross<P, U>(first: &P, other: &U) -> U\n\nwhere\n\n P: Is3D,\n\n U: IsBuildable3D,\n\n{\n\n let x = first.y() * other.z() - first.z() * other.y();\n\n let y = first.z() * other.x() - first.x() * other.z();\n\n let z = first.x() * other.y() - first.y() * other.x();\n\n U::new(x, y, z)\n\n}\n\n\n", "file_path": "src/functions.rs", "rank": 87, "score": 202667.35920961696 }, { "content": "/// Convex hull algorithm returning a Vec of the hull where the points are ordered according to the hull\n\n/// Using Andrew's monotone chain convex hull algorithm https://en.wikibooks.org/wiki/Algorithm_Implementation/Geometry/Convex_hull/Monotone_chain\n\npub fn convex_hull_2d<RA, P>(ra: &RA) -> Vec<P>\n\nwhere\n\n RA: IsRandomAccessible<P>,\n\n P: IsBuildable2D + Clone,\n\n{\n\n let n = ra.len();\n\n\n\n let mut sorted = PointCloud2D::new();\n\n sorted.append_ra(ra);\n\n sorted.sort_x();\n\n let sorted = sorted;\n\n\n\n let mut lower = Vec::<P>::new();\n\n for i in 0..n {\n\n while lower.len() >= 2\n\n && ccw(&lower[lower.len() - 2], &lower[lower.len() - 1], &sorted[i]) <= 0.0\n\n {\n\n lower.pop().unwrap(); //safe, since only called if len > 0\n\n }\n\n lower.push(sorted[i].clone());\n", "file_path": "src/convex_hull_2d.rs", "rank": 88, "score": 200079.3279593258 }, { "content": "fn command(line: &[u8]) -> Option<[Option<f64>; 3]> {\n\n let mut n_found = 0;\n\n let mut x = None;\n\n let mut y = None;\n\n let mut z = None;\n\n let words = line.split(|x| *x == b' ');\n\n\n\n for word in words {\n\n if n_found == 3 {\n\n break;\n\n }\n\n\n\n let n = word.len();\n\n\n\n if n == 0 {\n\n continue;\n\n 
}\n\n\n\n if word[0] == b';' {\n\n break;\n", "file_path": "src/io/gcode.rs", "rank": 89, "score": 198904.443935943 }, { "content": "//@todo more generic types?\n\n/// Finds the intersection between a ray and triangle\n\npub fn intersection_ray_triangle<P>(ray: &Ray3D, v1: &P, v2: &P, v3: &P) -> Option<P>\n\nwhere\n\n P: IsBuildable3D + Sub<Output = P> + Clone,\n\n{\n\n let orig = &ray.line.anchor;\n\n let dir = &ray.line.dir;\n\n let n = normal_of_face(v1, v2, v3);\n\n\n\n let w1 = orig - v1;\n\n let a = -n.dot(&w1);\n\n let b = n.dot(dir);\n\n\n\n if b == 0.0 {\n\n return None;\n\n } //@todo eps\n\n\n\n let r = a / b;\n\n\n\n if r <= 0.0 {\n\n return None;\n", "file_path": "src/functions.rs", "rank": 90, "score": 198449.41610736676 }, { "content": "/// Saves an IsRandomAccessible<Is3D> as x y z coordinates with a specified delimiter between coordinates and positions. E.g. used to create the .xyz file format or .csv files\n\npub fn save_xyz<RA, P, W>(\n\n write: &mut W,\n\n ra: &RA,\n\n delim_coord: &str,\n\n delim_pos: &str,\n\n) -> XyzResult<()>\n\nwhere\n\n RA: IsRandomAccessible<P>,\n\n P: Is3D,\n\n W: Write,\n\n{\n\n let n = ra.len();\n\n for i in 0..n {\n\n let ref p = ra[i];\n\n let buffer = p.x().to_string()\n\n + delim_coord\n\n + &p.y().to_string()\n\n + delim_coord\n\n + &p.z().to_string()\n\n + delim_pos;\n\n write.write_all(buffer.as_bytes())?;\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "src/io/xyz.rs", "rank": 91, "score": 197507.228841174 }, { "content": "/// Saves an IsMesh3D in the ASCII .ply file format with additional colors\n\npub fn save_ply_ascii_colored<M, P, W>(write: &mut W, mesh: &M, colors: &Vec<Rgb>) -> PlyResult<()>\n\nwhere\n\n M: IsMesh<P, Face3>,\n\n P: IsBuildable3D,\n\n W: Write,\n\n{\n\n let n_vertices = mesh.num_vertices();\n\n let n_faces = mesh.num_faces();\n\n\n\n if n_vertices != colors.len() {\n\n return Err(PlyError::ColorArrayIncorrectLength);\n\n }\n\n\n\n let header = \"ply\\n\".to_string()\n\n + \"format ascii 
1.0\\n\"\n\n + \"comment Created by rust-3d\\n\"\n\n + \"element vertex \"\n\n + &n_vertices.to_string()\n\n + \"\\n\"\n\n + \"property float x\\n\"\n", "file_path": "src/io/ply/save.rs", "rank": 92, "score": 197236.04802807333 }, { "content": "/// Creates a 2D rectangle from given center width and height\n\npub fn rectangle<P>(center: &P, width: Positive, height: Positive) -> PointCloud2D<P>\n\nwhere\n\n P: IsBuildable2D,\n\n{\n\n let mut pc = PointCloud2D::with_capacity(4);\n\n let w = *width;\n\n let h = *height;\n\n pc.push(P::new(center.x() - w / 2.0, center.y() - h / 2.0));\n\n pc.push(P::new(center.x() + w / 2.0, center.y() - h / 2.0));\n\n pc.push(P::new(center.x() + w / 2.0, center.y() + h / 2.0));\n\n pc.push(P::new(center.x() - w / 2.0, center.y() + h / 2.0));\n\n pc\n\n}\n\n\n", "file_path": "src/factory_2d.rs", "rank": 94, "score": 196651.0137995613 }, { "content": "/// Saves an IsRandomAccessible<Is2D> as x y coordinates with a specified delimiter between coordinates and positions. E.g. 
used to create the .xy file format or .csv files\n\npub fn save_xy<RA, P, W>(write: &mut W, ra: &RA, delim_coord: &str, delim_pos: &str) -> XyResult<()>\n\nwhere\n\n RA: IsRandomAccessible<P>,\n\n P: Is2D,\n\n W: Write,\n\n{\n\n let n = ra.len();\n\n for i in 0..n {\n\n let ref p = ra[i];\n\n let buffer = p.x().to_string() + delim_coord + &p.y().to_string() + delim_pos;\n\n write.write_all(buffer.as_bytes())?;\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "src/io/xy.rs", "rank": 95, "score": 194829.3573268419 }, { "content": "/// Returns the distance between two IsND in case their number of dimensions match\n\npub fn dist_nd<P, U>(p1: &P, p2: &U) -> Result<f64>\n\nwhere\n\n P: IsND,\n\n U: IsND,\n\n{\n\n sqr_dist_nd(p1, p2).map(|x| x.sqrt())\n\n}\n\n\n", "file_path": "src/distances_nd.rs", "rank": 96, "score": 190458.0589382434 }, { "content": "/// Saves an IsMesh3D in the binary .ply file format with additional colors\n\npub fn save_ply_binary_colored<M, P, W>(\n\n write: &mut W,\n\n mesh: &M,\n\n precision: &Precision,\n\n colors: &Vec<Rgb>,\n\n) -> PlyResult<()>\n\nwhere\n\n M: IsMesh<P, Face3>,\n\n P: IsBuildable3D,\n\n W: Write,\n\n{\n\n let n_vertices = mesh.num_vertices();\n\n let n_faces = mesh.num_faces();\n\n\n\n if n_vertices != colors.len() {\n\n return Err(PlyError::ColorArrayIncorrectLength);\n\n }\n\n\n\n let header = match precision {\n\n Precision::P32 => {\n", "file_path": "src/io/ply/save.rs", "rank": 97, "score": 189208.87803149808 }, { "content": "/// Returns the squared distance between two IsND in case their number of dimensions match\n\npub fn sqr_dist_nd<P, U>(p1: &P, p2: &U) -> Result<f64>\n\nwhere\n\n P: IsND,\n\n U: IsND,\n\n{\n\n if P::n_dimensions() != U::n_dimensions() {\n\n return Err(ErrorKind::DimensionsDontMatch);\n\n }\n\n\n\n let mut result: f64 = 0.0;\n\n for i in 0..P::n_dimensions() {\n\n if let (Some(val1), Some(val2)) = (p1.position_nd(i), p2.position_nd(i)) {\n\n result += (val1 - val2).powi(2);\n\n } else {\n\n return 
Err(ErrorKind::IncorrectDimension);\n\n }\n\n }\n\n Ok(result)\n\n}\n", "file_path": "src/distances_nd.rs", "rank": 98, "score": 188242.06941852716 }, { "content": "fn vertex_to_face<P, M>(mesh: &M) -> Vec<FnvHashSet<usize>>\n\nwhere\n\n M: IsMesh<P, Face3> + Default,\n\n P: Is3D,\n\n{\n\n let nv = mesh.num_vertices();\n\n let nf = mesh.num_faces();\n\n let mut v_to_f = vec![FnvHashSet::default(); nv];\n\n\n\n for i in 0..nf {\n\n let f = mesh.face_vertex_ids(FId(i)).unwrap(); // safe\n\n v_to_f[f.a.0].insert(i);\n\n v_to_f[f.b.0].insert(i);\n\n v_to_f[f.c.0].insert(i);\n\n }\n\n\n\n v_to_f\n\n}\n\n\n\n//------------------------------------------------------------------------------\n\n\n", "file_path": "src/unify_faces.rs", "rank": 99, "score": 187742.87268305788 } ]
Rust
chain-abci/src/storage/tx.rs
calvinlauco/chain
ef40ea2c44f8e1aca8a36e50ff10172879df3491
use crate::enclave_bridge::EnclaveProxy; use crate::storage::account::AccountStorage; use crate::storage::account::AccountWrapper; use crate::storage::COL_TX_META; use bit_vec::BitVec; use chain_core::state::account::{to_stake_key, StakedState, StakedStateAddress}; use chain_core::tx::data::input::TxoPointer; use chain_core::tx::fee::Fee; use chain_core::tx::TransactionId; use chain_core::tx::TxObfuscated; use chain_core::tx::{TxAux, TxEnclaveAux}; use chain_tx_validation::{ verify_node_join, verify_unbonding, verify_unjailed, verify_unjailing, witness::verify_tx_recover_address, ChainInfo, Error, NodeInfo, }; use enclave_protocol::{EnclaveRequest, EnclaveResponse}; use kvdb::KeyValueDB; use starling::constants::KEY_LEN; use std::sync::Arc; pub type StarlingFixedKey = [u8; KEY_LEN]; pub fn get_account( account_address: &StakedStateAddress, last_root: &StarlingFixedKey, accounts: &AccountStorage, ) -> Result<StakedState, Error> { let account_key = to_stake_key(account_address); let account = accounts.get_one(last_root, &account_key); match account { Err(_e) => Err(Error::IoError) /* FIXME: Err(Error::IoError(std::io::Error::new( std::io::ErrorKind::Other, e, )))*/, Ok(None) => Err(Error::AccountNotFound), Ok(Some(AccountWrapper(a))) => Ok(a), } } fn check_spent_input_lookup(inputs: &[TxoPointer], db: Arc<dyn KeyValueDB>) -> Result<(), Error> { if inputs.is_empty() { return Err(Error::NoInputs); } for txin in inputs.iter() { let txo = db.get(COL_TX_META, &txin.id[..]); match txo { Ok(Some(v)) => { let input_index = txin.index as usize; let bv = BitVec::from_bytes(&v).get(input_index); if bv.is_none() { return Err(Error::InvalidInput); } if bv.unwrap() { return Err(Error::InputSpent); } } Ok(None) => { return Err(Error::InvalidInput); } Err(_e) => { return Err(Error::IoError); } } } Ok(()) } pub fn verify_enclave_tx<T: EnclaveProxy>( tx_validator: &mut T, txaux: &TxEnclaveAux, extra_info: ChainInfo, last_account_root_hash: &StarlingFixedKey, db: Arc<dyn KeyValueDB>, 
accounts: &AccountStorage, ) -> Result<(Fee, Option<StakedState>), Error> { match txaux { TxEnclaveAux::TransferTx { inputs, no_of_outputs, payload, } => { check_spent_input_lookup(&inputs, db)?; let response = tx_validator.process_request(EnclaveRequest::new_tx_request( TxEnclaveAux::TransferTx { inputs: inputs.clone(), no_of_outputs: *no_of_outputs, payload: payload.clone(), }, None, extra_info, )); match response { EnclaveResponse::VerifyTx(r) => r, _ => Err(Error::EnclaveRejected), } } TxEnclaveAux::DepositStakeTx { tx, payload } => { let maccount = get_account(&tx.to_staked_account, last_account_root_hash, accounts); let account = match maccount { Ok(a) => Some(a), Err(Error::AccountNotFound) => None, Err(e) => { return Err(e); } }; if let Some(ref account) = account { verify_unjailed(account)?; } check_spent_input_lookup(&tx.inputs, db)?; let response = tx_validator.process_request(EnclaveRequest::new_tx_request( TxEnclaveAux::DepositStakeTx { tx: tx.clone(), payload: payload.clone(), }, account, extra_info, )); match response { EnclaveResponse::VerifyTx(r) => r, _ => Err(Error::EnclaveRejected), } } TxEnclaveAux::WithdrawUnbondedStakeTx { payload: TxObfuscated { key_from, init_vector, txpayload, txid, }, witness, no_of_outputs, } => { let account_address = verify_tx_recover_address(&witness, &txid); if let Err(_e) = account_address { return Err(Error::EcdsaCrypto); } let account = get_account(&account_address.unwrap(), last_account_root_hash, accounts)?; verify_unjailed(&account)?; let response = tx_validator.process_request(EnclaveRequest::new_tx_request( TxEnclaveAux::WithdrawUnbondedStakeTx { payload: TxObfuscated { key_from: *key_from, init_vector: *init_vector, txpayload: txpayload.clone(), txid: *txid, }, witness: witness.clone(), no_of_outputs: *no_of_outputs, }, Some(account), extra_info, )); match response { EnclaveResponse::VerifyTx(r) => r, _ => Err(Error::EnclaveRejected), } } } } pub fn verify_public_tx( txaux: &TxAux, extra_info: ChainInfo, 
node_info: NodeInfo, last_account_root_hash: &StarlingFixedKey, accounts: &AccountStorage, ) -> Result<(Fee, Option<StakedState>), Error> { match txaux { TxAux::EnclaveTx(_) => unreachable!("should be handled by verify_enclave_tx"), TxAux::UnbondStakeTx(maintx, witness) => { match verify_tx_recover_address(&witness, &maintx.id()) { Ok(account_address) => { let account = get_account(&account_address, last_account_root_hash, accounts)?; verify_unbonding(maintx, extra_info, account) } Err(_) => { Err(Error::EcdsaCrypto) } } } TxAux::UnjailTx(maintx, witness) => { match verify_tx_recover_address(&witness, &maintx.id()) { Ok(account_address) => { let account = get_account(&account_address, last_account_root_hash, accounts)?; verify_unjailing(maintx, extra_info, account) } Err(_) => { Err(Error::EcdsaCrypto) } } } TxAux::NodeJoinTx(maintx, witness) => { match verify_tx_recover_address(&witness, &maintx.id()) { Ok(account_address) => { let account = get_account(&account_address, last_account_root_hash, accounts)?; verify_node_join(maintx, extra_info, node_info, account) } Err(_) => { Err(Error::EcdsaCrypto) } } } } }
use crate::enclave_bridge::EnclaveProxy; use crate::storage::account::AccountStorage; use crate::storage::account::AccountWrapper; use crate::storage::COL_TX_META; use bit_vec::BitVec; use chain_core::state::account::{to_stake_key, StakedState, StakedStateAddress}; use chain_core::tx::data::input::TxoPointer; use chain_core::tx::fee::Fee; use chain_core::tx::TransactionId; use chain_core::tx::TxObfuscated; use chain_core::tx::{TxAux, TxEnclaveAux}; use chain_tx_validation::{ verify_node_join, verify_unbonding, verify_unjailed, verify_unjailing, witness::verify_tx_recover_address, ChainInfo, Error, NodeInfo, }; use enclave_protocol::{EnclaveRequest, EnclaveResponse}; use kvdb::KeyValueDB; use starling::constants::KEY_LEN; use std::sync::Arc; pub type StarlingFixedKey = [u8; KEY_LEN]; pub fn get_account( account_address: &StakedStateAddress, last_root: &StarlingFixedKey, accounts: &AccountStorage, ) -> Result<StakedState, Error> { let account_key = to_stake_key(account_address); let account = accounts.get_one(last_root, &account_key); match account { Err(_e) => Err(Error::IoErro
fn check_spent_input_lookup(inputs: &[TxoPointer], db: Arc<dyn KeyValueDB>) -> Result<(), Error> { if inputs.is_empty() { return Err(Error::NoInputs); } for txin in inputs.iter() { let txo = db.get(COL_TX_META, &txin.id[..]); match txo { Ok(Some(v)) => { let input_index = txin.index as usize; let bv = BitVec::from_bytes(&v).get(input_index); if bv.is_none() { return Err(Error::InvalidInput); } if bv.unwrap() { return Err(Error::InputSpent); } } Ok(None) => { return Err(Error::InvalidInput); } Err(_e) => { return Err(Error::IoError); } } } Ok(()) } pub fn verify_enclave_tx<T: EnclaveProxy>( tx_validator: &mut T, txaux: &TxEnclaveAux, extra_info: ChainInfo, last_account_root_hash: &StarlingFixedKey, db: Arc<dyn KeyValueDB>, accounts: &AccountStorage, ) -> Result<(Fee, Option<StakedState>), Error> { match txaux { TxEnclaveAux::TransferTx { inputs, no_of_outputs, payload, } => { check_spent_input_lookup(&inputs, db)?; let response = tx_validator.process_request(EnclaveRequest::new_tx_request( TxEnclaveAux::TransferTx { inputs: inputs.clone(), no_of_outputs: *no_of_outputs, payload: payload.clone(), }, None, extra_info, )); match response { EnclaveResponse::VerifyTx(r) => r, _ => Err(Error::EnclaveRejected), } } TxEnclaveAux::DepositStakeTx { tx, payload } => { let maccount = get_account(&tx.to_staked_account, last_account_root_hash, accounts); let account = match maccount { Ok(a) => Some(a), Err(Error::AccountNotFound) => None, Err(e) => { return Err(e); } }; if let Some(ref account) = account { verify_unjailed(account)?; } check_spent_input_lookup(&tx.inputs, db)?; let response = tx_validator.process_request(EnclaveRequest::new_tx_request( TxEnclaveAux::DepositStakeTx { tx: tx.clone(), payload: payload.clone(), }, account, extra_info, )); match response { EnclaveResponse::VerifyTx(r) => r, _ => Err(Error::EnclaveRejected), } } TxEnclaveAux::WithdrawUnbondedStakeTx { payload: TxObfuscated { key_from, init_vector, txpayload, txid, }, witness, no_of_outputs, } => { let 
account_address = verify_tx_recover_address(&witness, &txid); if let Err(_e) = account_address { return Err(Error::EcdsaCrypto); } let account = get_account(&account_address.unwrap(), last_account_root_hash, accounts)?; verify_unjailed(&account)?; let response = tx_validator.process_request(EnclaveRequest::new_tx_request( TxEnclaveAux::WithdrawUnbondedStakeTx { payload: TxObfuscated { key_from: *key_from, init_vector: *init_vector, txpayload: txpayload.clone(), txid: *txid, }, witness: witness.clone(), no_of_outputs: *no_of_outputs, }, Some(account), extra_info, )); match response { EnclaveResponse::VerifyTx(r) => r, _ => Err(Error::EnclaveRejected), } } } } pub fn verify_public_tx( txaux: &TxAux, extra_info: ChainInfo, node_info: NodeInfo, last_account_root_hash: &StarlingFixedKey, accounts: &AccountStorage, ) -> Result<(Fee, Option<StakedState>), Error> { match txaux { TxAux::EnclaveTx(_) => unreachable!("should be handled by verify_enclave_tx"), TxAux::UnbondStakeTx(maintx, witness) => { match verify_tx_recover_address(&witness, &maintx.id()) { Ok(account_address) => { let account = get_account(&account_address, last_account_root_hash, accounts)?; verify_unbonding(maintx, extra_info, account) } Err(_) => { Err(Error::EcdsaCrypto) } } } TxAux::UnjailTx(maintx, witness) => { match verify_tx_recover_address(&witness, &maintx.id()) { Ok(account_address) => { let account = get_account(&account_address, last_account_root_hash, accounts)?; verify_unjailing(maintx, extra_info, account) } Err(_) => { Err(Error::EcdsaCrypto) } } } TxAux::NodeJoinTx(maintx, witness) => { match verify_tx_recover_address(&witness, &maintx.id()) { Ok(account_address) => { let account = get_account(&account_address, last_account_root_hash, accounts)?; verify_node_join(maintx, extra_info, node_info, account) } Err(_) => { Err(Error::EcdsaCrypto) } } } } }
r) /* FIXME: Err(Error::IoError(std::io::Error::new( std::io::ErrorKind::Other, e, )))*/, Ok(None) => Err(Error::AccountNotFound), Ok(Some(AccountWrapper(a))) => Ok(a), } }
function_block-function_prefixed
[ { "content": "/// Verifies if the account is unjailed\n\npub fn verify_unjailed(account: &StakedState) -> Result<(), Error> {\n\n if account.is_jailed() {\n\n Err(Error::AccountJailed)\n\n } else {\n\n Ok(())\n\n }\n\n}\n\n\n\n/// information needed for NodeJoinRequestTx verification\n\npub struct NodeInfo<'a> {\n\n /// minimal required stake\n\n pub minimal_stake: Coin,\n\n /// current validator addresses\n\n pub tendermint_validator_addresses:\n\n &'a BTreeMap<TendermintValidatorAddress, StakedStateAddress>,\n\n /// current validator staking addresses\n\n pub validator_voting_power: &'a BTreeMap<StakedStateAddress, TendermintVotePower>,\n\n}\n\n\n", "file_path": "chain-tx-validation/src/lib.rs", "rank": 0, "score": 261514.7040712302 }, { "content": "/// the tree used in StakedState storage db has a hardcoded 32-byte keys,\n\n/// this computes a key as blake2s(StakedState.address) where\n\n/// the StakedState address itself is ETH-style address (20 bytes from keccak hash of public key)\n\npub fn to_stake_key(address: &StakedStateAddress) -> [u8; HASH_SIZE_256] {\n\n // TODO: prefix with zero\n\n match address {\n\n StakedStateAddress::BasicRedeem(a) => hash256::<Blake2s>(a),\n\n }\n\n}\n\n\n\nimpl Default for StakedState {\n\n fn default() -> Self {\n\n StakedState::new(\n\n 0,\n\n Coin::zero(),\n\n Coin::zero(),\n\n 0,\n\n StakedStateAddress::BasicRedeem(RedeemAddress::default()),\n\n None,\n\n )\n\n }\n\n}\n\n\n", "file_path": "chain-core/src/state/account.rs", "rank": 1, "score": 254910.10556502326 }, { "content": "pub fn get_account(\n\n account_address: &StakedStateAddress,\n\n app: &ChainNodeApp<MockClient>,\n\n) -> StakedState {\n\n println!(\n\n \"uncommitted root hash: {}\",\n\n hex::encode(app.uncommitted_account_root_hash)\n\n );\n\n let account_key = to_stake_key(&account_address);\n\n let state = app.last_state.clone().expect(\"app state\");\n\n println!(\n\n \"committed root hash: {}\",\n\n hex::encode(&state.top_level.account_root)\n\n );\n\n let 
account = app\n\n .accounts\n\n .get_one(&app.uncommitted_account_root_hash, &account_key)\n\n .expect(\"account lookup problem\");\n\n\n\n match account {\n\n None => panic!(\"account not found\"),\n\n Some(AccountWrapper(a)) => a,\n\n }\n\n}\n\n\n", "file_path": "test-common/src/chain_env.rs", "rank": 3, "score": 242252.8187289432 }, { "content": "#[inline]\n\npub fn convert_io_err(e: std::io::Error) -> Exception {\n\n Exception::new(e.description())\n\n}\n", "file_path": "chain-abci/src/storage/account/tree.rs", "rank": 4, "score": 229532.06703171183 }, { "content": "pub fn validator_account_id() -> account::Id {\n\n validator_pub_key().into()\n\n}\n\n\n", "file_path": "client-common/src/tendermint/mock.rs", "rank": 5, "score": 226477.78465197727 }, { "content": "/// Returns the identifier of the chosen network (a single byte included in transaction metadata)\n\npub fn get_network_id() -> u8 {\n\n unsafe { chosen_network::NETWORK_ID }\n\n}\n\n\n", "file_path": "chain-core/src/init/network.rs", "rank": 6, "score": 225530.44702694786 }, { "content": "pub fn hash<T: AsRef<[u8]>>(value: T, node_type: NodeType) -> H256 {\n\n match node_type {\n\n NodeType::Leaf => hash256::<Blake2s>(&prefix_with_byte(value, 0)),\n\n NodeType::Intermediate => hash256::<Blake2s>(&prefix_with_byte(value, 1)),\n\n }\n\n}\n\n\n", "file_path": "chain-core/src/common/merkle_tree.rs", "rank": 7, "score": 219029.08329289587 }, { "content": "/// Calculate Keccak-256 crypto hash\n\npub fn keccak256(data: &[u8]) -> H256 {\n\n let mut output = [0u8; HASH_SIZE_256];\n\n let mut hasher = Keccak::v256();\n\n hasher.update(data);\n\n hasher.finalize(&mut output);\n\n output\n\n}\n\n\n", "file_path": "chain-core/src/init/address.rs", "rank": 8, "score": 209714.562130916 }, { "content": "/// Encrypts bytes with given passphrase\n\npub fn encrypt_bytes<K: AsRef<[u8]>>(\n\n key: K,\n\n passphrase: &SecUtf8,\n\n bytes: &[u8],\n\n) -> Result<Vec<u8>> {\n\n let mut nonce = [0; NONCE_SIZE];\n\n\n\n 
OsRng.fill(&mut nonce);\n\n\n\n let algo = get_algo(passphrase)?;\n\n\n\n let mut cipher = Vec::new();\n\n cipher.extend_from_slice(&nonce[..]);\n\n\n\n let payload = Payload {\n\n msg: bytes,\n\n aad: key.as_ref(),\n\n };\n\n\n\n cipher.append(\n\n &mut algo\n\n .encrypt(GenericArray::from_slice(&nonce), payload)\n\n .map_err(|_| Error::new(ErrorKind::EncryptionError, \"Unable to encrypt bytes\"))?,\n\n );\n\n\n\n Ok(cipher)\n\n}\n\n\n", "file_path": "client-common/src/storage.rs", "rank": 9, "score": 209714.562130916 }, { "content": "/// Decrypts bytes with given passphrase\n\npub fn decrypt_bytes<K: AsRef<[u8]>>(\n\n key: K,\n\n passphrase: &SecUtf8,\n\n bytes: &[u8],\n\n) -> Result<Vec<u8>> {\n\n let algo = get_algo(passphrase)?;\n\n\n\n let payload = Payload {\n\n msg: &bytes[NONCE_SIZE..],\n\n aad: key.as_ref(),\n\n };\n\n\n\n algo.decrypt(GenericArray::from_slice(&bytes[..NONCE_SIZE]), payload)\n\n .map_err(|_| {\n\n Error::new(\n\n ErrorKind::DecryptionError,\n\n \"Incorrect passphrase: Unable to unlock stored values\",\n\n )\n\n })\n\n}\n\n\n", "file_path": "client-common/src/storage.rs", "rank": 10, "score": 209714.562130916 }, { "content": "/// Given the Account state storage and the current / uncommitted account storage root,\n\n/// it inserts the updated account state into the account storage and returns the new root hash of the account state trie.\n\npub fn update_account(\n\n account: StakedState,\n\n account_root_hash: &StarlingFixedKey,\n\n accounts: &mut AccountStorage,\n\n) -> (StarlingFixedKey, Option<StakedState>) {\n\n (\n\n accounts\n\n .insert_one(\n\n Some(account_root_hash),\n\n &account.key(),\n\n &AccountWrapper(account.clone()),\n\n )\n\n .expect(\"update account\"),\n\n Some(account),\n\n )\n\n}\n\n\n\n/// TODO: sanity checks in abci https://github.com/tendermint/rust-abci/issues/49\n\nimpl<T: EnclaveProxy> abci::Application for ChainNodeApp<T> {\n\n /// Query Connection: Called on startup from Tendermint. 
The application should normally\n", "file_path": "chain-abci/src/app/mod.rs", "rank": 11, "score": 207638.37851299578 }, { "content": "/// Calculates hash of the input data -- if SCALE-serialized TX is passed in, it's equivalent to TxId.\n\n/// Currently, it uses blake2s.\n\npub fn txid_hash(buf: &[u8]) -> H256 {\n\n hash256::<Blake2s>(buf)\n\n}\n\n\n\n/// Key to identify the used TXID hash function, e.g. in ProofOps.\n\npub const TXID_HASH_ID: &[u8; 7] = b\"blake2s\";\n\n\n\n/// Transaction ID -- currently, blake2s hash of SCALE-serialized TX data\n\npub type TxId = H256;\n\n\n\n/// A Transaction containing tx inputs and tx outputs.\n\n/// TODO: max input/output size?\n\n/// TODO: custom Encode/Decode when data structures are finalized (for backwards/forwards compatibility, encoders/decoders should be able to work with old formats)\n\n#[derive(Debug, Default, PartialEq, Eq, Clone, Encode)]\n\n#[cfg_attr(\n\n all(feature = \"serde\", feature = \"hex\"),\n\n derive(Serialize, Deserialize)\n\n)]\n\npub struct Tx {\n\n pub inputs: Vec<TxoPointer>,\n", "file_path": "chain-core/src/tx/data/mod.rs", "rank": 12, "score": 204185.28957844523 }, { "content": "pub fn get_account(account_address: &RedeemAddress, app: &ChainNodeApp<MockClient>) -> StakedState {\n\n println!(\n\n \"uncommitted root hash: {}\",\n\n hex::encode(app.uncommitted_account_root_hash)\n\n );\n\n let account_key = to_stake_key(&StakedStateAddress::from(*account_address));\n\n let state = app.last_state.clone().expect(\"app state\");\n\n println!(\n\n \"committed root hash: {}\",\n\n hex::encode(&state.top_level.account_root)\n\n );\n\n let account = app\n\n .accounts\n\n .get_one(&app.uncommitted_account_root_hash, &account_key)\n\n .expect(\"account lookup problem\");\n\n\n\n match account {\n\n None => panic!(\"account not found\"),\n\n Some(AccountWrapper(a)) => a,\n\n }\n\n}\n\n\n", "file_path": "chain-abci/tests/abci_app.rs", "rank": 13, "score": 204004.3163016303 }, { "content": "pub fn 
compute_accounts_root(\n\n account_storage: &mut AccountStorage,\n\n accounts: &[StakedState],\n\n) -> H256 {\n\n let mut keys: Vec<_> = accounts.iter().map(StakedState::key).collect();\n\n let wrapped: Vec<_> = accounts.iter().cloned().map(AccountWrapper).collect();\n\n account_storage\n\n .insert(None, &mut keys, &wrapped)\n\n .expect(\"insert failed\")\n\n}\n\n\n", "file_path": "chain-abci/src/app/app_init.rs", "rank": 14, "score": 201609.05580584577 }, { "content": "fn check_attributes(tx_chain_hex_id: u8, extra_info: &ChainInfo) -> Result<(), Error> {\n\n // TODO: check other attributes?\n\n // check that chain IDs match\n\n if extra_info.chain_hex_id != tx_chain_hex_id {\n\n return Err(Error::WrongChainHexId);\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "chain-tx-validation/src/lib.rs", "rank": 15, "score": 201197.6142655818 }, { "content": "/// Applies basic checks on transaction outputs\n\npub fn check_outputs_basic(outputs: &[TxOut]) -> Result<(), Error> {\n\n // check that there are outputs\n\n if outputs.is_empty() {\n\n return Err(Error::NoOutputs);\n\n }\n\n\n\n // check that all outputs have a non-zero amount\n\n if !outputs.iter().all(|x| x.value > Coin::zero()) {\n\n return Err(Error::ZeroCoin);\n\n }\n\n\n\n // Note: we don't need to check against MAX_COIN because Coin's\n\n // constructor should already do it.\n\n\n\n // TODO: check address attributes?\n\n Ok(())\n\n}\n\n\n", "file_path": "chain-tx-validation/src/lib.rs", "rank": 16, "score": 195635.32117541574 }, { "content": "/// Verifies if a new council node can be added\n\npub fn verify_node_join(\n\n maintx: &NodeJoinRequestTx,\n\n extra_info: ChainInfo,\n\n node_info: NodeInfo,\n\n mut account: StakedState,\n\n) -> Result<(Fee, Option<StakedState>), Error> {\n\n verify_unjailed(&account)?;\n\n check_attributes(maintx.attributes.chain_hex_id, &extra_info)?;\n\n\n\n // checks that staked state transaction count matches to the one in transaction\n\n if maintx.nonce != account.nonce {\n\n return 
Err(Error::AccountIncorrectNonce);\n\n }\n\n\n\n // checks that the address in unjail transaction is same as that of staked state recovered from witness\n\n if maintx.address != account.address {\n\n return Err(Error::MismatchAccountAddress);\n\n }\n\n\n\n // checks that the bonded amount >= minimal required stake\n", "file_path": "chain-tx-validation/src/lib.rs", "rank": 17, "score": 193142.74023657624 }, { "content": "/// Calculates 256-bit crypto hash\n\npub fn hash256<D: Digest>(data: &[u8]) -> H256 {\n\n let mut hasher = D::new();\n\n hasher.input(data);\n\n let mut out = [0u8; HASH_SIZE_256];\n\n out.copy_from_slice(&hasher.result()[..]);\n\n out\n\n}\n\n\n\n/// Seconds since UNIX epoch\n\npub type Timespec = u64;\n\n\n\npub type H256 = [u8; HASH_SIZE_256];\n\npub type H264 = [u8; HASH_SIZE_256 + 1];\n\npub type H512 = [u8; HASH_SIZE_256 * 2];\n\n\n\n/// Types of tendermint events created during `deliver_tx` / `end_block`\n\n#[derive(Debug, Clone, Copy)]\n\npub enum TendermintEventType {\n\n ValidTransactions,\n\n BlockFilter,\n", "file_path": "chain-core/src/common/mod.rs", "rank": 18, "score": 193060.86177266465 }, { "content": "/// Given the chosen network, it returns bip44 cointype\n\npub fn get_bip44_coin_type() -> u32 {\n\n get_bip44_coin_type_from_network(get_network())\n\n}\n\n\n", "file_path": "chain-core/src/init/network.rs", "rank": 19, "score": 191757.28633612866 }, { "content": "pub fn create_storage() -> (Storage, AccountStorage) {\n\n (\n\n Storage::new_db(Arc::new(create(NUM_COLUMNS.unwrap()))),\n\n AccountStorage::new(Storage::new_db(Arc::new(create(1))), 20)\n\n .expect(\"Unable to create account storage\"),\n\n )\n\n}\n\n\n", "file_path": "test-common/src/chain_env.rs", "rank": 20, "score": 186001.8665939881 }, { "content": "pub fn get_account_op_witness<C: Signing>(\n\n secp: Secp256k1<C>,\n\n txid: &TxId,\n\n secret_key: &SecretKey,\n\n) -> StakedStateOpWitness {\n\n let message = Message::from_slice(&txid[..]).expect(\"32 bytes\");\n\n 
let sig = secp.sign_recoverable(&message, &secret_key);\n\n return StakedStateOpWitness::new(sig);\n\n}\n\n\n", "file_path": "chain-abci/tests/tx_validation.rs", "rank": 21, "score": 183295.66307021704 }, { "content": "fn parse_response_sigrl(resp: &[u8]) -> Result<Vec<u8>, RAError> {\n\n let mut headers = [httparse::EMPTY_HEADER; 16];\n\n let mut respp = httparse::Response::new(&mut headers);\n\n let result = respp.parse(resp);\n\n\n\n sanitize_http_response(&respp)?;\n\n let header = respp\n\n .headers\n\n .iter()\n\n .find(|&&header| header.name == \"Content-Length\")\n\n .ok_or(RAError::ParseError)?;\n\n let len_str = String::from_utf8(header.value.to_vec()).map_err(|_| RAError::ParseError)?;\n\n let len_num = len_str.parse::<u32>().map_err(|_| RAError::ParseError)?;\n\n if len_num == 0 {\n\n Ok(Vec::new())\n\n } else {\n\n let status = result.map_err(|_| RAError::ParseError)?;\n\n let header_len = match status {\n\n httparse::Status::Complete(l) => l,\n\n _ => {\n\n return Err(RAError::ParseError);\n\n }\n\n };\n\n let resp_body = &resp[header_len..];\n\n let base64_body = str::from_utf8(resp_body).map_err(|_| RAError::ParseError)?;\n\n base64::decode(base64_body).map_err(|_| RAError::ParseError)\n\n }\n\n}\n\n\n", "file_path": "chain-tx-enclave/tx-query/enclave/src/attest.rs", "rank": 22, "score": 182318.30978982145 }, { "content": "pub fn pure_account_storage(depth: usize) -> BinaryMerkleTreeResult<AccountStorage> {\n\n AccountStorage::new(Storage::new_db(Arc::new(create_memorydb(1))), depth)\n\n}\n\n\n\n#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)]\n\npub struct AccountWrapper(pub StakedState);\n\n\n\nimpl Encode for AccountWrapper {\n\n #[inline]\n\n fn encode(&self) -> Result<Vec<u8>, Exception> {\n\n Ok(self.0.encode())\n\n }\n\n}\n\n\n\nimpl Decode for AccountWrapper {\n\n #[inline]\n\n fn decode(buffer: &[u8]) -> Result<Self, Exception> {\n\n let data = Vec::from(buffer);\n\n let account = StakedState::decode(&mut data.as_slice())\n\n 
.map_err(|e| Exception::new(&format!(\"failed to decode: {}\", e.what())))?;\n", "file_path": "chain-abci/src/storage/account/mod.rs", "rank": 23, "score": 180386.05400392984 }, { "content": "/// Applies basic checks on transaction inputs\n\npub fn check_inputs_basic(inputs: &[TxoPointer], witness: &TxWitness) -> Result<(), Error> {\n\n // check that there are inputs\n\n if inputs.is_empty() {\n\n return Err(Error::NoInputs);\n\n }\n\n\n\n // check that there are no duplicate inputs\n\n let mut inputs_s = BTreeSet::new();\n\n if !inputs.iter().all(|x| inputs_s.insert(x)) {\n\n return Err(Error::DuplicateInputs);\n\n }\n\n\n\n // verify transaction witnesses\n\n if inputs.len() < witness.len() {\n\n return Err(Error::UnexpectedWitnesses);\n\n }\n\n\n\n if inputs.len() > witness.len() {\n\n return Err(Error::MissingWitnesses);\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "chain-tx-validation/src/lib.rs", "rank": 24, "score": 176672.97579515056 }, { "content": "fn get_account(account_address: &RedeemAddress) -> StakedState {\n\n StakedState::new_init_unbonded(Coin::one(), 0, StakedStateAddress::from(*account_address))\n\n}\n\n\n\nconst TEST_NETWORK_ID: u8 = 0xab;\n\n\n", "file_path": "chain-tx-enclave/tx-query/app/src/test/mod.rs", "rank": 25, "score": 173814.1358886152 }, { "content": "fn get_account(account_address: &RedeemAddress) -> StakedState {\n\n StakedState::new_init_unbonded(\n\n Coin::one(),\n\n 0,\n\n StakedStateAddress::from(*account_address),\n\n )\n\n}\n\n\n\nconst TEST_NETWORK_ID: u8 = 0xab;\n\n\n", "file_path": "chain-tx-enclave/tx-validation/app/src/test/mod.rs", "rank": 26, "score": 173814.1358886152 }, { "content": "/// Returns bip44 cointype of the provided network\n\n/// 1 \t0x80000001 \t \tTestnet (all coins)\n\n/// 394 \t0x8000018a \tCRO \tCrypto.com Chain\n\npub fn get_bip44_coin_type_from_network(network: Network) -> u32 {\n\n match network {\n\n Network::Mainnet => 394,\n\n Network::Testnet => 1,\n\n Network::Devnet => 1,\n\n 
}\n\n}\n\n\n\nmod chosen_network {\n\n use super::*;\n\n pub static mut NETWORK: Network = Network::Devnet;\n\n pub static mut NETWORK_ID: u8 = 0;\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n #[test]\n\n fn init_chain_id_should_setup_correctly() {\n\n init_chain_id(\"dev-chain-y3m1e6-AB\");\n", "file_path": "chain-core/src/init/network.rs", "rank": 27, "score": 173518.32910983067 }, { "content": "fn find_account_from_event_attributes(\n\n attributes: &[Attribute],\n\n) -> Result<Option<StakedStateAddress>> {\n\n let maybe_attribute = find_event_attribute_by_key(attributes, TendermintEventKey::Account)?;\n\n match maybe_attribute {\n\n None => Ok(None),\n\n Some(attribute) => {\n\n let account = base64::decode(&attribute.value).chain(|| {\n\n (\n\n ErrorKind::DeserializationError,\n\n \"Unable to decode base64 bytes of account in block results\",\n\n )\n\n })?;\n\n let address = String::from_utf8(account).chain(|| {\n\n (\n\n ErrorKind::DeserializationError,\n\n \"Unable to decode string of account in block results\",\n\n )\n\n })?;\n\n let redeem_address = RedeemAddress::from_str(&address).chain(|| {\n", "file_path": "client-common/src/tendermint/types/block_results.rs", "rank": 28, "score": 169291.5046944581 }, { "content": "#[cfg(feature = \"base64\")]\n\nfn deserialize_ed25519_base64<'de, D>(deserializer: D) -> Result<[u8; PUBLIC_KEY_SIZE], D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n{\n\n TendermintValidatorPubKey::from_base64(String::deserialize(deserializer)?.as_bytes())\n\n .map(|key| *key.as_bytes())\n\n .map_err(|e| D::Error::custom(format!(\"{}\", e)))\n\n}\n\n\n\n#[cfg(feature = \"hex\")]\n\nimpl fmt::Display for TendermintValidatorPubKey {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n match self {\n\n TendermintValidatorPubKey::Ed25519(key) => write!(f, \"{}\", hex::encode(key)),\n\n }\n\n }\n\n}\n\n\n\n#[cfg(feature = \"base64\")]\n\n#[derive(Error, Debug)]\n", "file_path": "chain-core/src/state/tendermint.rs", 
"rank": 29, "score": 165670.19423472518 }, { "content": "pub fn run() {\n\n crate::program::run_electron();\n\n}\n", "file_path": "client-rpc/src/lib.rs", "rank": 30, "score": 164931.84804829228 }, { "content": "#[cfg(feature = \"base64\")]\n\nfn serialize_ed25519_base64<S>(pk: &[u8], serializer: S) -> Result<S::Ok, S::Error>\n\nwhere\n\n S: Serializer,\n\n{\n\n base64::encode(pk).serialize(serializer)\n\n}\n\n\n", "file_path": "chain-core/src/state/tendermint.rs", "rank": 31, "score": 164736.60647789025 }, { "content": "#[allow(dead_code)]\n\npub fn run_cli() {\n\n env_logger::init();\n\n let options = Options::from_args();\n\n Server::new(options).unwrap().start().unwrap();\n\n}\n\n\n", "file_path": "client-rpc/src/program.rs", "rank": 32, "score": 162690.48577912056 }, { "content": "#[allow(dead_code)]\n\npub fn run_electron() {\n\n env_logger::init();\n\n // \"~/Electron\", \".\", \"--chain-id\", \"ab\"]\n\n let args: Vec<String> = env::args().collect();\n\n log::info!(\"args={:?}\", args);\n\n let mut options = Options::from_iter(vec![\"\"].iter());\n\n if let Some(a) = find_string(&args, \"--chain-id\") {\n\n options.chain_id = args[a + 1].clone()\n\n }\n\n if let Some(a) = find_string(&args, \"--storage-dir\") {\n\n options.storage_dir = args[a + 1].clone()\n\n }\n\n\n\n if let Some(a) = find_string(&args, \"--websocket-url\") {\n\n options.websocket_url = args[a + 1].clone()\n\n }\n\n\n\n let mut storage = dirs::data_dir().expect(\"get storage dir\");\n\n storage.push(\".cro_storage\");\n\n options.storage_dir = storage.to_str().expect(\"get storage dir to_str\").into();\n\n\n\n log::info!(\"Options={:?}\", options);\n\n log::info!(\"Storage={}\", options.storage_dir);\n\n Server::new(options).unwrap().start().unwrap();\n\n}\n", "file_path": "client-rpc/src/program.rs", "rank": 33, "score": 162690.48577912056 }, { "content": "#[cfg(all(feature = \"serde\", feature = \"hex\"))]\n\nfn deserialize_chain_hex_id<'de, D>(deserializer: D) -> 
std::result::Result<u8, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n{\n\n struct StrVisitor;\n\n\n\n impl<'de> de::Visitor<'de> for StrVisitor {\n\n type Value = u8;\n\n\n\n fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n formatter.write_str(\"view key in hexadecimal string\")\n\n }\n\n\n\n #[inline]\n\n fn visit_str<E>(self, value: &str) -> Result<Self::Value, E>\n\n where\n\n E: de::Error,\n\n {\n\n let view_key_vec =\n\n hex::decode(value).map_err(|err| de::Error::custom(err.to_string()))?;\n", "file_path": "chain-core/src/tx/data/attribute.rs", "rank": 34, "score": 160671.7715136549 }, { "content": "/// computes the \"global\" application hash (used by Tendermint to check consistency + block replaying)\n\n/// currently: app_hash = blake2s(root of valid TX merkle tree\n\n/// || root of account/staked state trie || blake2s(scale bytes(rewards pool state)) || blake2s(scale bytes(network params)))\n\n/// TODO: cache (as many parts remain static)\n\n/// MUST/TODO: include node whitelists\n\npub fn compute_app_hash(\n\n valid_tx_id_tree: &MerkleTree<H256>,\n\n account_state_root: &H256,\n\n reward_pool: &RewardsPoolState,\n\n params: &NetworkParameters,\n\n) -> H256 {\n\n let valid_tx_part = valid_tx_id_tree.root_hash();\n\n let rewards_pool_part = reward_pool.hash();\n\n let network_params_part = params.hash();\n\n let mut bs = Vec::new();\n\n bs.extend(&valid_tx_part);\n\n bs.extend(&account_state_root[..]);\n\n bs.extend(&rewards_pool_part);\n\n bs.extend(&network_params_part);\n\n hash256::<Blake2s>(&bs)\n\n}\n\n\n\n/// External information needed for TX validation\n\n#[derive(Clone, Copy, Encode, Decode)]\n\npub struct ChainInfo {\n\n /// minimal fee computed for the transaction\n\n pub min_fee_computed: Fee,\n\n /// network hexamedical ID\n\n pub chain_hex_id: u8,\n\n /// time in the previous committed block\n\n pub previous_block_time: Timespec,\n\n /// how much time is required to wait until stake state's unbonded amount can 
be withdrawn\n\n pub unbonding_period: u32,\n\n}\n", "file_path": "chain-core/src/lib.rs", "rank": 35, "score": 160557.22306149112 }, { "content": "/// Verifies if an account can be unjailed\n\npub fn verify_unjailing(\n\n maintx: &UnjailTx,\n\n extra_info: ChainInfo,\n\n mut account: StakedState,\n\n) -> Result<(Fee, Option<StakedState>), Error> {\n\n check_attributes(maintx.attributes.chain_hex_id, &extra_info)?;\n\n\n\n // checks that account transaction count matches to the one in transaction\n\n if maintx.nonce != account.nonce {\n\n return Err(Error::AccountIncorrectNonce);\n\n }\n\n\n\n // checks that the address in unjail transaction is same as that of account recovered from witness\n\n if maintx.address != account.address {\n\n return Err(Error::MismatchAccountAddress);\n\n }\n\n\n\n match account.jailed_until() {\n\n None => Err(Error::AccountNotJailed),\n\n Some(jailed_until) => {\n\n if jailed_until > extra_info.previous_block_time {\n\n Err(Error::AccountJailed)\n\n } else {\n\n account.unjail();\n\n Ok((Fee::new(Coin::zero()), Some(account))) // Zero fee for unjail transaction\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "chain-tx-validation/src/lib.rs", "rank": 36, "score": 160554.83241629723 }, { "content": "/// checks TransferTx -- TODO: this will be moved to an enclave\n\n/// WARNING: it assumes double-spending BitVec of inputs is checked in chain-abci\n\npub fn verify_transfer(\n\n maintx: &Tx,\n\n witness: &TxWitness,\n\n extra_info: ChainInfo,\n\n transaction_inputs: Vec<TxWithOutputs>,\n\n) -> Result<Fee, Error> {\n\n check_attributes(maintx.attributes.chain_hex_id, &extra_info)?;\n\n check_inputs_basic(&maintx.inputs, witness)?;\n\n check_outputs_basic(&maintx.outputs)?;\n\n let incoins = check_inputs(\n\n &maintx.id(),\n\n &maintx.inputs,\n\n witness,\n\n &extra_info,\n\n transaction_inputs,\n\n )?;\n\n let outcoins = maintx.get_output_total();\n\n if let Err(_coin_err) = outcoins {\n\n return Err(Error::InvalidSum); // FIXME: 
Err(Error::InvalidSum(coin_err));\n\n }\n\n check_input_output_sums(incoins, outcoins.unwrap(), &extra_info)\n\n}\n\n\n", "file_path": "chain-tx-validation/src/lib.rs", "rank": 37, "score": 160549.35712406607 }, { "content": "pub fn get_nodes(\n\n addresses: &[Account],\n\n) -> BTreeMap<\n\n RedeemAddress,\n\n (\n\n ValidatorName,\n\n ValidatorSecurityContact,\n\n TendermintValidatorPubKey,\n\n ),\n\n> {\n\n addresses\n\n .iter()\n\n .map(|acct| {\n\n (\n\n acct.address,\n\n (acct.name.clone(), None, acct.validator_pub_key.clone()),\n\n )\n\n })\n\n .collect()\n\n}\n", "file_path": "test-common/src/chain_env.rs", "rank": 38, "score": 160549.35712406607 }, { "content": "pub fn monetary_expansion(\n\n tau: u64,\n\n total_staking: Coin,\n\n minted: Coin,\n\n params: &NetworkParameters,\n\n) -> Coin {\n\n let cap = params.get_rewards_monetary_expansion_cap();\n\n let r0 = FixedNumber::from_num(params.get_rewards_monetary_expansion_r0().as_millis()) / 1000;\n\n let tau = FixedNumber::from_num(tau);\n\n let total_staking = FixedNumber::from_num(u64::from(total_staking));\n\n let amount = total_staking\n\n * r0\n\n * exp(max(\n\n FixedNumber::from_num(EXP_LOWER_BOUND),\n\n -total_staking / tau,\n\n ));\n\n min(\n\n (cap - minted).unwrap_or_default(),\n\n Coin::new(amount.to_num()).unwrap(),\n\n )\n\n}\n\n\n", "file_path": "chain-abci/src/app/rewards.rs", "rank": 39, "score": 160549.35712406607 }, { "content": "/// checks moving some amount from bonded to unbonded in staked states\n\n/// NOTE: witness is assumed to be checked in chain-abci\n\npub fn verify_unbonding(\n\n maintx: &UnbondTx,\n\n extra_info: ChainInfo,\n\n mut account: StakedState,\n\n) -> Result<(Fee, Option<StakedState>), Error> {\n\n verify_unjailed(&account)?;\n\n check_attributes(maintx.attributes.chain_hex_id, &extra_info)?;\n\n\n\n if maintx.from_staked_account != account.address {\n\n return Err(Error::MismatchAccountAddress);\n\n }\n\n // checks that account transaction count matches to the one in 
transaction\n\n if maintx.nonce != account.nonce {\n\n return Err(Error::AccountIncorrectNonce);\n\n }\n\n // check that a non-zero amount is being unbound\n\n if maintx.value == Coin::zero() {\n\n return Err(Error::ZeroCoin);\n\n }\n\n check_input_output_sums(account.bonded, maintx.value, &extra_info)?;\n\n account.unbond(\n\n maintx.value,\n\n extra_info.min_fee_computed.to_coin(),\n\n extra_info.previous_block_time + Timespec::from(extra_info.unbonding_period),\n\n );\n\n // only pay the minimal fee from the bonded amount if correct; the rest remains in bonded\n\n Ok((extra_info.min_fee_computed, Some(account)))\n\n}\n\n\n", "file_path": "chain-tx-validation/src/lib.rs", "rank": 40, "score": 160549.35712406607 }, { "content": "pub fn validator_pub_key() -> PublicKey {\n\n validator_priv_key().public_key()\n\n}\n\n\n", "file_path": "client-common/src/tendermint/mock.rs", "rank": 41, "score": 160036.82243859622 }, { "content": "fn find_tx_id_from_event_attributes(attributes: &[Attribute]) -> Result<Option<[u8; 32]>> {\n\n let maybe_attribute = find_event_attribute_by_key(attributes, TendermintEventKey::TxId)?;\n\n match maybe_attribute {\n\n None => Ok(None),\n\n Some(attribute) => {\n\n let tx_id = base64::decode(&attribute.value).chain(|| {\n\n (\n\n ErrorKind::DeserializationError,\n\n \"Unable to decode base64 bytes of transaction id in block results\",\n\n )\n\n })?;\n\n let tx_id = hex::decode(&tx_id).chain(|| {\n\n (\n\n ErrorKind::DeserializationError,\n\n \"Unable to decode hex bytes of transaction id in block results\",\n\n )\n\n })?;\n\n if 32 != tx_id.len() {\n\n return Err(Error::new(\n\n ErrorKind::DeserializationError,\n", "file_path": "client-common/src/tendermint/types/block_results.rs", "rank": 42, "score": 159733.1357136243 }, { "content": "fn expect_error<T, Error>(res: &Result<T, Error>, expected: Error)\n\nwhere\n\n Error: Debug,\n\n{\n\n match res {\n\n Err(err) if mem::discriminant(&expected) == mem::discriminant(err) => {}\n\n Err(err) => 
panic!(\"Expected error {:?} but got {:?}\", expected, err),\n\n Ok(_) => panic!(\"Expected error {:?} but succeeded\", expected),\n\n }\n\n}\n\n\n", "file_path": "chain-abci/tests/tx_validation.rs", "rank": 43, "score": 159188.3320234976 }, { "content": "fn parse_response_attn_report(resp: &[u8]) -> Result<AttnReport, RAError> {\n\n let mut headers = [httparse::EMPTY_HEADER; 16];\n\n let mut respp = httparse::Response::new(&mut headers);\n\n let result = respp.parse(resp);\n\n\n\n sanitize_http_response(&respp)?;\n\n\n\n let mut sig = String::new();\n\n let mut sig_cert = String::new();\n\n let mut attn_report = String::new();\n\n\n\n for header in respp.headers {\n\n match header.name {\n\n \"Content-Length\" => {\n\n let len_str =\n\n String::from_utf8(header.value.to_vec()).map_err(|_| RAError::ParseError)?;\n\n let len_num = len_str.parse::<u32>().map_err(|_| RAError::ParseError)?;\n\n if len_num != 0 {\n\n let status = result.map_err(|_| RAError::ParseError)?;\n\n let header_len = match status {\n", "file_path": "chain-tx-enclave/tx-query/enclave/src/attest.rs", "rank": 44, "score": 159008.21921025304 }, { "content": "/// checks wihdrawing from a staked state\n\n/// NOTE: witness is assumed to be checked in chain-abci\n\n/// TODO: move this to chain-abci? 
(the account update)\n\npub fn verify_unbonded_withdraw(\n\n maintx: &WithdrawUnbondedTx,\n\n extra_info: ChainInfo,\n\n mut account: StakedState,\n\n) -> Result<(Fee, Option<StakedState>), Error> {\n\n let fee = verify_unbonded_withdraw_core(maintx, extra_info, &account)?;\n\n account.withdraw();\n\n Ok((fee, Some(account)))\n\n}\n\n\n", "file_path": "chain-tx-validation/src/lib.rs", "rank": 45, "score": 158506.81043032708 }, { "content": "/// verify a given extended address is associated to the witness\n\n/// and the signature against the given transaction `Tx`\n\n/// TODO: capture possible errors in enum?\n\n///\n\npub fn verify_tx_address(\n\n witness: &TxInWitness,\n\n txid: &TxId,\n\n address: &ExtendedAddr,\n\n) -> Result<(), secp256k1::Error> {\n\n let secp = Secp256k1::verification_only();\n\n let message = Message::from_slice(&txid[..])?;\n\n\n\n match (witness, address) {\n\n (TxInWitness::TreeSig(sig, proof), ExtendedAddr::OrTree(root_hash)) => {\n\n if !proof.verify(root_hash) {\n\n Err(secp256k1::Error::InvalidPublicKey)\n\n } else {\n\n schnorr_verify(\n\n &secp,\n\n &message,\n\n &sig,\n\n &PublicKey::from_slice(proof.value().as_bytes())?,\n\n )\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "chain-tx-validation/src/witness.rs", "rank": 46, "score": 158506.71238561207 }, { "content": "/// checks depositing to a staked state\n\n/// WARNING: it assumes double-spending BitVec of inputs is checked in chain-abci\n\n/// TODO: move this to chain-abci? 
(the account creation / update)\n\npub fn verify_bonded_deposit(\n\n maintx: &DepositBondTx,\n\n witness: &TxWitness,\n\n extra_info: ChainInfo,\n\n transaction_inputs: Vec<TxWithOutputs>,\n\n maccount: Option<StakedState>,\n\n) -> Result<(Fee, Option<StakedState>), Error> {\n\n if let Some(ref account) = maccount {\n\n verify_unjailed(account)?;\n\n }\n\n\n\n let incoins = verify_bonded_deposit_core(maintx, witness, extra_info, transaction_inputs)?;\n\n let deposit_amount = (incoins - extra_info.min_fee_computed.to_coin()).expect(\"init\");\n\n let account = match maccount {\n\n Some(mut a) => {\n\n a.deposit(deposit_amount);\n\n Some(a)\n\n }\n\n None => Some(StakedState::new_init_bonded(\n\n deposit_amount,\n\n extra_info.previous_block_time,\n\n maintx.to_staked_account,\n\n None,\n\n )),\n\n };\n\n Ok((extra_info.min_fee_computed, account))\n\n}\n\n\n", "file_path": "chain-tx-validation/src/lib.rs", "rank": 48, "score": 158506.61246335373 }, { "content": "/// Given a db and a DB transaction, it will go through TX inputs and mark them as spent\n\n/// in the TX_META storage and it will create a new entry for TX in TX_META with all outputs marked as unspent.\n\npub fn update_utxos_commit(\n\n inputs: &[TxoPointer],\n\n no_of_outputs: TxoIndex,\n\n txid: TxId,\n\n db: Arc<dyn KeyValueDB>,\n\n dbtx: &mut DBTransaction,\n\n) {\n\n spend_utxos(inputs, db, dbtx);\n\n dbtx.put(\n\n COL_TX_META,\n\n &txid,\n\n &BitVec::from_elem(no_of_outputs as usize, false).to_bytes(),\n\n );\n\n}\n\n\n\nimpl<T: EnclaveProxy> ChainNodeApp<T> {\n\n pub fn process_txs(&mut self, inittx: &mut DBTransaction) {\n\n for txaux in self.delivered_txs.iter() {\n\n let txid: TxId = txaux.tx_id();\n\n // TODO: do the abci-query mock via enclave mock + switch to tx-query in client by default\n", "file_path": "chain-abci/src/app/commit.rs", "rank": 49, "score": 158501.8854723036 }, { "content": "/// helper method to validate basic assumptions\n\npub fn is_basic_valid_tx_request(\n\n request: 
&VerifyTxRequest,\n\n tx_inputs: &Option<Vec<SealedLog>>,\n\n chain_hex_id: u8,\n\n) -> Result<(), ()> {\n\n if request.info.chain_hex_id != chain_hex_id {\n\n return Err(());\n\n }\n\n match request.tx {\n\n TxEnclaveAux::DepositStakeTx { .. } => match tx_inputs {\n\n Some(ref i) if !i.is_empty() => Ok(()),\n\n _ => Err(()),\n\n },\n\n TxEnclaveAux::TransferTx { .. } => match tx_inputs {\n\n Some(ref i) if !i.is_empty() => Ok(()),\n\n _ => Err(()),\n\n },\n\n TxEnclaveAux::WithdrawUnbondedStakeTx { .. } => {\n\n if request.account.is_some() {\n\n Ok(())\n", "file_path": "enclave-protocol/src/lib.rs", "rank": 50, "score": 158501.8854723036 }, { "content": "/// tx filter\n\ntype TxFilter = [u8; 256];\n\n\n\n/// Internal encryption request\n\n#[derive(Encode, Decode)]\n\npub struct IntraEncryptRequest {\n\n /// transaction ID\n\n pub txid: TxId,\n\n /// EncryptionRequest\n\n pub sealed_enc_request: SealedLog,\n\n /// transaction inputs (if any)\n\n pub tx_inputs: Option<Vec<SealedLog>>,\n\n /// last chain info\n\n pub info: ChainInfo,\n\n}\n\n\n\n/// variable length request passed to the tx-validation enclave\n\n#[derive(Encode, Decode)]\n\npub enum IntraEnclaveRequest {\n\n ValidateTx {\n\n request: Box<VerifyTxRequest>,\n\n tx_inputs: Option<Vec<SealedLog>>,\n\n },\n\n EndBlock,\n\n Encrypt(Box<IntraEncryptRequest>),\n\n}\n\n\n", "file_path": "enclave-protocol/src/lib.rs", "rank": 51, "score": 158269.25463807432 }, { "content": "/// checks depositing to a staked state -- TODO: this will be moved to an enclave\n\n/// WARNING: it assumes double-spending BitVec of inputs is checked in chain-abci\n\npub fn verify_bonded_deposit_core(\n\n maintx: &DepositBondTx,\n\n witness: &TxWitness,\n\n extra_info: ChainInfo,\n\n transaction_inputs: Vec<TxWithOutputs>,\n\n) -> Result<Coin, Error> {\n\n check_attributes(maintx.attributes.chain_hex_id, &extra_info)?;\n\n check_inputs_basic(&maintx.inputs, witness)?;\n\n let incoins = check_inputs(\n\n &maintx.id(),\n\n 
&maintx.inputs,\n\n witness,\n\n &extra_info,\n\n transaction_inputs,\n\n )?;\n\n if incoins <= extra_info.min_fee_computed.to_coin() {\n\n return Err(Error::InputOutputDoNotMatch);\n\n }\n\n Ok(incoins)\n\n}\n\n\n", "file_path": "chain-tx-validation/src/lib.rs", "rank": 52, "score": 156542.05724566133 }, { "content": "/// checks wihdrawing from a staked state -- TODO: this will be moved to an enclave\n\n/// NOTE: witness is assumed to be checked in chain-abci\n\npub fn verify_unbonded_withdraw_core(\n\n maintx: &WithdrawUnbondedTx,\n\n extra_info: ChainInfo,\n\n account: &StakedState,\n\n) -> Result<Fee, Error> {\n\n verify_unjailed(account)?;\n\n\n\n check_attributes(maintx.attributes.chain_hex_id, &extra_info)?;\n\n check_outputs_basic(&maintx.outputs)?;\n\n // checks that account transaction count matches to the one in transaction\n\n if maintx.nonce != account.nonce {\n\n return Err(Error::AccountIncorrectNonce);\n\n }\n\n // checks that account can withdraw to outputs\n\n if account.unbonded_from > extra_info.previous_block_time {\n\n return Err(Error::AccountNotUnbonded);\n\n }\n\n // checks that there is something to wihdraw\n\n if account.unbonded == Coin::zero() {\n\n return Err(Error::ZeroCoin);\n", "file_path": "chain-tx-validation/src/lib.rs", "rank": 53, "score": 156542.05724566133 }, { "content": "/// verify the signature against the given transation `Tx`\n\n/// and recovers the address from it\n\n///\n\npub fn verify_tx_recover_address(\n\n witness: &StakedStateOpWitness,\n\n txid: &TxId,\n\n) -> Result<StakedStateAddress, secp256k1::Error> {\n\n match witness {\n\n StakedStateOpWitness::BasicRedeem(sig) => {\n\n let secp = Secp256k1::verification_only();\n\n let message = Message::from_slice(txid)?;\n\n let pk = secp.recover(&message, &sig)?;\n\n secp.verify(&message, &sig.to_standard(), &pk)?;\n\n Ok(StakedStateAddress::BasicRedeem(RedeemAddress::from(&pk)))\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\npub mod tests {\n\n use super::*;\n\n\n\n use 
secp256k1::schnorrsig::schnorr_sign;\n", "file_path": "chain-tx-validation/src/witness.rs", "rank": 54, "score": 156542.05724566133 }, { "content": "/// raw sgx_sealed_data_t\n\ntype SealedLog = Vec<u8>;\n\n\n", "file_path": "enclave-protocol/src/lib.rs", "rank": 55, "score": 156111.14472478017 }, { "content": "fn extract_att_parts(payload: Vec<u8>) -> Result<(Vec<u8>, Vec<u8>, Vec<u8>)> {\n\n // Extract each field\n\n let mut iter = payload.split(|x| *x == 0x7C);\n\n let attn_report_raw = iter\n\n .next()\n\n .chain(|| (ErrorKind::InvalidInput, \"Invalid SGX certificate format\"))?;\n\n let sig_raw = iter\n\n .next()\n\n .chain(|| (ErrorKind::InvalidInput, \"Invalid SGX certificate format\"))?;\n\n let sig = base64::decode(&sig_raw)\n\n .chain(|| (ErrorKind::InvalidInput, \"Invalid SGX certificate format\"))?;\n\n let sig_cert_raw = iter\n\n .next()\n\n .chain(|| (ErrorKind::InvalidInput, \"Invalid SGX certificate format\"))?;\n\n let sig_cert_dec = base64::decode_config(&sig_cert_raw, base64::STANDARD)\n\n .chain(|| (ErrorKind::InvalidInput, \"Invalid SGX certificate format\"))?;\n\n Ok((attn_report_raw.to_vec(), sig, sig_cert_dec))\n\n}\n\n\n", "file_path": "client-core/src/cipher/sgx.rs", "rank": 56, "score": 155532.4535084485 }, { "content": "/// returns the original IAS certificate + the base64-decoded payload\n\nfn get_ias_cert() -> Result<(&'static [u8], Vec<u8>)> {\n\n let ias_report_ca = IAS_CERT;\n\n let mut ias_ca_stripped: Vec<u8> = ias_report_ca.to_vec();\n\n ias_ca_stripped.retain(|&x| x != 0x0d && x != 0x0a);\n\n let head_len = \"-----BEGIN CERTIFICATE-----\".len();\n\n let tail_len = \"-----END CERTIFICATE-----\".len();\n\n let full_len = ias_ca_stripped.len();\n\n let ias_ca_core: &[u8] = &ias_ca_stripped[head_len..full_len - tail_len];\n\n let ias_cert_dec = base64::decode_config(ias_ca_core, base64::STANDARD)\n\n .chain(|| (ErrorKind::InvalidInput, \"Invalid SGX certificate format\"))?;\n\n Ok((ias_report_ca, ias_cert_dec))\n\n}\n\n\n\n/// seems 
the closure is needed for type inference\n", "file_path": "client-core/src/cipher/sgx.rs", "rank": 57, "score": 154583.246351901 }, { "content": "pub fn header() -> Header {\n\n block().header\n\n}\n\n\n", "file_path": "client-common/src/tendermint/mock.rs", "rank": 58, "score": 153481.7847518113 }, { "content": "pub fn block() -> Block {\n\n serde_json::from_str(\n\n r#\"{\n\n \"header\": {\n\n \"version\": {\n\n \"block\": \"10\",\n\n \"app\": \"0\"\n\n },\n\n \"chain_id\": \"test-chain-y3m1e6-AB\",\n\n \"height\": \"1\",\n\n \"time\": \"2019-11-18T05:49:16.254417Z\",\n\n \"num_txs\": \"0\",\n\n \"total_txs\": \"0\",\n\n \"last_block_id\": {\n\n \"hash\": \"\",\n\n \"parts\": {\n\n \"total\": \"0\",\n\n \"hash\": \"\"\n\n }\n\n },\n", "file_path": "client-common/src/tendermint/mock.rs", "rank": 59, "score": 153481.7847518113 }, { "content": "pub fn genesis() -> Genesis {\n\n serde_json::from_str(DEFAULT_GENESIS_JSON).unwrap()\n\n}\n\n\n", "file_path": "client-common/src/tendermint/mock.rs", "rank": 60, "score": 153481.7847518113 }, { "content": "/// Unfortunately the usual Rust unit-test facility can't be used with Baidu SGX SDK,\n\n/// so this has to be run as a normal app\n\npub fn test_sealing() {\n\n let mut builder = Builder::new();\n\n\n\n builder\n\n .filter(None, LevelFilter::Debug)\n\n .write_style(WriteStyle::Always)\n\n .init();\n\n let mut db = Db::open(\".enclave-test\").expect(\"failed to open a storage path\");\n\n let mut metadb = db\n\n .open_tree(crate::META_KEYSPACE)\n\n .expect(\"failed to open a meta keyspace\");\n\n let mut txdb = db\n\n .open_tree(crate::TX_KEYSPACE)\n\n .expect(\"failed to open a tx keyspace\");\n\n\n\n let enclave = match init_enclave(true) {\n\n Ok(r) => {\n\n info!(\"[+] Init Enclave Successful {}!\", r.geteid());\n\n r\n\n }\n", "file_path": "chain-tx-enclave/tx-validation/app/src/test/mod.rs", "rank": 61, "score": 152868.74656341813 }, { "content": "/// Generates the TLS cert + private key\n\n///\n\n/// # 
Arguments\n\n///\n\n/// * `payload` - The remote attestation payload that would be included in the certificate extension:\n\n/// format \"<attestion report>|<IAS signature>|<IAS certificate>\"\n\n/// * `prv_k` - private key, currently P-256 from Intel SGX SDK's crypto library\n\n/// * `pub_k` - corresponding public key, currently P-256 from Intel SGX SDK's crypto library\n\n/// * `ecc_handle` - auxiliary object for ECC operations from Intel SGX SDK's crypto library\n\n///\n\npub fn gen_ecc_cert(\n\n payload: String,\n\n prv_k: &sgx_ec256_private_t,\n\n pub_k: &sgx_ec256_public_t,\n\n ecc_handle: &SgxEccHandle,\n\n) -> Result<CertKeyPair, sgx_status_t> {\n\n // Generate public key bytes since both DER will use it\n\n let mut pub_key_bytes: Vec<u8> = vec![4];\n\n let mut pk_gx = pub_k.gx.clone();\n\n pk_gx.reverse();\n\n let mut pk_gy = pub_k.gy.clone();\n\n pk_gy.reverse();\n\n pub_key_bytes.extend_from_slice(&pk_gx);\n\n pub_key_bytes.extend_from_slice(&pk_gy);\n\n\n\n // Generate Certificate DER\n\n let cert_der = yasna::construct_der(|writer| {\n\n writer.write_sequence(|writer| {\n\n writer.next().write_sequence(|writer| {\n\n // Certificate Version\n", "file_path": "chain-tx-enclave/tx-query/enclave/src/cert.rs", "rank": 62, "score": 152867.450722338 }, { "content": "/// Spawns websocket rpc loop in a new thread\n\n///\n\n/// # How it works\n\n///\n\n/// - Connects to websocket server at given `url` and splits the connection in `reader` and `writer`.\n\n/// - Spawns a thread and runs `websocket_rpc_loop` in the thread which continues until the thread panics.\n\n/// - For each websocket message received:\n\n/// - Parse the message into JSON-RPC response.\n\n/// - Pop the response channel from `channel_map` corresponding to response's `request_id`.\n\n/// - Send the response to the channel.\n\npub fn spawn(\n\n channel_map: Arc<Mutex<HashMap<String, SyncSender<JsonRpcResponse>>>>,\n\n mut websocket_reader: Reader<TcpStream>,\n\n websocket_writer: 
Arc<Mutex<Writer<TcpStream>>>,\n\n) -> JoinHandle<()> {\n\n thread::spawn(move || {\n\n for message in websocket_reader.incoming_messages() {\n\n match message {\n\n Ok(message) => match message {\n\n OwnedMessage::Text(ref message) => handle_text(message, channel_map.clone()),\n\n OwnedMessage::Binary(ref message) => handle_slice(message, channel_map.clone()),\n\n OwnedMessage::Ping(data) => send_pong(websocket_writer.clone(), data),\n\n _ => {\n\n log::trace!(\"Received unknown message: {:?}\", message);\n\n }\n\n },\n\n Err(err) => {\n\n log::error!(\"Websocket error message: {}\", err);\n\n break;\n\n }\n\n }\n\n }\n\n })\n\n}\n\n\n", "file_path": "client-common/src/tendermint/websocket_rpc_client/websocket_rpc_loop.rs", "rank": 63, "score": 152863.74524302 }, { "content": "pub fn encrypt_tx(\n\n eid: sgx_enclave_id_t,\n\n request: IntraEnclaveRequest,\n\n) -> Result<TxObfuscated, chain_tx_validation::Error> {\n\n let request_buf: Vec<u8> = request.encode();\n\n let mut response_buf: Vec<u8> = vec![0u8; request_buf.len()];\n\n let mut retval: sgx_status_t = sgx_status_t::SGX_SUCCESS;\n\n let response_slice = &mut response_buf[..];\n\n let result = unsafe {\n\n ecall_check_tx(\n\n eid,\n\n &mut retval,\n\n request_buf.as_ptr(),\n\n request_buf.len(),\n\n response_slice.as_mut_ptr(),\n\n response_buf.len() as u32,\n\n )\n\n };\n\n if retval == sgx_status_t::SGX_SUCCESS && result == retval {\n\n let response = IntraEnclaveResponse::decode(&mut response_buf.as_slice());\n\n match response {\n\n Ok(Ok(IntraEnclaveResponseOk::Encrypt(obftx))) => Ok(obftx),\n\n Ok(Err(e)) => Err(e),\n\n _ => Err(Error::EnclaveRejected),\n\n }\n\n } else {\n\n Err(Error::EnclaveRejected)\n\n }\n\n}\n\n\n", "file_path": "chain-tx-enclave/tx-validation/app/src/enclave_u/mod.rs", "rank": 64, "score": 152863.74524302 }, { "content": "pub fn end_block(\n\n eid: sgx_enclave_id_t,\n\n request: IntraEnclaveRequest,\n\n) -> Result<Option<Box<[u8; 256]>>, ()> {\n\n let request_buf: Vec<u8> = 
request.encode();\n\n // Buffer size: Result(1)+Result(1)+Enum(1)+Option(1)+Box(0)+TxFilter(256)\n\n let mut response_buf: Vec<u8> = vec![0u8; 260];\n\n let mut retval: sgx_status_t = sgx_status_t::SGX_SUCCESS;\n\n let response_slice = &mut response_buf[..];\n\n let result = unsafe {\n\n ecall_check_tx(\n\n eid,\n\n &mut retval,\n\n request_buf.as_ptr(),\n\n request_buf.len(),\n\n response_slice.as_mut_ptr(),\n\n response_buf.len() as u32,\n\n )\n\n };\n\n if retval == sgx_status_t::SGX_SUCCESS && result == retval {\n\n let response = IntraEnclaveResponse::decode(&mut response_buf.as_slice());\n\n match response {\n\n Ok(Ok(IntraEnclaveResponseOk::EndBlock(maybe_filter))) => Ok(maybe_filter),\n\n _ => Err(()),\n\n }\n\n } else {\n\n Err(())\n\n }\n\n}\n\n\n", "file_path": "chain-tx-enclave/tx-validation/app/src/enclave_u/mod.rs", "rank": 65, "score": 152863.74524302 }, { "content": "pub fn check_initchain(\n\n eid: sgx_enclave_id_t,\n\n chain_hex_id: u8,\n\n last_app_hash: Option<H256>,\n\n) -> Result<(), Option<H256>> {\n\n let mut retval: sgx_status_t = sgx_status_t::SGX_SUCCESS;\n\n let result = unsafe { ecall_initchain(eid, &mut retval, chain_hex_id) };\n\n if retval == sgx_status_t::SGX_SUCCESS && result == retval {\n\n Ok(())\n\n } else {\n\n Err(last_app_hash)\n\n }\n\n}\n\n\n", "file_path": "chain-tx-enclave/tx-validation/app/src/enclave_u/mod.rs", "rank": 66, "score": 152863.74524302 }, { "content": "pub fn test_integration() {\n\n let mut builder = Builder::new();\n\n let validation_path =\n\n std::env::var(\"TX_VALIDATION_BIN_DIR\").unwrap_or(\"/root/sgx/tx-validation/bin/\".to_owned());\n\n let query_server_host = std::env::var(\"TX_QUERY_APP_HOST\").unwrap_or(\"0.0.0.0\".to_owned());\n\n let query_server_port = std::env::var(\"TX_QUERY_APP_PORT\").unwrap_or(\"3443\".to_owned());\n\n let query_server_addr = format!(\"{}:{}\", query_server_host, query_server_port);\n\n let validation_dir = Path::new(&validation_path);\n\n let connection_socket = format! 
{\"ipc://{}integration.enclave\", validation_path};\n\n builder\n\n .filter(None, LevelFilter::Info)\n\n .write_style(WriteStyle::Always)\n\n .init();\n\n let mut validation = Command::new(\"./tx-validation-app\")\n\n .current_dir(validation_dir)\n\n .env(\"TX_ENCLAVE_STORAGE\", \".enclave-integration\")\n\n .env(\"RUST_LOG\", \"debug\")\n\n .args(&[&connection_socket])\n\n .spawn()\n\n .expect(\"failed to start tx validation\");\n", "file_path": "chain-tx-enclave/tx-query/app/src/test/mod.rs", "rank": 67, "score": 152863.74524302 }, { "content": "/// Monitors websocket connection and retries if websocket is disconnected\n\n///\n\n/// # How it works\n\n///\n\n/// - Websocket connection has two possible states:\n\n/// - `Connected`: `websocket_rpc_loop` is connected to websocket server\n\n/// - `Disconnected`: `websocket_rpc_loop` is disconnected from websocket server. Connection should be retried.\n\n/// - This function spawns a thread and runs connection state machine in a loop.\n\n/// - If current state is `Disconnected`: Spawns `websocket_rpc_loop` and sets state to `Connected`.\n\n/// - If current state is `Connected`: Waits for `websocket_rpc_loop` thread to end and sets state to `Disconnected`.\n\npub fn monitor(\n\n url: String,\n\n channel_map: Arc<Mutex<HashMap<String, SyncSender<JsonRpcResponse>>>>,\n\n loop_handle: JoinHandle<()>,\n\n websocket_writer: Arc<Mutex<Writer<TcpStream>>>,\n\n) -> Arc<Mutex<ConnectionState>> {\n\n let connection_state = Arc::new(Mutex::new(ConnectionState::Connected));\n\n let connection_state_clone = connection_state.clone();\n\n\n\n thread::spawn(move || {\n\n let mut connection_handle = Some(loop_handle);\n\n\n\n loop {\n\n let connection_state = *connection_state_clone\n\n .lock()\n\n .expect(\"Unable to acquire lock on connection state\");\n\n\n\n let (new_connection_state, new_connection_handle) = match connection_state {\n\n ConnectionState::Disconnected => {\n\n log::warn!(\"Websocket RPC is disconnected. 
Trying to reconnect\");\n", "file_path": "client-common/src/tendermint/websocket_rpc_client/websocket_rpc_loop.rs", "rank": 68, "score": 152863.74524302 }, { "content": "pub fn check_tx(\n\n eid: sgx_enclave_id_t,\n\n request: IntraEnclaveRequest,\n\n txdb: &mut Tree,\n\n) -> Result<(Fee, Option<StakedState>), Error> {\n\n let request_buf: Vec<u8> = request.encode();\n\n let response_len = size_of::<sgx_sealed_data_t>() + request_buf.len();\n\n let mut response_buf: Vec<u8> = vec![0u8; response_len];\n\n let mut retval: sgx_status_t = sgx_status_t::SGX_SUCCESS;\n\n let response_slice = &mut response_buf[..];\n\n let result = unsafe {\n\n ecall_check_tx(\n\n eid,\n\n &mut retval,\n\n request_buf.as_ptr(),\n\n request_buf.len(),\n\n response_slice.as_mut_ptr(),\n\n response_buf.len() as u32,\n\n )\n\n };\n", "file_path": "chain-tx-enclave/tx-validation/app/src/enclave_u/mod.rs", "rank": 69, "score": 152863.74524302 }, { "content": "/// Returns the chosen network type\n\npub fn get_network() -> Network {\n\n unsafe { chosen_network::NETWORK }\n\n}\n\n\n", "file_path": "chain-core/src/init/network.rs", "rank": 70, "score": 151439.86290312972 }, { "content": "pub fn status_response() -> Status {\n\n Status {\n\n node_info: node_info(),\n\n sync_info: sync_info(),\n\n validator_info: validator_info(),\n\n }\n\n}\n", "file_path": "client-common/src/tendermint/mock.rs", "rank": 71, "score": 151434.31310004884 }, { "content": "/// Random entropy, part of extended key.\n\ntype ChainCode = Vec<u8>;\n\n\n\n#[derive(Debug, Clone, PartialEq, Eq)]\n\n/// extended key for hdwallet\n\npub struct ExtendedPrivKey {\n\n /// privatekey for extended key in hdwallet\n\n pub private_key: SecretKey,\n\n /// chain kind for hdwallet\n\n pub chain_code: ChainCode,\n\n}\n\n\n\n/// Indicate bits of random seed used to generate private key, 256 is recommended.\n\n#[derive(Debug, Copy, Clone, PartialEq, Eq)]\n\npub enum KeySeed {\n\n /// 128 seed\n\n S128 = 128,\n\n /// 256 seed\n\n S256 = 
256,\n\n /// 512 seed\n\n S512 = 512,\n", "file_path": "client-core/src/hd_wallet/extended_key.rs", "rank": 72, "score": 150179.47671131179 }, { "content": "fn init_network_id(id: u8) {\n\n unsafe {\n\n INIT_NETWORK_ID.call_once(|| {\n\n chosen_network::NETWORK_ID = id;\n\n });\n\n }\n\n}\n\n\n", "file_path": "chain-core/src/init/network.rs", "rank": 73, "score": 150112.76853193267 }, { "content": "fn prefix_with_byte<T: AsRef<[u8]>>(value: T, prefix: u8) -> Vec<u8> {\n\n let value = value.as_ref();\n\n let mut bytes = Vec::with_capacity(value.len() + 1);\n\n bytes.push(prefix);\n\n bytes.extend_from_slice(value);\n\n bytes\n\n}\n\n\n", "file_path": "chain-core/src/common/merkle_tree.rs", "rank": 74, "score": 150087.49192674665 }, { "content": "#[inline]\n\npub fn check_unseal<I>(\n\n view_key: Option<PublicKey>,\n\n check_allowed_views: bool,\n\n txids: I,\n\n mut sealed_logs: Vec<Vec<u8>>,\n\n) -> Option<Vec<TxWithOutputs>>\n\nwhere\n\n I: IntoIterator<Item = TxId> + ExactSizeIterator,\n\n{\n\n let mut return_result = Vec::with_capacity(sealed_logs.len());\n\n for (txid, sealed_log) in txids.into_iter().zip(sealed_logs.iter_mut()) {\n\n if sealed_log.len() >= (std::u32::MAX as usize) {\n\n return None;\n\n }\n\n let opt = unsafe {\n\n SgxSealedData::<[u8]>::from_raw_sealed_data_t(\n\n sealed_log.as_mut_ptr() as *mut sgx_sealed_data_t,\n\n sealed_log.len() as u32,\n\n )\n\n };\n", "file_path": "chain-tx-enclave/enclave-t-common/src/lib.rs", "rank": 75, "score": 149474.48487340653 }, { "content": "pub fn validators_response() -> ValidatorsResponse {\n\n serde_json::from_str(\n\n r#\"{\n\n \"block_height\": \"1\",\n\n \"validators\": [\n\n {\n\n \"address\": \"41D5FC236EDF35E68160BA0EA240A0E255EF6799\",\n\n \"pub_key\": {\n\n \"type\": \"tendermint/PubKeyEd25519\",\n\n \"value\": \"2H0sZxyy5iOU6q0/F+ZCQ3MyJJxg8odE5NMsGIyfFV0=\"\n\n },\n\n \"voting_power\": \"12500000000\",\n\n \"proposer_priority\": \"0\"\n\n }\n\n ]\n\n}\"#,\n\n )\n\n .unwrap()\n\n}\n\n\n", 
"file_path": "client-common/src/tendermint/mock.rs", "rank": 76, "score": 149474.48487340653 }, { "content": "pub fn commit_response() -> CommitResponse {\n\n serde_json::from_str(r#\"{\n\n \"signed_header\": {\n\n \"header\": {\n\n \"version\": {\n\n \"block\": \"10\",\n\n \"app\": \"0\"\n\n },\n\n \"chain_id\": \"test-chain-y3m1e6-AB\",\n\n \"height\": \"1\",\n\n \"time\": \"2019-11-18T05:49:16.254417Z\",\n\n \"num_txs\": \"0\",\n\n \"total_txs\": \"0\",\n\n \"last_block_id\": {\n\n \"hash\": \"\",\n\n \"parts\": {\n\n \"total\": \"0\",\n\n \"hash\": \"\"\n\n }\n\n },\n", "file_path": "client-common/src/tendermint/mock.rs", "rank": 77, "score": 149474.48487340653 }, { "content": "pub fn storage_path() -> String {\n\n match std::env::var(\"TX_ENCLAVE_STORAGE\") {\n\n Ok(path) => path,\n\n Err(_) => \".enclave\".to_owned(),\n\n }\n\n}\n\n\n\npub const META_KEYSPACE: &[u8] = b\"meta\";\n\npub const TX_KEYSPACE: &[u8] = b\"tx\";\n", "file_path": "chain-tx-enclave/enclave-u-common/src/lib.rs", "rank": 78, "score": 147596.7905364234 }, { "content": "pub fn validator_priv_key() -> PrivateKey {\n\n serde_json::from_str(DEFAULT_VALIDATOR_KEY).unwrap()\n\n}\n\n\n", "file_path": "client-common/src/tendermint/mock.rs", "rank": 79, "score": 147596.7905364234 }, { "content": "pub fn get_enclave_bridge_mock() -> MockClient {\n\n MockClient::new(0)\n\n}\n\n\n", "file_path": "test-common/src/chain_env.rs", "rank": 80, "score": 145796.17287076524 }, { "content": "pub fn get_enclave_bridge_mock() -> MockClient {\n\n MockClient::new(0)\n\n}\n\n\n", "file_path": "chain-abci/tests/abci_app.rs", "rank": 81, "score": 145796.17287076524 }, { "content": "pub fn start_enclave() -> SgxEnclave {\n\n match init_enclave(true) {\n\n Ok(r) => {\n\n info!(\"[+] Init Query Enclave Successful {}!\", r.geteid());\n\n r\n\n }\n\n Err(e) => {\n\n panic!(\"[-] Init Query Enclave Failed {}!\", e.as_str());\n\n }\n\n }\n\n}\n\n\n", "file_path": "chain-tx-enclave/tx-query/app/src/main.rs", "rank": 82, 
"score": 144067.98149131914 }, { "content": "fn deserialize_transaction_id<'de, D>(deserializer: D) -> std::result::Result<TxId, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n{\n\n let transaction_id_raw: &str = Deserialize::deserialize(deserializer)?;\n\n let transaction_id_vec =\n\n hex::decode(transaction_id_raw).map_err(|e| de::Error::custom(e.to_string()))?;\n\n if transaction_id_vec.len() != 32 {\n\n return Err(de::Error::custom(\"Invalid transaction id length\"));\n\n }\n\n\n\n let mut transaction_id = [0; 32];\n\n transaction_id.copy_from_slice(&transaction_id_vec);\n\n\n\n Ok(transaction_id)\n\n}\n\n\n\nimpl Encode for TransactionChange {\n\n fn encode_to<W: Output>(&self, dest: &mut W) {\n\n self.transaction_id.encode_to(dest);\n", "file_path": "client-core/src/types/transaction_change.rs", "rank": 83, "score": 143863.63046399062 }, { "content": "pub fn node_info() -> node::Info {\n\n node::Info {\n\n protocol_version: node::info::ProtocolVersionInfo {\n\n p2p: 7,\n\n block: BLOCK_VERSION,\n\n app: APP_VERSION,\n\n },\n\n id: node::Id::from_str(\"7edc638f79308dfdfcd77b743e1375b8e1cea6f2\").unwrap(),\n\n listen_addr: node::info::ListenAddress::new(\"tcp://0.0.0.0:26656\".to_owned()),\n\n network: chain_id(),\n\n version: \"0.32.7\".parse().unwrap(),\n\n channels: channel::Channels::default(),\n\n moniker: Moniker::from_str(\"test\").unwrap(),\n\n other: node::info::OtherInfo {\n\n tx_index: node::info::TxIndexStatus::On,\n\n rpc_address: net::Address::from_str(\"tcp://127.0.0.1:26657\").unwrap(),\n\n },\n\n }\n\n}\n\n\n", "file_path": "client-common/src/tendermint/mock.rs", "rank": 84, "score": 143756.91245124053 }, { "content": "pub fn get_ecdsa_witness<C: Signing>(\n\n secp: &Secp256k1<C>,\n\n txid: &TxId,\n\n secret_key: &SecretKey,\n\n) -> EcdsaSignature {\n\n let message = Message::from_slice(&txid[..]).expect(\"32 bytes\");\n\n let sig = secp.sign_recoverable(&message, &secret_key);\n\n return sig;\n\n}\n\n\n", "file_path": 
"chain-abci/benches/tx.rs", "rank": 85, "score": 143756.91245124053 }, { "content": "pub fn chain_id() -> chain::Id {\n\n genesis().chain_id\n\n}\n\n\n", "file_path": "client-common/src/tendermint/mock.rs", "rank": 86, "score": 143756.91245124053 }, { "content": "pub fn validator_info() -> validator::Info {\n\n genesis().validators[0]\n\n}\n\n\n", "file_path": "client-common/src/tendermint/mock.rs", "rank": 87, "score": 143756.91245124053 }, { "content": "pub fn get_validator_key(node: &CouncilNode) -> PubKey {\n\n let mut pk = PubKey::new();\n\n let (keytype, key) = node.consensus_pubkey.to_validator_update();\n\n pk.set_field_type(keytype);\n\n pk.set_data(key);\n\n pk\n\n}\n\n\n", "file_path": "chain-abci/src/app/app_init.rs", "rank": 88, "score": 143339.96301574478 }, { "content": "fn single_set(arr: &[u8]) -> BitVec {\n\n let mut r = [0u8; 256];\n\n let h = keccak256(arr);\n\n for i in [0usize, 2usize, 4usize].iter() {\n\n let m = (((h[*i] as usize) << 8) + (h[*i + 1] as usize)) % 2048;\n\n r[m / 8] |= 1 << (m % 8);\n\n }\n\n BitVec::from_bytes(&r[..])\n\n}\n\n\n\nimpl Bloom {\n\n /// Starts a fresh filter\n\n pub fn reset(&mut self) {\n\n self.0.clear();\n\n }\n\n\n\n /// Adds the other bloom filter to the current one\n\n pub fn add(&mut self, other: &Bloom) {\n\n self.0.union(&other.0);\n\n }\n", "file_path": "chain-tx-filter/src/filter.rs", "rank": 89, "score": 142471.8455096457 }, { "content": "pub fn get_ecdsa_witness<C: Signing>(\n\n secp: &Secp256k1<C>,\n\n txid: &TxId,\n\n secret_key: &SecretKey,\n\n) -> EcdsaSignature {\n\n let message = Message::from_slice(&txid[..]).expect(\"32 bytes\");\n\n secp.sign_recoverable(&message, &secret_key)\n\n}\n\n\n", "file_path": "test-common/src/chain_env.rs", "rank": 90, "score": 141879.21811425738 }, { "content": "pub fn get_tx_witness<C: Signing>(\n\n secp: Secp256k1<C>,\n\n txid: &TxId,\n\n secret_key: &SecretKey,\n\n merkle_tree: &MerkleTree<RawPubkey>,\n\n) -> TxInWitness {\n\n let message = 
Message::from_slice(txid).unwrap();\n\n let public_key = PublicKey::from_secret_key(&secp, secret_key);\n\n let proof = merkle_tree\n\n .generate_proof(RawPubkey::from(public_key.serialize()))\n\n .unwrap();\n\n let signature = schnorr_sign(&secp, &message, secret_key).0;\n\n\n\n TxInWitness::TreeSig(signature, proof)\n\n}\n\n\n", "file_path": "chain-abci/tests/tx_validation.rs", "rank": 91, "score": 141879.21811425738 }, { "content": "pub fn sync_info() -> status::SyncInfo {\n\n status::SyncInfo {\n\n latest_block_hash: None,\n\n latest_app_hash: None,\n\n latest_block_height: Height::default(),\n\n latest_block_time: Time::now(),\n\n catching_up: false,\n\n }\n\n}\n\n\n", "file_path": "client-common/src/tendermint/mock.rs", "rank": 92, "score": 141879.21811425738 }, { "content": "pub fn get_ecdsa_witness<C: Signing>(\n\n secp: &Secp256k1<C>,\n\n txid: &TxId,\n\n secret_key: &SecretKey,\n\n) -> EcdsaSignature {\n\n let message = Message::from_slice(&txid[..]).expect(\"32 bytes\");\n\n secp.sign_recoverable(&message, &secret_key)\n\n}\n\n\n", "file_path": "chain-abci/tests/abci_app.rs", "rank": 93, "score": 141879.21811425738 }, { "content": "/// Given the chosen network, it returns the human readable part of Bech32 address\n\npub fn get_bech32_human_part() -> &'static str {\n\n get_bech32_human_part_from_network(get_network())\n\n}\n\n\n", "file_path": "chain-core/src/init/network.rs", "rank": 95, "score": 140078.60044859923 }, { "content": "/// Generate generic ABCI ProofOp for the witness\n\nfn get_witness_proof_op(witness: &[u8]) -> ProofOp {\n\n let mut op = ProofOp::new();\n\n op.set_field_type(\"witness\".into());\n\n op.set_key(TXID_HASH_ID.to_vec());\n\n op.set_data(txid_hash(witness).to_vec());\n\n op\n\n}\n\n\n\nimpl<T: EnclaveProxy> ChainNodeApp<T> {\n\n /// Helper to find a key under a column in KV DB, or log an error (both stored in the response).\n\n fn lookup(&self, resp: &mut ResponseQuery, column: Option<u32>, key: &[u8], log_message: &str) {\n\n 
let v = self.storage.db.get(column, key);\n\n match v {\n\n Ok(Some(uv)) => {\n\n resp.value = uv.into_vec();\n\n }\n\n _ => {\n\n resp.log += log_message;\n\n resp.code = 1;\n\n }\n", "file_path": "chain-abci/src/app/query.rs", "rank": 96, "score": 138934.95252047642 }, { "content": "fn parse_wallet_state<T: AsRef<[u8]>>(\n\n name: &str,\n\n bytes_optional: Option<T>,\n\n) -> Result<WalletState> {\n\n bytes_optional\n\n .map(|bytes| {\n\n WalletState::decode(&mut bytes.as_ref()).chain(|| {\n\n (\n\n ErrorKind::DeserializationError,\n\n format!(\n\n \"Unable to deserialize wallet state for wallet with name {}\",\n\n name\n\n ),\n\n )\n\n })\n\n })\n\n .transpose()\n\n .map(|wallet_state_optional| wallet_state_optional.unwrap_or_default())\n\n}\n\n\n", "file_path": "client-core/src/service/wallet_state_service.rs", "rank": 97, "score": 138934.95252047642 }, { "content": "/// One-time initialization of the chosen network\n\n/// (as address textual format / serialization + HD-wallet path depend on the network type)\n\npub fn init_chain_id(chain_id_src: &str) {\n\n let chain_id = chain_id_src.to_string();\n\n assert!(chain_id.len() >= 6);\n\n let length = chain_id.len();\n\n let hexstring = &chain_id[(length - 2)..];\n\n let hexvalue = hex::decode(hexstring).expect(\"last two characters should be hex digits\");\n\n assert!(1 == hexvalue.len());\n\n init_network_id(hexvalue[0]);\n\n assert!(get_network_id() == hexvalue[0]);\n\n\n\n match chain_id_src {\n\n MAINNET_CHAIN_ID => init_network(Network::Mainnet),\n\n TESTNET_CHAIN_ID => init_network(Network::Testnet),\n\n _ => init_network(Network::Devnet),\n\n }\n\n}\n\n\n", "file_path": "chain-core/src/init/network.rs", "rank": 98, "score": 138355.4432148996 }, { "content": "fn get_sigrl_from_intel(ias_key: &str, fd: c_int, gid: u32) -> Result<Vec<u8>, RAError> {\n\n // println!(\"get_sigrl_from_intel fd = {:?}\", fd);\n\n\n\n let req = format!(\"GET {}{}{:08x} HTTP/1.1\\r\\nHOST: {}\\r\\nOcp-Apim-Subscription-Key: 
{}\\r\\nConnection: Close\\r\\n\\r\\n\",\n\n API_SUFFIX,\n\n SIGRL_SUFFIX,\n\n gid,\n\n IAS_HOSTNAME,\n\n ias_key);\n\n\n\n // println!(\"{}\", req);\n\n\n\n let dns_name = webpki::DNSNameRef::try_from_ascii_str(IAS_HOSTNAME)\n\n .map_err(|_| RAError::CommunicationError)?;\n\n let mut sess = rustls::ClientSession::new(&IAS_CLIENT_CONFIG, dns_name);\n\n let mut sock = TcpStream::new(fd).map_err(|_| RAError::CommunicationError)?;\n\n let mut tls = rustls::Stream::new(&mut sess, &mut sock);\n\n\n\n tls.write(req.as_bytes())\n\n .map_err(|_| RAError::CommunicationError)?;\n", "file_path": "chain-tx-enclave/tx-query/enclave/src/attest.rs", "rank": 99, "score": 138319.7290177881 } ]
Rust
tests/integration_test.rs
durch/sphinx
b168f70ac422c6c6ecd80a784958cbee330aab44
extern crate sphinx; use sphinx::crypto; use sphinx::header::delays; use sphinx::route::{Destination, Node}; use sphinx::SphinxPacket; #[cfg(test)] mod create_and_process_sphinx_packet { use super::*; use sphinx::route::{DestinationAddressBytes, NodeAddressBytes}; use sphinx::{ constants::{ DESTINATION_ADDRESS_LENGTH, IDENTIFIER_LENGTH, NODE_ADDRESS_LENGTH, PAYLOAD_SIZE, SECURITY_PARAMETER, }, ProcessedPacket, }; use std::time::Duration; #[test] fn returns_the_correct_data_at_each_hop_for_route_of_3_mixnodes_without_surb() { let (node1_sk, node1_pk) = crypto::keygen(); let node1 = Node::new( NodeAddressBytes::from_bytes([5u8; NODE_ADDRESS_LENGTH]), node1_pk, ); let (node2_sk, node2_pk) = crypto::keygen(); let node2 = Node::new( NodeAddressBytes::from_bytes([4u8; NODE_ADDRESS_LENGTH]), node2_pk, ); let (node3_sk, node3_pk) = crypto::keygen(); let node3 = Node::new( NodeAddressBytes::from_bytes([2u8; NODE_ADDRESS_LENGTH]), node3_pk, ); let route = [node1, node2, node3]; let average_delay = Duration::from_secs_f64(1.0); let delays = delays::generate_from_average_duration(route.len(), average_delay); let destination = Destination::new( DestinationAddressBytes::from_bytes([3u8; DESTINATION_ADDRESS_LENGTH]), [4u8; IDENTIFIER_LENGTH], ); let message = vec![13u8, 16]; let sphinx_packet = SphinxPacket::new(message.clone(), &route, &destination, &delays).unwrap(); let next_sphinx_packet_1 = match sphinx_packet.process(&node1_sk).unwrap() { ProcessedPacket::ForwardHop(next_packet, next_hop_addr1, _delay1) => { assert_eq!( NodeAddressBytes::from_bytes([4u8; NODE_ADDRESS_LENGTH]), next_hop_addr1 ); next_packet } _ => panic!(), }; let next_sphinx_packet_2 = match next_sphinx_packet_1.process(&node2_sk).unwrap() { ProcessedPacket::ForwardHop(next_packet, next_hop_addr2, _delay2) => { assert_eq!( NodeAddressBytes::from_bytes([2u8; NODE_ADDRESS_LENGTH]), next_hop_addr2 ); next_packet } _ => panic!(), }; match next_sphinx_packet_2.process(&node3_sk).unwrap() { 
ProcessedPacket::FinalHop(_, _, payload) => { let zero_bytes = vec![0u8; SECURITY_PARAMETER]; let additional_padding = vec![0u8; PAYLOAD_SIZE - SECURITY_PARAMETER - message.len() - 1]; let expected_payload = [zero_bytes, message, vec![1], additional_padding].concat(); assert_eq!(expected_payload, payload.as_bytes()); } _ => panic!(), }; } } #[cfg(test)] mod converting_sphinx_packet_to_and_from_bytes { use super::*; use sphinx::route::{DestinationAddressBytes, NodeAddressBytes}; use sphinx::{ constants::{ DESTINATION_ADDRESS_LENGTH, IDENTIFIER_LENGTH, NODE_ADDRESS_LENGTH, PAYLOAD_SIZE, SECURITY_PARAMETER, }, ProcessedPacket, }; use std::time::Duration; #[test] fn it_is_possible_to_do_the_conversion_without_data_loss() { let (node1_sk, node1_pk) = crypto::keygen(); let node1 = Node::new( NodeAddressBytes::from_bytes([5u8; NODE_ADDRESS_LENGTH]), node1_pk, ); let (node2_sk, node2_pk) = crypto::keygen(); let node2 = Node::new( NodeAddressBytes::from_bytes([4u8; NODE_ADDRESS_LENGTH]), node2_pk, ); let (node3_sk, node3_pk) = crypto::keygen(); let node3 = Node::new( NodeAddressBytes::from_bytes([2u8; NODE_ADDRESS_LENGTH]), node3_pk, ); let route = [node1, node2, node3]; let average_delay = Duration::from_secs_f64(1.0); let delays = delays::generate_from_average_duration(route.len(), average_delay); let destination = Destination::new( DestinationAddressBytes::from_bytes([3u8; DESTINATION_ADDRESS_LENGTH]), [4u8; IDENTIFIER_LENGTH], ); let message = vec![13u8, 16]; let sphinx_packet = SphinxPacket::new(message.clone(), &route, &destination, &delays).unwrap(); let sphinx_packet_bytes = sphinx_packet.to_bytes(); let recovered_packet = SphinxPacket::from_bytes(&sphinx_packet_bytes).unwrap(); let next_sphinx_packet_1 = match recovered_packet.process(&node1_sk).unwrap() { ProcessedPacket::ForwardHop(next_packet, next_hop_address, delay) => { assert_eq!( NodeAddressBytes::from_bytes([4u8; NODE_ADDRESS_LENGTH]), next_hop_address ); assert_eq!(delays[0].to_nanos(), delay.to_nanos()); 
next_packet } _ => panic!(), }; let next_sphinx_packet_2 = match next_sphinx_packet_1.process(&node2_sk).unwrap() { ProcessedPacket::ForwardHop(next_packet, next_hop_address, delay) => { assert_eq!( NodeAddressBytes::from_bytes([2u8; NODE_ADDRESS_LENGTH]), next_hop_address ); assert_eq!(delays[1].to_nanos(), delay.to_nanos()); next_packet } _ => panic!(), }; match next_sphinx_packet_2.process(&node3_sk).unwrap() { ProcessedPacket::FinalHop(_, _, payload) => { let zero_bytes = vec![0u8; SECURITY_PARAMETER]; let additional_padding = vec![0u8; PAYLOAD_SIZE - SECURITY_PARAMETER - message.len() - 1]; let expected_payload = [zero_bytes, message, vec![1], additional_padding].concat(); assert_eq!(expected_payload, payload.as_bytes()); } _ => panic!(), }; } #[test] #[should_panic] fn it_panics_if_data_of_invalid_length_is_provided() { let (_, node1_pk) = crypto::keygen(); let node1 = Node::new( NodeAddressBytes::from_bytes([5u8; NODE_ADDRESS_LENGTH]), node1_pk, ); let (_, node2_pk) = crypto::keygen(); let node2 = Node::new( NodeAddressBytes::from_bytes([4u8; NODE_ADDRESS_LENGTH]), node2_pk, ); let (_, node3_pk) = crypto::keygen(); let node3 = Node::new( NodeAddressBytes::from_bytes([2u8; NODE_ADDRESS_LENGTH]), node3_pk, ); let route = [node1, node2, node3]; let average_delay = Duration::from_secs_f64(1.0); let delays = delays::generate_from_average_duration(route.len(), average_delay); let destination = Destination::new( DestinationAddressBytes::from_bytes([3u8; DESTINATION_ADDRESS_LENGTH]), [4u8; IDENTIFIER_LENGTH], ); let message = vec![13u8, 16]; let sphinx_packet = SphinxPacket::new(message, &route, &destination, &delays).unwrap(); let sphinx_packet_bytes = &sphinx_packet.to_bytes()[..300]; SphinxPacket::from_bytes(&sphinx_packet_bytes).unwrap(); } } #[cfg(test)] mod create_and_process_surb { use super::*; use crypto::EphemeralSecret; use sphinx::route::NodeAddressBytes; use sphinx::surb::{SURBMaterial, SURB}; use sphinx::{ constants::{NODE_ADDRESS_LENGTH, PAYLOAD_SIZE, 
SECURITY_PARAMETER}, packet::builder::DEFAULT_PAYLOAD_SIZE, test_utils::fixtures::destination_fixture, ProcessedPacket, }; use std::time::Duration; #[test] fn returns_the_correct_data_at_each_hop_for_route_of_3_mixnodes() { let (node1_sk, node1_pk) = crypto::keygen(); let node1 = Node { address: NodeAddressBytes::from_bytes([5u8; NODE_ADDRESS_LENGTH]), pub_key: node1_pk, }; let (node2_sk, node2_pk) = crypto::keygen(); let node2 = Node { address: NodeAddressBytes::from_bytes([4u8; NODE_ADDRESS_LENGTH]), pub_key: node2_pk, }; let (node3_sk, node3_pk) = crypto::keygen(); let node3 = Node { address: NodeAddressBytes::from_bytes([2u8; NODE_ADDRESS_LENGTH]), pub_key: node3_pk, }; let surb_route = vec![node1, node2, node3]; let surb_destination = destination_fixture(); let surb_initial_secret = EphemeralSecret::new(); let surb_delays = delays::generate_from_average_duration(surb_route.len(), Duration::from_secs(3)); let pre_surb = SURB::new( surb_initial_secret, SURBMaterial::new(surb_route, surb_delays.clone(), surb_destination), ) .unwrap(); let plaintext_message = vec![42u8; 160]; let (surb_sphinx_packet, first_hop) = SURB::use_surb(pre_surb, &plaintext_message, DEFAULT_PAYLOAD_SIZE).unwrap(); assert_eq!( first_hop, NodeAddressBytes::from_bytes([5u8; NODE_ADDRESS_LENGTH]) ); let next_sphinx_packet_1 = match surb_sphinx_packet.process(&node1_sk).unwrap() { ProcessedPacket::ForwardHop(next_packet, next_hop_addr1, _delay1) => { assert_eq!( NodeAddressBytes::from_bytes([4u8; NODE_ADDRESS_LENGTH]), next_hop_addr1 ); assert_eq!(_delay1, surb_delays[0]); next_packet } _ => panic!(), }; let next_sphinx_packet_2 = match next_sphinx_packet_1.process(&node2_sk).unwrap() { ProcessedPacket::ForwardHop(next_packet, next_hop_addr2, _delay2) => { assert_eq!( NodeAddressBytes::from_bytes([2u8; NODE_ADDRESS_LENGTH]), next_hop_addr2 ); assert_eq!(_delay2, surb_delays[1]); next_packet } _ => panic!(), }; match next_sphinx_packet_2.process(&node3_sk).unwrap() { ProcessedPacket::FinalHop(_, 
_, payload) => { let zero_bytes = vec![0u8; SECURITY_PARAMETER]; let additional_padding = vec![0u8; PAYLOAD_SIZE - SECURITY_PARAMETER - plaintext_message.len() - 1]; let expected_payload = [zero_bytes, plaintext_message, vec![1], additional_padding].concat(); assert_eq!(expected_payload, payload.as_bytes()); } _ => panic!(), }; } }
extern crate sphinx; use sphinx::crypto; use sphinx::header::delays; use sphinx::route::{Destination, Node}; use sphinx::SphinxPacket; #[cfg(test)] mod create_and_process_sphinx_packet { use super::*; use sphinx::route::{DestinationAddressBytes, NodeAddressBytes}; use sphinx::{ constants::{ DESTINATION_ADDRESS_LENGTH, IDENTIFIER_LENGTH, NODE_ADDRESS_LENGTH, PAYLOAD_SIZE, SECURITY_PARAMETER, }, ProcessedPacket, }; use std::time::Duration; #[test] fn returns_the_correct_data_at_each_hop_for_route_of_3_mixnodes_without_surb() { let (node1_sk, node1_pk) = crypto::keygen(); let node1 = Node::new( NodeAddressBytes::from_bytes([5u8; NODE_ADDRESS_LENGTH]), node1_pk, ); let (node2_sk, node2_pk) = crypto::keygen(); let node2 = Node::new( NodeAddressBytes::from_bytes([4u8; NODE_ADDRESS_LENGTH]), node2_pk, ); let (node3_sk, node3_pk) = crypto::keygen(); let node3 = Node::new( NodeAddressBytes::from_bytes([2u8; NODE_ADDRESS_LENGTH]), node3_pk, ); let route = [node1, node2, node3]; let average_delay = Duration::from_secs_f64(1.0); let delays = delays::generate_from_average_duration(route.len(), average_delay); let destination = Destination::new( DestinationAddressBytes::from_bytes([3u8; DESTINATION_ADDRESS_LENGTH]), [4u8; IDENTIFIER_LENGTH], ); let message = vec![13u8, 16]; let sphinx_packet = SphinxPacket::new(message.clone(), &route, &destination, &delays).unwrap(); let next_sphinx_packet_1 = match sphinx_packet.process(&node1_sk).unwrap() { ProcessedPacket::ForwardHop(next_packet, next_hop_addr1, _delay1) => { assert_eq!( NodeAddressBytes::from_bytes([4u8; NODE_ADDRESS_LENGTH]), next_hop_addr1 ); next_packet } _ => panic!(), }; let next_sphinx_packet_2 = match next_sphinx_packet_1.process(&node2_sk).unwrap() { ProcessedPacket::ForwardHop(next_packet, next_hop_addr2, _delay2) => { assert_eq!( NodeAddressBytes::from_bytes([2u8; NODE_ADDRESS_LENGTH]), next_hop_addr2 ); next_packet } _ => panic!(), }; match next_sphinx_packet_2.process(&node3_sk).unwrap() { 
ProcessedPacket::FinalHop(_, _, payload) => { let zero_bytes = vec![0u8; SECURITY_PARAMETER]; let additional_padding = vec![0u8; PAYLOAD_SIZE - SECURITY_PARAMETER - message.len() - 1]; let expected_payload = [zero_bytes, message, vec![1], additional_padding].concat(); assert_eq!(expected_payload, payload.as_bytes()); } _ => panic!(), }; } } #[cfg(test)] mod converting_sphinx_packet_to_and_from_bytes { use super::*; use sphinx::route::{DestinationAddressBytes, NodeAddressBytes}; use sphinx::{ constants::{ DESTINATION_ADDRESS_LENGTH, IDENTIFIER_LENGTH, NODE_ADDRESS_LENGTH, PAYLOAD_SIZE, SECURITY_PARAMETER, }, ProcessedPacket, }; use std::time::Duration; #[test] fn it_is_possible_to_do_the_conversion_without_data_loss() { let (node1_sk, node1_pk) = crypto::keygen(); let node1 = Node::new( NodeAddressBytes::from_bytes([5u8; NODE_ADDRESS_LENGTH]), node1_pk, ); let (node2_sk, node2_pk) = crypto::keygen(); let node2 = Node::new( NodeAddressBytes::from_bytes([4u8; NODE_ADDRESS_LENGTH]), node2_pk, ); let (node3_sk, node3_pk) = crypto::keygen(); let node3 = Node::new( NodeAddressBytes::from_bytes([2u8; NODE_ADDRESS_LENGTH]), node3_pk, ); let route = [node1, node2, node3]; let average_delay = Duration::from_secs_f64(1.0); let delays = delays::generate_from_average_duration(route.len(), average_delay); let destination = Destination::new( DestinationAddressBytes::from_bytes([3u8; DESTINATION_ADDRESS_LENGTH]), [4u8; IDENTIFIER_LENGTH], );
#[test] #[should_panic] fn it_panics_if_data_of_invalid_length_is_provided() { let (_, node1_pk) = crypto::keygen(); let node1 = Node::new( NodeAddressBytes::from_bytes([5u8; NODE_ADDRESS_LENGTH]), node1_pk, ); let (_, node2_pk) = crypto::keygen(); let node2 = Node::new( NodeAddressBytes::from_bytes([4u8; NODE_ADDRESS_LENGTH]), node2_pk, ); let (_, node3_pk) = crypto::keygen(); let node3 = Node::new( NodeAddressBytes::from_bytes([2u8; NODE_ADDRESS_LENGTH]), node3_pk, ); let route = [node1, node2, node3]; let average_delay = Duration::from_secs_f64(1.0); let delays = delays::generate_from_average_duration(route.len(), average_delay); let destination = Destination::new( DestinationAddressBytes::from_bytes([3u8; DESTINATION_ADDRESS_LENGTH]), [4u8; IDENTIFIER_LENGTH], ); let message = vec![13u8, 16]; let sphinx_packet = SphinxPacket::new(message, &route, &destination, &delays).unwrap(); let sphinx_packet_bytes = &sphinx_packet.to_bytes()[..300]; SphinxPacket::from_bytes(&sphinx_packet_bytes).unwrap(); } } #[cfg(test)] mod create_and_process_surb { use super::*; use crypto::EphemeralSecret; use sphinx::route::NodeAddressBytes; use sphinx::surb::{SURBMaterial, SURB}; use sphinx::{ constants::{NODE_ADDRESS_LENGTH, PAYLOAD_SIZE, SECURITY_PARAMETER}, packet::builder::DEFAULT_PAYLOAD_SIZE, test_utils::fixtures::destination_fixture, ProcessedPacket, }; use std::time::Duration; #[test] fn returns_the_correct_data_at_each_hop_for_route_of_3_mixnodes() { let (node1_sk, node1_pk) = crypto::keygen(); let node1 = Node { address: NodeAddressBytes::from_bytes([5u8; NODE_ADDRESS_LENGTH]), pub_key: node1_pk, }; let (node2_sk, node2_pk) = crypto::keygen(); let node2 = Node { address: NodeAddressBytes::from_bytes([4u8; NODE_ADDRESS_LENGTH]), pub_key: node2_pk, }; let (node3_sk, node3_pk) = crypto::keygen(); let node3 = Node { address: NodeAddressBytes::from_bytes([2u8; NODE_ADDRESS_LENGTH]), pub_key: node3_pk, }; let surb_route = vec![node1, node2, node3]; let surb_destination = 
destination_fixture(); let surb_initial_secret = EphemeralSecret::new(); let surb_delays = delays::generate_from_average_duration(surb_route.len(), Duration::from_secs(3)); let pre_surb = SURB::new( surb_initial_secret, SURBMaterial::new(surb_route, surb_delays.clone(), surb_destination), ) .unwrap(); let plaintext_message = vec![42u8; 160]; let (surb_sphinx_packet, first_hop) = SURB::use_surb(pre_surb, &plaintext_message, DEFAULT_PAYLOAD_SIZE).unwrap(); assert_eq!( first_hop, NodeAddressBytes::from_bytes([5u8; NODE_ADDRESS_LENGTH]) ); let next_sphinx_packet_1 = match surb_sphinx_packet.process(&node1_sk).unwrap() { ProcessedPacket::ForwardHop(next_packet, next_hop_addr1, _delay1) => { assert_eq!( NodeAddressBytes::from_bytes([4u8; NODE_ADDRESS_LENGTH]), next_hop_addr1 ); assert_eq!(_delay1, surb_delays[0]); next_packet } _ => panic!(), }; let next_sphinx_packet_2 = match next_sphinx_packet_1.process(&node2_sk).unwrap() { ProcessedPacket::ForwardHop(next_packet, next_hop_addr2, _delay2) => { assert_eq!( NodeAddressBytes::from_bytes([2u8; NODE_ADDRESS_LENGTH]), next_hop_addr2 ); assert_eq!(_delay2, surb_delays[1]); next_packet } _ => panic!(), }; match next_sphinx_packet_2.process(&node3_sk).unwrap() { ProcessedPacket::FinalHop(_, _, payload) => { let zero_bytes = vec![0u8; SECURITY_PARAMETER]; let additional_padding = vec![0u8; PAYLOAD_SIZE - SECURITY_PARAMETER - plaintext_message.len() - 1]; let expected_payload = [zero_bytes, plaintext_message, vec![1], additional_padding].concat(); assert_eq!(expected_payload, payload.as_bytes()); } _ => panic!(), }; } }
let message = vec![13u8, 16]; let sphinx_packet = SphinxPacket::new(message.clone(), &route, &destination, &delays).unwrap(); let sphinx_packet_bytes = sphinx_packet.to_bytes(); let recovered_packet = SphinxPacket::from_bytes(&sphinx_packet_bytes).unwrap(); let next_sphinx_packet_1 = match recovered_packet.process(&node1_sk).unwrap() { ProcessedPacket::ForwardHop(next_packet, next_hop_address, delay) => { assert_eq!( NodeAddressBytes::from_bytes([4u8; NODE_ADDRESS_LENGTH]), next_hop_address ); assert_eq!(delays[0].to_nanos(), delay.to_nanos()); next_packet } _ => panic!(), }; let next_sphinx_packet_2 = match next_sphinx_packet_1.process(&node2_sk).unwrap() { ProcessedPacket::ForwardHop(next_packet, next_hop_address, delay) => { assert_eq!( NodeAddressBytes::from_bytes([2u8; NODE_ADDRESS_LENGTH]), next_hop_address ); assert_eq!(delays[1].to_nanos(), delay.to_nanos()); next_packet } _ => panic!(), }; match next_sphinx_packet_2.process(&node3_sk).unwrap() { ProcessedPacket::FinalHop(_, _, payload) => { let zero_bytes = vec![0u8; SECURITY_PARAMETER]; let additional_padding = vec![0u8; PAYLOAD_SIZE - SECURITY_PARAMETER - message.len() - 1]; let expected_payload = [zero_bytes, message, vec![1], additional_padding].concat(); assert_eq!(expected_payload, payload.as_bytes()); } _ => panic!(), }; }
function_block-function_prefix_line
[ { "content": "pub fn random_node() -> Node {\n\n let random_private_key = crypto::PrivateKey::new();\n\n Node {\n\n address: NodeAddressBytes::from_bytes([2u8; NODE_ADDRESS_LENGTH]),\n\n pub_key: (&random_private_key).into(),\n\n }\n\n}\n", "file_path": "src/test_utils.rs", "rank": 0, "score": 114793.02297712355 }, { "content": "pub fn generate_from_nanos(number: usize, average_delay: u64) -> Vec<Delay> {\n\n let exp = Exp::new(1.0 / average_delay as f64).unwrap();\n\n\n\n std::iter::repeat(())\n\n .take(number)\n\n .map(|_| Delay::new_from_nanos((exp.sample(&mut rand::thread_rng())).round() as u64)) // for now I just assume we will express it in nano-seconds to have an integer\n\n .collect()\n\n}\n\n\n", "file_path": "src/header/delays.rs", "rank": 1, "score": 86414.8353858284 }, { "content": "pub fn generate_from_average_duration(number: usize, average_delay: Duration) -> Vec<Delay> {\n\n let exp = Exp::new(1.0 / average_delay.as_nanos() as f64).unwrap();\n\n\n\n std::iter::repeat(())\n\n .take(number)\n\n .map(|_| Delay::new_from_nanos(exp.sample(&mut rand::thread_rng()).round() as u64))\n\n .collect()\n\n}\n\n\n\n#[cfg(test)]\n\nmod test_delay_generation {\n\n use super::*;\n\n\n\n #[test]\n\n fn with_0_delays_returns_an_empty_vector() {\n\n let delays = generate_from_average_duration(0, Duration::from_millis(10));\n\n assert_eq!(0, delays.len());\n\n }\n\n\n\n #[test]\n", "file_path": "src/header/delays.rs", "rank": 2, "score": 85113.64626729763 }, { "content": "fn make_packet_copy(packet: &SphinxPacket) -> SphinxPacket {\n\n SphinxPacket::from_bytes(&packet.to_bytes()).unwrap()\n\n}\n\n\n", "file_path": "benches/benchmarks.rs", "rank": 3, "score": 67077.95100552749 }, { "content": "pub fn generate_pseudorandom_bytes(\n\n // TODO: those should use proper generic arrays to begin with!!\n\n // ^ will be done in next PR\n\n key: &[u8; STREAM_CIPHER_KEY_SIZE],\n\n iv: &[u8; STREAM_CIPHER_KEY_SIZE],\n\n length: usize,\n\n) -> Vec<u8> {\n\n let cipher_key = 
GenericArray::from_slice(&key[..]);\n\n let cipher_nonce = GenericArray::from_slice(&iv[..]);\n\n\n\n // generate a random string as an output of a PRNG, which we implement using stream cipher AES_CTR\n\n let mut cipher = Aes128Ctr::new(cipher_key, cipher_nonce);\n\n let mut data = vec![0u8; length];\n\n cipher.apply_keystream(&mut data);\n\n data\n\n}\n\n\n", "file_path": "src/crypto/mod.rs", "rank": 4, "score": 62448.99228447558 }, { "content": " 1\n\n );\n\n // and the rest should be 0 padding\n\n assert!(final_payload_inner\n\n .iter()\n\n .skip(SECURITY_PARAMETER + plaintext_length + 1)\n\n .all(|&b| b == 0))\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test_encapsulating_payload {\n\n use super::*;\n\n use crate::constants::PAYLOAD_KEY_SIZE;\n\n\n\n #[test]\n\n fn can_be_encapsulated_without_encryption() {\n\n let message = vec![1u8, 16];\n\n let payload_size = 512;\n", "file_path": "src/payload/mod.rs", "rank": 5, "score": 61638.72918409493 }, { "content": " .is_ok())\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test_unwrapping_payload {\n\n use super::*;\n\n use crate::constants::{PAYLOAD_KEY_SIZE, SECURITY_PARAMETER};\n\n use crate::packet::builder::DEFAULT_PAYLOAD_SIZE;\n\n\n\n #[test]\n\n fn unwrapping_results_in_original_payload_plaintext() {\n\n let message = vec![42u8; 16];\n\n let payload_key_1 = [3u8; PAYLOAD_KEY_SIZE];\n\n let payload_key_2 = [4u8; PAYLOAD_KEY_SIZE];\n\n let payload_key_3 = [5u8; PAYLOAD_KEY_SIZE];\n\n let payload_keys = [payload_key_1, payload_key_2, payload_key_3];\n\n\n\n let encrypted_payload =\n\n Payload::encapsulate_message(&message, &payload_keys, DEFAULT_PAYLOAD_SIZE).unwrap();\n", "file_path": "src/payload/mod.rs", "rank": 6, "score": 61638.53311768468 }, { "content": "\n\n let unwrapped_payload = payload_keys\n\n .iter()\n\n .fold(encrypted_payload, |current_layer, payload_key| {\n\n current_layer.unwrap(payload_key).unwrap()\n\n });\n\n\n\n let zero_bytes = vec![0u8; SECURITY_PARAMETER];\n\n let additional_padding =\n\n 
vec![0u8; DEFAULT_PAYLOAD_SIZE - PAYLOAD_OVERHEAD_SIZE - message.len()];\n\n let expected_payload = [zero_bytes, message, vec![1], additional_padding].concat();\n\n assert_eq!(expected_payload, unwrapped_payload.into_inner());\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod plaintext_recovery {\n\n use super::*;\n\n use crate::constants::PAYLOAD_KEY_SIZE;\n\n use crate::packet::builder::DEFAULT_PAYLOAD_SIZE;\n", "file_path": "src/payload/mod.rs", "rank": 7, "score": 61636.4839389205 }, { "content": " fn from_bytes_returns_error_if_bytes_are_too_short() {\n\n let bytes = [0u8; 1].to_vec();\n\n let expected = ErrorKind::InvalidPayload;\n\n match Payload::from_bytes(&bytes) {\n\n Err(err) => assert_eq!(expected, err.kind()),\n\n _ => panic!(\"Should have returned an error when packet bytes too short\"),\n\n };\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod parameter_verification {\n\n use super::*;\n\n\n\n #[test]\n\n fn it_returns_an_error_if_payload_size_is_smaller_than_the_overhead() {\n\n assert!(Payload::validate_parameters(PAYLOAD_OVERHEAD_SIZE - 1, 16).is_err());\n\n }\n\n\n\n #[test]\n", "file_path": "src/payload/mod.rs", "rank": 8, "score": 61636.09270888796 }, { "content": " fn it_returns_an_error_if_payload_size_is_smaller_than_the_lioness_blocklen() {\n\n assert!(Payload::validate_parameters(lioness::DIGEST_RESULT_SIZE - 1, 16).is_err());\n\n }\n\n\n\n #[test]\n\n fn it_returns_an_error_if_message_is_longer_than_maximum_allowed_length() {\n\n let payload_length = 100;\n\n let max_allowed_length = payload_length - PAYLOAD_OVERHEAD_SIZE;\n\n assert!(Payload::validate_parameters(payload_length, max_allowed_length + 1).is_err());\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod final_payload_setting {\n\n use super::*;\n\n\n\n #[test]\n\n fn adds_correct_padding() {\n\n let plaintext_lengths = vec![0, 1, 16, 128, 4096];\n\n for plaintext_length in plaintext_lengths {\n", "file_path": "src/payload/mod.rs", "rank": 9, "score": 61632.89925422635 }, { "content": " /// Tries to recover 
`Payload` from a slice of bytes.\n\n pub fn from_bytes(bytes: &[u8]) -> Result<Self> {\n\n // with payloads being dynamic in size, the only thing we can do\n\n // is to check if it at least is longer than the minimum length\n\n if bytes.len() < PAYLOAD_OVERHEAD_SIZE {\n\n return Err(Error::new(\n\n ErrorKind::InvalidPayload,\n\n \"too short payload provided\",\n\n ));\n\n }\n\n\n\n Ok(Payload(bytes.to_vec()))\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod building_payload_from_bytes {\n\n use super::*;\n\n\n\n #[test]\n", "file_path": "src/payload/mod.rs", "rank": 10, "score": 61630.92327994397 }, { "content": "use lioness::Lioness;\n\n\n\n// payload consists of security parameter long zero-padding, plaintext and '1' byte to indicate start of padding\n\n// (it can optionally be followed by zero-padding\n\npub const PAYLOAD_OVERHEAD_SIZE: usize = SECURITY_PARAMETER + 1;\n\n\n\n// TODO: question: is padding to some pre-defined length a sphinx-specific thing or rather\n\n// something for our particular use case?\n\n#[derive(Debug)]\n\n#[cfg_attr(test, derive(PartialEq))]\n\npub struct Payload(Vec<u8>);\n\n\n\n// is_empty does not make sense in this context, as you can't construct an empty Payload\n\n#[allow(clippy::len_without_is_empty)]\n\nimpl Payload {\n\n /// Tries to encapsulate provided plaintext message inside a sphinx payload adding\n\n /// as many layers of encryption as there are keys provided.\n\n /// Note that the encryption layers are going to be added in *reverse* order!\n\n pub fn encapsulate_message(\n\n plaintext_message: &[u8],\n", "file_path": "src/payload/mod.rs", "rank": 11, "score": 61630.287976894724 }, { "content": " let unencrypted_message =\n\n Payload::encapsulate_message(&message, &[], payload_size).unwrap();\n\n\n\n // should be equivalent to just setting final payload\n\n assert_eq!(\n\n unencrypted_message,\n\n Payload::set_final_payload(&message, payload_size)\n\n )\n\n }\n\n\n\n #[test]\n\n fn works_with_single_encryption_layer() {\n\n let 
message = vec![1u8, 16];\n\n let payload_size = 512;\n\n let payload_key_1 = [3u8; PAYLOAD_KEY_SIZE];\n\n\n\n assert!(Payload::encapsulate_message(&message, &[payload_key_1], payload_size).is_ok())\n\n }\n\n\n\n #[test]\n", "file_path": "src/payload/mod.rs", "rank": 12, "score": 61628.01371393487 }, { "content": "\n\n assert_eq!(message, recovered_plaintext);\n\n }\n\n\n\n // tests for correct padding detection\n\n #[test]\n\n fn it_is_possible_to_recover_plaintext_even_if_is_just_ones() {\n\n let message = vec![1u8; 160];\n\n\n\n let payload_key_1 = [3u8; PAYLOAD_KEY_SIZE];\n\n let payload_key_2 = [4u8; PAYLOAD_KEY_SIZE];\n\n let payload_key_3 = [5u8; PAYLOAD_KEY_SIZE];\n\n let payload_keys = [payload_key_1, payload_key_2, payload_key_3];\n\n\n\n let encrypted_payload =\n\n Payload::encapsulate_message(&message, &payload_keys, DEFAULT_PAYLOAD_SIZE).unwrap();\n\n\n\n let unwrapped_payload = payload_keys\n\n .iter()\n\n .fold(encrypted_payload, |current_layer, payload_key| {\n", "file_path": "src/payload/mod.rs", "rank": 13, "score": 61627.17301956802 }, { "content": " current_layer.unwrap(payload_key).unwrap()\n\n });\n\n\n\n let recovered_plaintext = unwrapped_payload.recover_plaintext().unwrap();\n\n\n\n assert_eq!(message, recovered_plaintext);\n\n }\n\n\n\n // tests for correct padding detection\n\n #[test]\n\n fn it_is_possible_to_recover_plaintext_even_if_is_just_zeroes() {\n\n let message = vec![0u8; 160];\n\n\n\n let payload_key_1 = [3u8; PAYLOAD_KEY_SIZE];\n\n let payload_key_2 = [4u8; PAYLOAD_KEY_SIZE];\n\n let payload_key_3 = [5u8; PAYLOAD_KEY_SIZE];\n\n let payload_keys = [payload_key_1, payload_key_2, payload_key_3];\n\n\n\n let encrypted_payload =\n\n Payload::encapsulate_message(&message, &payload_keys, DEFAULT_PAYLOAD_SIZE).unwrap();\n", "file_path": "src/payload/mod.rs", "rank": 14, "score": 61627.10693095215 }, { "content": " ErrorKind::InvalidPayload,\n\n \"specified payload_size is smaller than the required overhead\",\n\n ));\n\n // lioness 
blocksize is 32 bytes (in this implementation)\n\n // Technically this check shouldn't happen if you're not going to add any\n\n // encryption layers to the payload, but then why are you even using sphinx?\n\n } else if payload_size < lioness::DIGEST_RESULT_SIZE {\n\n return Err(Error::new(\n\n ErrorKind::InvalidPayload,\n\n \"specified payload_size is smaller lioness block size\",\n\n ));\n\n }\n\n\n\n let maximum_plaintext_length = payload_size - PAYLOAD_OVERHEAD_SIZE;\n\n if plaintext_len > maximum_plaintext_length {\n\n return Err(Error::new(\n\n ErrorKind::InvalidPayload,\n\n format!(\n\n \"too long message provided. Message was: {}B long, maximum_plaintext_length is: {}B\",\n\n plaintext_len,\n", "file_path": "src/payload/mod.rs", "rank": 15, "score": 61627.002791276376 }, { "content": "\n\n #[test]\n\n fn it_is_possible_to_recover_plaintext_from_valid_payload() {\n\n let message = vec![42u8; 160];\n\n\n\n let payload_key_1 = [3u8; PAYLOAD_KEY_SIZE];\n\n let payload_key_2 = [4u8; PAYLOAD_KEY_SIZE];\n\n let payload_key_3 = [5u8; PAYLOAD_KEY_SIZE];\n\n let payload_keys = [payload_key_1, payload_key_2, payload_key_3];\n\n\n\n let encrypted_payload =\n\n Payload::encapsulate_message(&message, &payload_keys, DEFAULT_PAYLOAD_SIZE).unwrap();\n\n\n\n let unwrapped_payload = payload_keys\n\n .iter()\n\n .fold(encrypted_payload, |current_layer, payload_key| {\n\n current_layer.unwrap(payload_key).unwrap()\n\n });\n\n\n\n let recovered_plaintext = unwrapped_payload.recover_plaintext().unwrap();\n", "file_path": "src/payload/mod.rs", "rank": 16, "score": 61625.969174744576 }, { "content": "\n\n let unwrapped_payload = payload_keys\n\n .iter()\n\n .fold(encrypted_payload, |current_layer, payload_key| {\n\n current_layer.unwrap(payload_key).unwrap()\n\n });\n\n\n\n let recovered_plaintext = unwrapped_payload.recover_plaintext().unwrap();\n\n\n\n assert_eq!(message, recovered_plaintext);\n\n }\n\n\n\n #[test]\n\n fn it_fails_to_recover_plaintext_from_invalid_payload() {\n\n 
let message = vec![42u8; 160];\n\n\n\n let payload_key_1 = [3u8; PAYLOAD_KEY_SIZE];\n\n let payload_key_2 = [4u8; PAYLOAD_KEY_SIZE];\n\n let payload_key_3 = [5u8; PAYLOAD_KEY_SIZE];\n\n let payload_keys = [payload_key_1, payload_key_2, payload_key_3];\n", "file_path": "src/payload/mod.rs", "rank": 17, "score": 61625.6378551644 }, { "content": "\n\n let encrypted_payload =\n\n Payload::encapsulate_message(&message, &payload_keys, DEFAULT_PAYLOAD_SIZE).unwrap();\n\n\n\n let unwrapped_payload = payload_keys\n\n .iter()\n\n .skip(1) // 'forget' about one key to obtain invalid decryption\n\n .fold(encrypted_payload, |current_layer, payload_key| {\n\n current_layer.unwrap(payload_key).unwrap()\n\n });\n\n\n\n assert!(unwrapped_payload.recover_plaintext().is_err())\n\n }\n\n\n\n #[test]\n\n fn it_fails_to_recover_plaintext_from_incorrectly_constructed_payload() {\n\n let zero_payload = Payload(vec![0u8; DEFAULT_PAYLOAD_SIZE]);\n\n\n\n assert!(zero_payload.recover_plaintext().is_err());\n\n }\n\n}\n", "file_path": "src/payload/mod.rs", "rank": 18, "score": 61625.62928391021 }, { "content": "// Copyright 2020 Nym Technologies SA\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\nuse crate::constants::SECURITY_PARAMETER;\n\nuse crate::header::keys::PayloadKey;\n\nuse crate::{Error, ErrorKind, Result};\n\nuse arrayref::array_ref;\n\nuse blake2::VarBlake2b;\n\nuse chacha::ChaCha; // we might want to swap this one with a different 
implementation\n", "file_path": "src/payload/mod.rs", "rank": 19, "score": 61624.83656051994 }, { "content": " // ensure payload always has correct length, because we're not testing for that\n\n let payload_size = plaintext_length + lioness::DIGEST_RESULT_SIZE;\n\n let final_payload =\n\n Payload::set_final_payload(&vec![42u8; plaintext_length], payload_size);\n\n let final_payload_inner = final_payload.into_inner();\n\n\n\n // first SECURITY_PARAMETER bytes have to be 0\n\n assert!(final_payload_inner\n\n .iter()\n\n .take(SECURITY_PARAMETER)\n\n .all(|&b| b == 0));\n\n // then the actual message should follow\n\n assert!(final_payload_inner\n\n .iter()\n\n .skip(SECURITY_PARAMETER)\n\n .take(plaintext_length)\n\n .all(|&b| b == 42));\n\n // single one\n\n assert_eq!(\n\n final_payload_inner[SECURITY_PARAMETER + plaintext_length],\n", "file_path": "src/payload/mod.rs", "rank": 20, "score": 61624.39644617215 }, { "content": " fn works_with_five_encryption_layers() {\n\n let message = vec![1u8, 16];\n\n let payload_size = 512;\n\n let payload_key_1 = [3u8; PAYLOAD_KEY_SIZE];\n\n let payload_key_2 = [4u8; PAYLOAD_KEY_SIZE];\n\n let payload_key_3 = [5u8; PAYLOAD_KEY_SIZE];\n\n let payload_key_4 = [6u8; PAYLOAD_KEY_SIZE];\n\n let payload_key_5 = [7u8; PAYLOAD_KEY_SIZE];\n\n\n\n assert!(Payload::encapsulate_message(\n\n &message,\n\n &[\n\n payload_key_1,\n\n payload_key_2,\n\n payload_key_3,\n\n payload_key_4,\n\n payload_key_5\n\n ],\n\n payload_size\n\n )\n", "file_path": "src/payload/mod.rs", "rank": 21, "score": 61623.96809181753 }, { "content": " maximum_plaintext_length\n\n ),\n\n ));\n\n }\n\n Ok(())\n\n }\n\n\n\n /// Attaches leading and trailing paddings of correct lengths to the provided plaintext message.\n\n /// Note: this function should only ever be called in [`encapsulate_message`] after\n\n /// [`validate_parameters`] was performed.\n\n fn set_final_payload(plaintext_message: &[u8], payload_size: usize) -> Self {\n\n let final_payload: Vec<u8> = 
std::iter::repeat(0u8)\n\n .take(SECURITY_PARAMETER) // start with zero-padding\n\n .chain(plaintext_message.iter().cloned()) // put the plaintext\n\n .chain(std::iter::repeat(1u8).take(1)) // add single 1 byte to indicate start of padding\n\n .chain(std::iter::repeat(0u8)) // and fill everything else with zeroes\n\n .take(payload_size) // take however much we need (remember, iterators are lazy)\n\n .collect();\n\n\n\n Payload(final_payload)\n", "file_path": "src/payload/mod.rs", "rank": 22, "score": 61623.18257213233 }, { "content": " payload_keys: &[PayloadKey],\n\n payload_size: usize,\n\n ) -> Result<Self> {\n\n Self::validate_parameters(payload_size, plaintext_message.len())?;\n\n let mut payload = Self::set_final_payload(plaintext_message, payload_size);\n\n\n\n // remember that we need to reverse the order of encryption\n\n for payload_key in payload_keys.iter().rev() {\n\n payload = payload.add_encryption_layer(payload_key)?;\n\n }\n\n\n\n Ok(payload)\n\n }\n\n\n\n /// Ensures the desires payload_size is longer than the required overhead as well\n\n /// as the blocksize of lioness encryption.\n\n /// It also checks if the plaintext can fit in the specified payload [size].\n\n fn validate_parameters(payload_size: usize, plaintext_len: usize) -> Result<()> {\n\n if payload_size < PAYLOAD_OVERHEAD_SIZE {\n\n return Err(Error::new(\n", "file_path": "src/payload/mod.rs", "rank": 23, "score": 61623.06232689354 }, { "content": " pub fn unwrap(mut self, payload_key: &PayloadKey) -> Result<Self> {\n\n let lioness_cipher = Lioness::<VarBlake2b, ChaCha>::new_raw(array_ref!(\n\n payload_key,\n\n 0,\n\n lioness::RAW_KEY_SIZE\n\n ));\n\n if let Err(err) = lioness_cipher.decrypt(&mut self.0) {\n\n return Err(Error::new(\n\n ErrorKind::InvalidPayload,\n\n format!(\"error while unwrapping payload - {}\", err),\n\n ));\n\n };\n\n Ok(self)\n\n }\n\n\n\n /// After calling [`unwrap`] required number of times with correct `payload_keys`, tries to parse\n\n /// the resultant 
payload content into original encapsulated plaintext message.\n\n pub fn recover_plaintext(self) -> Result<Vec<u8>> {\n\n debug_assert!(self.len() > PAYLOAD_OVERHEAD_SIZE);\n\n\n", "file_path": "src/payload/mod.rs", "rank": 24, "score": 61621.905509070144 }, { "content": " }\n\n\n\n /// Tries to add an additional layer of encryption onto self.\n\n fn add_encryption_layer(mut self, payload_enc_key: &PayloadKey) -> Result<Self> {\n\n let lioness_cipher = Lioness::<VarBlake2b, ChaCha>::new_raw(array_ref!(\n\n payload_enc_key,\n\n 0,\n\n lioness::RAW_KEY_SIZE\n\n ));\n\n\n\n if let Err(err) = lioness_cipher.encrypt(&mut self.0) {\n\n return Err(Error::new(\n\n ErrorKind::InvalidPayload,\n\n format!(\"error while encrypting payload - {}\", err),\n\n ));\n\n };\n\n Ok(self)\n\n }\n\n\n\n /// Tries to remove single layer of encryption from self.\n", "file_path": "src/payload/mod.rs", "rank": 25, "score": 61619.40293106005 }, { "content": " // assuming our payload is fully decrypted it has the following structure:\n\n // 00000.... (SECURITY_PARAMETER length)\n\n // plaintext (variable)\n\n // 1 (single 1 byte)\n\n // 0000 ... to pad to specified `payload_size`\n\n\n\n // In order to recover the plaintext we need to ignore first SECURITY_PARAMETER bytes\n\n // Then remove all tailing zeroes until first 1\n\n // and finally remove the first 1. 
The result should be our plaintext.\n\n // However, we must check if first SECURITY_PARAMETER bytes are actually 0\n\n if !self.0.iter().take(SECURITY_PARAMETER).all(|b| *b == 0) {\n\n return Err(Error::new(\n\n ErrorKind::InvalidPayload,\n\n \"malformed payload - no leading zero padding present\",\n\n ));\n\n }\n\n\n\n // only trailing padding present\n\n let padded_plaintext = self\n\n .into_inner()\n", "file_path": "src/payload/mod.rs", "rank": 26, "score": 61619.14627512016 }, { "content": " }\n\n\n\n fn inner(&self) -> &[u8] {\n\n &self.0\n\n }\n\n\n\n pub fn len(&self) -> usize {\n\n self.0.len()\n\n }\n\n\n\n /// View this `Payload` as slice of bytes.\n\n pub fn as_bytes(&self) -> &[u8] {\n\n self.inner()\n\n }\n\n\n\n /// Convert this `Payload` as a vector of bytes.\n\n pub fn into_bytes(self) -> Vec<u8> {\n\n self.into_inner()\n\n }\n\n\n", "file_path": "src/payload/mod.rs", "rank": 27, "score": 61619.1273696098 }, { "content": " .into_iter()\n\n .skip(SECURITY_PARAMETER)\n\n .collect::<Vec<_>>();\n\n\n\n // we are looking for first occurrence of 1 in the tail and we get its index\n\n if let Some(i) = padded_plaintext.iter().rposition(|b| *b == 1) {\n\n // and now we only take bytes until that point (but not including it)\n\n let plaintext = padded_plaintext.into_iter().take(i).collect();\n\n return Ok(plaintext);\n\n }\n\n\n\n // our plaintext is invalid\n\n Err(Error::new(\n\n ErrorKind::InvalidPayload,\n\n \"malformed payload - invalid trailing padding\",\n\n ))\n\n }\n\n\n\n fn into_inner(self) -> Vec<u8> {\n\n self.0\n", "file_path": "src/payload/mod.rs", "rank": 28, "score": 61618.70455605686 }, { "content": " &final_routing_info.enc_routing_information.get_value_ref(),\n\n );\n\n assert_eq!(\n\n expected_mac.into_inner(),\n\n final_routing_info.integrity_mac.into_inner()\n\n );\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test_encapsulating_final_routing_information {\n\n use super::*;\n\n use crate::test_utils::fixtures::{destination_fixture, 
filler_fixture, routing_keys_fixture};\n\n\n\n #[test]\n\n fn it_produces_result_of_length_filler_plus_padded_concatenated_destination_and_identifier_and_flag_for_route_of_length_5(\n\n ) {\n\n let final_keys = routing_keys_fixture();\n\n let route_len = 5;\n\n let filler = filler_fixture(route_len - 1);\n", "file_path": "src/header/routing/destination.rs", "rank": 29, "score": 60165.72738733023 }, { "content": " // sanity check assertion, because we're using vectors\n\n assert_eq!(final_routing_info_vec.len(), ENCRYPTED_ROUTING_INFO_SIZE);\n\n let mut final_routing_information = [0u8; ENCRYPTED_ROUTING_INFO_SIZE];\n\n final_routing_information\n\n .copy_from_slice(&final_routing_info_vec[..ENCRYPTED_ROUTING_INFO_SIZE]);\n\n EncryptedRoutingInformation::from_bytes(final_routing_information)\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test_encapsulating_final_routing_information_and_mac {\n\n use crate::header::mac::HeaderIntegrityMac;\n\n use crate::{\n\n header::routing::EncapsulatedRoutingInformation,\n\n test_utils::{\n\n fixtures::{destination_fixture, filler_fixture, routing_keys_fixture},\n\n random_node,\n\n },\n\n };\n\n\n", "file_path": "src/header/routing/destination.rs", "rank": 30, "score": 60164.85419938741 }, { "content": "use crate::header::keys::StreamCipherKey;\n\nuse crate::header::routing::nodes::EncryptedRoutingInformation;\n\nuse crate::header::routing::{RoutingFlag, Version, ENCRYPTED_ROUTING_INFO_SIZE, FINAL_HOP};\n\nuse crate::route::{Destination, DestinationAddressBytes, SURBIdentifier};\n\nuse crate::utils;\n\nuse rand::rngs::OsRng;\n\n\n\n// this is going through the following transformations:\n\n/*\n\n FinalRoutingInformation -> PaddedFinalRoutingInformation -> EncryptedPaddedFinalRoutingInformation ->\n\n Encrypted Padded Destination with Filler - this can be treated as EncryptedRoutingInformation\n\n*/\n\n\n\n// TODO: perhaps add route_len to all final_routing_info related structs to simplify everything?\n\n// because it seems weird that say 
'encrypt' requires route_len argument\n\npub(super) struct FinalRoutingInformation {\n\n flag: RoutingFlag,\n\n version: Version,\n\n destination: DestinationAddressBytes,\n\n // in paper delta\n", "file_path": "src/header/routing/destination.rs", "rank": 31, "score": 60163.86000947833 }, { "content": " fn max_padded_destination_identifier_length(route_len: usize) -> usize {\n\n // this should evaluate to (3 * (MAX_PATH_LENGTH - route_len) + 3) * SECURITY_PARAMETER\n\n ENCRYPTED_ROUTING_INFO_SIZE - (FILLER_STEP_SIZE_INCREASE * (route_len - 1))\n\n }\n\n\n\n pub(super) fn add_padding(self, route_len: usize) -> PaddedFinalRoutingInformation {\n\n // paper uses 0 bytes for this, however, we use random instead so that we would not be affected by the\n\n // attack on sphinx described by Kuhn et al.\n\n let padding = utils::bytes::random(\n\n &mut OsRng,\n\n ENCRYPTED_ROUTING_INFO_SIZE\n\n - (FILLER_STEP_SIZE_INCREASE * (route_len - 1))\n\n - FINAL_NODE_META_INFO_LENGTH,\n\n );\n\n\n\n // return D || I || PAD\n\n PaddedFinalRoutingInformation {\n\n value: std::iter::once(self.flag)\n\n .chain(self.version.to_bytes().into_iter())\n\n .chain(self.destination.to_bytes().iter().cloned())\n", "file_path": "src/header/routing/destination.rs", "rank": 32, "score": 60160.0095769595 }, { "content": " #[test]\n\n fn it_returns_mac_on_correct_data() {\n\n // this test is created to ensure we MAC the encrypted data BEFORE it is truncated\n\n let route = [random_node(), random_node(), random_node()];\n\n let routing_keys = [\n\n routing_keys_fixture(),\n\n routing_keys_fixture(),\n\n routing_keys_fixture(),\n\n ];\n\n let filler = filler_fixture(route.len() - 1);\n\n let destination = destination_fixture();\n\n let final_routing_info = EncapsulatedRoutingInformation::for_final_hop(\n\n &destination,\n\n &routing_keys.last().unwrap(),\n\n filler,\n\n route.len(),\n\n );\n\n\n\n let expected_mac = HeaderIntegrityMac::compute(\n\n 
routing_keys.last().unwrap().header_integrity_hmac_key,\n", "file_path": "src/header/routing/destination.rs", "rank": 33, "score": 60158.899274925294 }, { "content": " let route_len = 1;\n\n let filler = filler_fixture(route_len - 1);\n\n let destination = destination_fixture();\n\n\n\n let final_routing_header = FinalRoutingInformation::new(&destination, route_len)\n\n .add_padding(route_len)\n\n .encrypt(final_keys.stream_cipher_key, route_len)\n\n .combine_with_filler(filler, route_len);\n\n\n\n let expected_final_header_len = ENCRYPTED_ROUTING_INFO_SIZE;\n\n\n\n assert_eq!(\n\n expected_final_header_len,\n\n final_routing_header.get_value_ref().len()\n\n );\n\n }\n\n\n\n #[test]\n\n #[should_panic]\n\n fn it_panics_if_it_receives_filler_different_than_filler_step_multiplied_with_i() {\n", "file_path": "src/header/routing/destination.rs", "rank": 34, "score": 60157.82146583902 }, { "content": " let destination = destination_fixture();\n\n\n\n let final_routing_header = FinalRoutingInformation::new(&destination, route_len)\n\n .add_padding(route_len)\n\n .encrypt(final_keys.stream_cipher_key, route_len)\n\n .combine_with_filler(filler, route_len);\n\n\n\n let expected_final_header_len = ENCRYPTED_ROUTING_INFO_SIZE;\n\n\n\n assert_eq!(\n\n expected_final_header_len,\n\n final_routing_header.get_value_ref().len()\n\n );\n\n }\n\n\n\n #[test]\n\n fn it_produces_result_of_length_filler_plus_padded_concatenated_destination_and_identifier_and_flag_for_route_of_length_3(\n\n ) {\n\n let final_keys = routing_keys_fixture();\n\n let route_len = 3;\n", "file_path": "src/header/routing/destination.rs", "rank": 35, "score": 60154.705295444015 }, { "content": " let filler = filler_fixture(route_len - 1);\n\n let destination = destination_fixture();\n\n\n\n let final_routing_header = FinalRoutingInformation::new(&destination, route_len)\n\n .add_padding(route_len)\n\n .encrypt(final_keys.stream_cipher_key, route_len)\n\n .combine_with_filler(filler, route_len);\n\n\n\n let 
expected_final_header_len = ENCRYPTED_ROUTING_INFO_SIZE;\n\n\n\n assert_eq!(\n\n expected_final_header_len,\n\n final_routing_header.get_value_ref().len()\n\n );\n\n }\n\n\n\n #[test]\n\n fn it_produces_result_of_length_filler_plus_padded_concatenated_destination_and_identifier_and_flag_for_route_of_length_1(\n\n ) {\n\n let final_keys = routing_keys_fixture();\n", "file_path": "src/header/routing/destination.rs", "rank": 36, "score": 60154.622195910255 }, { "content": " .chain(self.identifier.iter().cloned())\n\n .chain(padding.iter().cloned())\n\n .collect(),\n\n }\n\n }\n\n}\n\n\n\n// in paper D || I || 0\n\npub(super) struct PaddedFinalRoutingInformation {\n\n value: Vec<u8>,\n\n}\n\n\n\nimpl PaddedFinalRoutingInformation {\n\n pub(super) fn encrypt(\n\n self,\n\n key: StreamCipherKey,\n\n route_len: usize,\n\n ) -> EncryptedPaddedFinalRoutingInformation {\n\n assert_eq!(\n\n FinalRoutingInformation::max_padded_destination_identifier_length(route_len),\n", "file_path": "src/header/routing/destination.rs", "rank": 37, "score": 60154.45080458391 }, { "content": "// Copyright 2020 Nym Technologies SA\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\nuse crate::constants::{\n\n FINAL_NODE_META_INFO_LENGTH, MAX_PATH_LENGTH, SECURITY_PARAMETER, STREAM_CIPHER_OUTPUT_LENGTH,\n\n};\n\nuse crate::crypto;\n\nuse crate::crypto::STREAM_CIPHER_INIT_VECTOR;\n\nuse crate::header::filler::{Filler, FILLER_STEP_SIZE_INCREASE};\n", 
"file_path": "src/header/routing/destination.rs", "rank": 38, "score": 60154.119968164596 }, { "content": " let final_keys = routing_keys_fixture();\n\n let route_len = 3;\n\n let filler = filler_fixture(route_len);\n\n let destination = destination_fixture();\n\n\n\n FinalRoutingInformation::new(&destination, route_len)\n\n .add_padding(route_len)\n\n .encrypt(final_keys.stream_cipher_key, route_len)\n\n .combine_with_filler(filler, route_len);\n\n }\n\n}\n", "file_path": "src/header/routing/destination.rs", "rank": 39, "score": 60152.64442435348 }, { "content": " identifier: SURBIdentifier, // in paper I\n\n}\n\n\n\nimpl FinalRoutingInformation {\n\n // TODO: this should really return a Result in case the assertion failed\n\n pub fn new(dest: &Destination, route_len: usize) -> Self {\n\n assert!(dest.address.as_bytes().len() <= Self::max_destination_length(route_len));\n\n\n\n Self {\n\n flag: FINAL_HOP,\n\n version: Version::new(),\n\n destination: dest.address,\n\n identifier: dest.identifier,\n\n }\n\n }\n\n\n\n fn max_destination_length(route_len: usize) -> usize {\n\n (3 * (MAX_PATH_LENGTH - route_len) + 2) * SECURITY_PARAMETER\n\n }\n\n\n", "file_path": "src/header/routing/destination.rs", "rank": 40, "score": 60152.22770404491 }, { "content": " value: Vec<u8>,\n\n}\n\n\n\nimpl EncryptedPaddedFinalRoutingInformation {\n\n // technically it's not exactly EncryptedRoutingInformation\n\n // as it's EncryptedPaddedFinalRoutingInformation with possibly concatenated filler string\n\n // however, for all of our purposes, it behaves exactly like EncryptedRoutingInformation\n\n pub(super) fn combine_with_filler(\n\n self,\n\n filler: Filler,\n\n route_len: usize,\n\n ) -> EncryptedRoutingInformation {\n\n let filler_value = filler.get_value();\n\n assert_eq!(\n\n filler_value.len(),\n\n FILLER_STEP_SIZE_INCREASE * (route_len - 1)\n\n );\n\n\n\n let final_routing_info_vec: Vec<u8> = self.value.into_iter().chain(filler_value).collect();\n\n\n", "file_path": 
"src/header/routing/destination.rs", "rank": 41, "score": 60150.798370344164 }, { "content": " self.value.len()\n\n );\n\n\n\n let pseudorandom_bytes = crypto::generate_pseudorandom_bytes(\n\n &key,\n\n &STREAM_CIPHER_INIT_VECTOR,\n\n STREAM_CIPHER_OUTPUT_LENGTH,\n\n );\n\n\n\n EncryptedPaddedFinalRoutingInformation {\n\n value: utils::bytes::xor(\n\n &self.value,\n\n &pseudorandom_bytes[..self.value.len()], // we already asserted it has correct length\n\n ),\n\n }\n\n }\n\n}\n\n\n\n// in paper XOR ( (D || I || 0), rho(h_{rho}(s)) )\n\npub(super) struct EncryptedPaddedFinalRoutingInformation {\n", "file_path": "src/header/routing/destination.rs", "rank": 42, "score": 60150.34566971647 }, { "content": "use crate::header::delays::Delay;\n\nuse crate::header::keys::{HeaderIntegrityMacKey, StreamCipherKey};\n\nuse crate::header::mac::HeaderIntegrityMac;\n\nuse crate::header::routing::{\n\n EncapsulatedRoutingInformation, RoutingFlag, Version, ENCRYPTED_ROUTING_INFO_SIZE, FINAL_HOP,\n\n FORWARD_HOP, TRUNCATED_ROUTING_INFO_SIZE,\n\n};\n\nuse crate::route::{DestinationAddressBytes, NodeAddressBytes, SURBIdentifier};\n\nuse crate::utils;\n\nuse crate::{Error, ErrorKind, Result};\n\nuse std::fmt;\n\n\n\npub const PADDED_ENCRYPTED_ROUTING_INFO_SIZE: usize =\n\n ENCRYPTED_ROUTING_INFO_SIZE + NODE_META_INFO_SIZE + HEADER_INTEGRITY_MAC_SIZE;\n\n\n\n// in paper beta\n\npub(super) struct RoutingInformation {\n\n flag: RoutingFlag,\n\n version: Version,\n\n // in paper nu\n", "file_path": "src/header/routing/nodes.rs", "rank": 43, "score": 59889.25244502421 }, { "content": " use super::*;\n\n use crate::{\n\n header::routing::ENCRYPTED_ROUTING_INFO_SIZE,\n\n test_utils::fixtures::{header_integrity_mac_fixture, node_address_fixture},\n\n };\n\n\n\n #[test]\n\n fn it_returns_next_hop_address_integrity_mac_enc_routing_info() {\n\n let flag = FORWARD_HOP;\n\n let address_fixture = node_address_fixture();\n\n let delay = Delay::new_from_nanos(10);\n\n let integrity_mac = 
header_integrity_mac_fixture();\n\n let next_routing_information = [1u8; ENCRYPTED_ROUTING_INFO_SIZE];\n\n let version = Version::new();\n\n\n\n let data = [\n\n vec![flag],\n\n version.to_bytes().to_vec(),\n\n address_fixture.to_bytes().to_vec(),\n\n delay.to_bytes().to_vec(),\n", "file_path": "src/header/routing/nodes.rs", "rank": 44, "score": 59888.57884668717 }, { "content": " .encapsulate_with_mac(previous_node_routing_keys.header_integrity_hmac_key);\n\n\n\n assert_eq!(\n\n expected_encrypted_routing_info_vec,\n\n next_layer_routing.enc_routing_information.value.to_vec()\n\n );\n\n assert_eq!(\n\n expected_routing_mac,\n\n next_layer_routing.integrity_mac.as_bytes().to_vec()\n\n );\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod encrypting_routing_information {\n\n use super::*;\n\n use crate::{\n\n crypto::STREAM_CIPHER_KEY_SIZE,\n\n test_utils::fixtures::{header_integrity_mac_fixture, node_address_fixture},\n\n };\n", "file_path": "src/header/routing/nodes.rs", "rank": 45, "score": 59888.435494554265 }, { "content": "}\n\n\n\n#[cfg(test)]\n\nmod truncating_routing_information {\n\n use crate::test_utils::fixtures::encrypted_routing_information_fixture;\n\n\n\n #[test]\n\n fn it_does_not_change_prefixed_data() {\n\n let encrypted_routing_info = encrypted_routing_information_fixture();\n\n let routing_info_data_copy = encrypted_routing_info.value;\n\n\n\n let truncated_routing_info = encrypted_routing_info.truncate();\n\n for i in 0..truncated_routing_info.len() {\n\n assert_eq!(truncated_routing_info[i], routing_info_data_copy[i]);\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod parse_decrypted_routing_information {\n", "file_path": "src/header/routing/nodes.rs", "rank": 46, "score": 59883.917323323054 }, { "content": " node_address: NodeAddressBytes,\n\n delay: Delay,\n\n // in paper gamma\n\n header_integrity_mac: HeaderIntegrityMac,\n\n // in paper also beta (!)\n\n next_routing_information: TruncatedRoutingInformation,\n\n}\n\n\n\nimpl RoutingInformation {\n\n 
pub(super) fn new(\n\n node_address: NodeAddressBytes,\n\n delay: Delay,\n\n next_encapsulated_routing_information: EncapsulatedRoutingInformation,\n\n ) -> Self {\n\n RoutingInformation {\n\n flag: FORWARD_HOP,\n\n version: Version::new(),\n\n node_address,\n\n delay,\n\n header_integrity_mac: next_encapsulated_routing_information.integrity_mac,\n", "file_path": "src/header/routing/nodes.rs", "rank": 47, "score": 59882.137561654614 }, { "content": "// Copyright 2020 Nym Technologies SA\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\nuse crate::constants::{\n\n DELAY_LENGTH, DESTINATION_ADDRESS_LENGTH, HEADER_INTEGRITY_MAC_SIZE, NODE_ADDRESS_LENGTH,\n\n NODE_META_INFO_SIZE, STREAM_CIPHER_OUTPUT_LENGTH, VERSION_LENGTH,\n\n};\n\nuse crate::crypto;\n\nuse crate::crypto::STREAM_CIPHER_INIT_VECTOR;\n", "file_path": "src/header/routing/nodes.rs", "rank": 48, "score": 59881.73287321656 }, { "content": " HeaderIntegrityMac::from_bytes(next_hop_integrity_mac),\n\n );\n\n\n\n ParsedRawRoutingInformation::ForwardHop(\n\n NodeAddressBytes::from_bytes(next_hop_address),\n\n Delay::from_bytes(delay_bytes),\n\n next_hop_encapsulated_routing_info,\n\n )\n\n }\n\n\n\n // TODO: this needs to be updated as a correct parse as final hop function!\n\n fn parse_as_final_hop(self) -> ParsedRawRoutingInformation {\n\n let mut i = 1;\n\n\n\n let mut version: [u8; VERSION_LENGTH] = Default::default();\n\n version.copy_from_slice(&self.value[i..i + 
VERSION_LENGTH]);\n\n i += VERSION_LENGTH;\n\n\n\n let mut destination_bytes: [u8; DESTINATION_ADDRESS_LENGTH] = Default::default();\n\n destination_bytes.copy_from_slice(&self.value[i..i + DESTINATION_ADDRESS_LENGTH]);\n", "file_path": "src/header/routing/nodes.rs", "rank": 49, "score": 59879.72786903459 }, { "content": "\n\n #[test]\n\n fn it_is_possible_to_decrypt_it_to_recover_original_data() {\n\n let key = [2u8; STREAM_CIPHER_KEY_SIZE];\n\n let flag = FORWARD_HOP;\n\n let address = node_address_fixture();\n\n let delay = Delay::new_from_nanos(15);\n\n let mac = header_integrity_mac_fixture();\n\n let next_routing = [8u8; TRUNCATED_ROUTING_INFO_SIZE];\n\n\n\n let version = Version::new();\n\n let encryption_data = [\n\n vec![flag],\n\n version.to_bytes().to_vec(),\n\n address.to_bytes().to_vec(),\n\n delay.to_bytes().to_vec(),\n\n mac.as_bytes().to_vec(),\n\n next_routing.to_vec(),\n\n ]\n\n .concat();\n", "file_path": "src/header/routing/nodes.rs", "rank": 50, "score": 59879.57128825166 }, { "content": " RawRoutingInformation {\n\n value: utils::bytes::xor(&self.value, &pseudorandom_bytes),\n\n }\n\n }\n\n}\n\n\n\npub struct RawRoutingInformation {\n\n value: Vec<u8>,\n\n}\n\n\n\npub enum ParsedRawRoutingInformation {\n\n ForwardHop(NodeAddressBytes, Delay, EncapsulatedRoutingInformation),\n\n FinalHop(DestinationAddressBytes, SURBIdentifier),\n\n}\n\n\n\nimpl RawRoutingInformation {\n\n pub fn parse(self) -> Result<ParsedRawRoutingInformation> {\n\n assert_eq!(\n\n NODE_META_INFO_SIZE + HEADER_INTEGRITY_MAC_SIZE + ENCRYPTED_ROUTING_INFO_SIZE,\n\n self.value.len()\n", "file_path": "src/header/routing/nodes.rs", "rank": 51, "score": 59879.3618872015 }, { "content": "\n\n let version = Version::new();\n\n // calculate everything without using any object methods\n\n let concatenated_materials: Vec<u8> = [\n\n vec![FORWARD_HOP],\n\n version.to_bytes().to_vec(),\n\n node_address.to_bytes().to_vec(),\n\n delay.to_bytes().to_vec(),\n\n 
inner_layer_routing.integrity_mac.as_bytes().to_vec(),\n\n inner_layer_routing\n\n .enc_routing_information\n\n .value\n\n .to_vec()\n\n .iter()\n\n .cloned()\n\n .take(TRUNCATED_ROUTING_INFO_SIZE)\n\n .collect(),\n\n ]\n\n .concat();\n\n\n", "file_path": "src/header/routing/nodes.rs", "rank": 52, "score": 59878.98665494256 }, { "content": " next_routing_information: next_encapsulated_routing_information\n\n .enc_routing_information\n\n .truncate(),\n\n }\n\n }\n\n\n\n fn concatenate_components(self) -> Vec<u8> {\n\n std::iter::once(self.flag)\n\n .chain(self.version.to_bytes().iter().cloned())\n\n .chain(self.node_address.as_bytes().iter().cloned())\n\n .chain(self.delay.to_bytes().iter().cloned())\n\n .chain(self.header_integrity_mac.into_inner().into_iter())\n\n .chain(self.next_routing_information.iter().cloned())\n\n .collect()\n\n }\n\n\n\n pub(super) fn encrypt(self, key: StreamCipherKey) -> EncryptedRoutingInformation {\n\n let routing_info_components = self.concatenate_components();\n\n assert_eq!(ENCRYPTED_ROUTING_INFO_SIZE, routing_info_components.len());\n\n\n", "file_path": "src/header/routing/nodes.rs", "rank": 53, "score": 59878.33898563637 }, { "content": " let mut next_hop_address: [u8; NODE_ADDRESS_LENGTH] = Default::default();\n\n next_hop_address.copy_from_slice(&self.value[i..i + NODE_ADDRESS_LENGTH]);\n\n i += NODE_ADDRESS_LENGTH;\n\n\n\n let mut delay_bytes: [u8; DELAY_LENGTH] = Default::default();\n\n delay_bytes.copy_from_slice(&self.value[i..i + DELAY_LENGTH]);\n\n i += DELAY_LENGTH;\n\n\n\n // the next HEADER_INTEGRITY_MAC_SIZE bytes represent the integrity mac on the next hop\n\n let mut next_hop_integrity_mac: [u8; HEADER_INTEGRITY_MAC_SIZE] = Default::default();\n\n next_hop_integrity_mac.copy_from_slice(&self.value[i..i + HEADER_INTEGRITY_MAC_SIZE]);\n\n i += HEADER_INTEGRITY_MAC_SIZE;\n\n\n\n // the next ENCRYPTED_ROUTING_INFO_SIZE bytes represent the routing information for the next hop\n\n let mut 
next_hop_encrypted_routing_information = [0u8; ENCRYPTED_ROUTING_INFO_SIZE];\n\n next_hop_encrypted_routing_information\n\n .copy_from_slice(&self.value[i..i + ENCRYPTED_ROUTING_INFO_SIZE]);\n\n\n\n let next_hop_encapsulated_routing_info = EncapsulatedRoutingInformation::encapsulate(\n\n EncryptedRoutingInformation::from_bytes(next_hop_encrypted_routing_information),\n", "file_path": "src/header/routing/nodes.rs", "rank": 54, "score": 59878.04006747674 }, { "content": " let pseudorandom_bytes = crypto::generate_pseudorandom_bytes(\n\n &previous_node_routing_keys.stream_cipher_key,\n\n &STREAM_CIPHER_INIT_VECTOR,\n\n STREAM_CIPHER_OUTPUT_LENGTH,\n\n );\n\n\n\n let expected_encrypted_routing_info_vec = utils::bytes::xor(\n\n &concatenated_materials,\n\n &pseudorandom_bytes[..ENCRYPTED_ROUTING_INFO_SIZE],\n\n );\n\n\n\n let expected_routing_mac = crypto::compute_keyed_hmac::<HeaderIntegrityHmacAlgorithm>(\n\n &previous_node_routing_keys.header_integrity_hmac_key,\n\n &expected_encrypted_routing_info_vec,\n\n );\n\n let mut expected_routing_mac = expected_routing_mac.into_bytes().to_vec();\n\n expected_routing_mac.truncate(HEADER_INTEGRITY_MAC_SIZE);\n\n\n\n let next_layer_routing = RoutingInformation::new(node_address, delay, inner_layer_routing)\n\n .encrypt(previous_node_routing_keys.stream_cipher_key)\n", "file_path": "src/header/routing/nodes.rs", "rank": 55, "score": 59877.220176984665 }, { "content": " integrity_mac.as_bytes().to_vec(),\n\n next_routing_information.to_vec(),\n\n ]\n\n .concat();\n\n\n\n let raw_routing_info = RawRoutingInformation { value: data };\n\n\n\n match raw_routing_info.parse().unwrap() {\n\n ParsedRawRoutingInformation::ForwardHop(\n\n next_address,\n\n _delay,\n\n encapsulated_routing_info,\n\n ) => {\n\n assert_eq!(address_fixture, next_address);\n\n assert_eq!(\n\n integrity_mac.as_bytes().to_vec(),\n\n encapsulated_routing_info.integrity_mac.as_bytes().to_vec()\n\n );\n\n assert_eq!(\n\n next_routing_information.to_vec(),\n", 
"file_path": "src/header/routing/nodes.rs", "rank": 56, "score": 59876.62536600829 }, { "content": " }\n\n }\n\n\n\n fn add_zero_padding(self) -> PaddedEncryptedRoutingInformation {\n\n let zero_bytes =\n\n std::iter::repeat(0u8).take(NODE_META_INFO_SIZE + HEADER_INTEGRITY_MAC_SIZE);\n\n let padded_enc_routing_info: Vec<u8> =\n\n self.value.iter().cloned().chain(zero_bytes).collect();\n\n\n\n assert_eq!(\n\n PADDED_ENCRYPTED_ROUTING_INFO_SIZE,\n\n padded_enc_routing_info.len()\n\n );\n\n PaddedEncryptedRoutingInformation {\n\n value: padded_enc_routing_info,\n\n }\n\n }\n\n\n\n pub(crate) fn unwrap(\n\n self,\n", "file_path": "src/header/routing/nodes.rs", "rank": 57, "score": 59876.404859117625 }, { "content": "\n\n let routing_information = RoutingInformation {\n\n flag: FORWARD_HOP,\n\n version,\n\n node_address: address,\n\n delay,\n\n header_integrity_mac: mac,\n\n next_routing_information: next_routing,\n\n };\n\n\n\n let encrypted_data = routing_information.encrypt(key);\n\n let decryption_key_source = crypto::generate_pseudorandom_bytes(\n\n &key,\n\n &STREAM_CIPHER_INIT_VECTOR,\n\n STREAM_CIPHER_OUTPUT_LENGTH,\n\n );\n\n let decryption_key = &decryption_key_source[..ENCRYPTED_ROUTING_INFO_SIZE];\n\n let decrypted_data = utils::bytes::xor(&encrypted_data.value, decryption_key);\n\n assert_eq!(encryption_data, decrypted_data);\n\n }\n", "file_path": "src/header/routing/nodes.rs", "rank": 58, "score": 59876.00918523916 }, { "content": " encapsulated_routing_info\n\n .enc_routing_information\n\n .get_value_ref()\n\n .to_vec()\n\n );\n\n }\n\n ParsedRawRoutingInformation::FinalHop(_, _) => panic!(),\n\n }\n\n }\n\n}\n", "file_path": "src/header/routing/nodes.rs", "rank": 59, "score": 59875.08527401427 }, { "content": " }\n\n\n\n fn truncate(self) -> TruncatedRoutingInformation {\n\n let mut truncated_routing_info = [0u8; TRUNCATED_ROUTING_INFO_SIZE];\n\n truncated_routing_info.copy_from_slice(&self.value[..TRUNCATED_ROUTING_INFO_SIZE]);\n\n 
truncated_routing_info\n\n }\n\n\n\n pub fn get_value_ref(&self) -> &[u8] {\n\n self.value.as_ref()\n\n }\n\n\n\n pub(super) fn encapsulate_with_mac(\n\n self,\n\n key: HeaderIntegrityMacKey,\n\n ) -> EncapsulatedRoutingInformation {\n\n let integrity_mac = HeaderIntegrityMac::compute(key, &self.value);\n\n EncapsulatedRoutingInformation {\n\n enc_routing_information: self,\n\n integrity_mac,\n", "file_path": "src/header/routing/nodes.rs", "rank": 60, "score": 59874.22730610214 }, { "content": " i += DESTINATION_ADDRESS_LENGTH;\n\n let destination = DestinationAddressBytes::from_bytes(destination_bytes);\n\n\n\n // the next HEADER_INTEGRITY_MAC_SIZE bytes represent the integrity mac on the next hop\n\n let mut identifier: [u8; HEADER_INTEGRITY_MAC_SIZE] = Default::default();\n\n identifier.copy_from_slice(&self.value[i..i + HEADER_INTEGRITY_MAC_SIZE]);\n\n\n\n ParsedRawRoutingInformation::FinalHop(destination, identifier)\n\n }\n\n}\n\n\n\n// result of truncating encrypted beta before passing it to next 'layer'\n", "file_path": "src/header/routing/nodes.rs", "rank": 61, "score": 59874.15342650075 }, { "content": "// result of xoring beta with rho (output of PRNG)\n\n// the derivation is only required for the tests. 
please remove it in production\n\n#[derive(Clone)]\n\npub struct EncryptedRoutingInformation {\n\n value: [u8; ENCRYPTED_ROUTING_INFO_SIZE],\n\n}\n\n\n\nimpl fmt::Debug for EncryptedRoutingInformation {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n write!(\n\n f,\n\n \"EncryptedRoutingInformation: {{ value: {:?} }}\",\n\n self.value.to_vec()\n\n )\n\n }\n\n}\n\n\n\nimpl EncryptedRoutingInformation {\n\n pub fn from_bytes(bytes: [u8; ENCRYPTED_ROUTING_INFO_SIZE]) -> Self {\n\n Self { value: bytes }\n", "file_path": "src/header/routing/nodes.rs", "rank": 62, "score": 59873.96996644698 }, { "content": " );\n\n\n\n let flag = self.value[0];\n\n match flag {\n\n FORWARD_HOP => Ok(self.parse_as_forward_hop()),\n\n FINAL_HOP => Ok(self.parse_as_final_hop()),\n\n _ => Err(Error::new(\n\n ErrorKind::InvalidRouting,\n\n format!(\"tried to parse unknown routing flag: {}\", flag),\n\n )),\n\n }\n\n }\n\n\n\n fn parse_as_forward_hop(self) -> ParsedRawRoutingInformation {\n\n let mut i = 1;\n\n\n\n let mut version: [u8; VERSION_LENGTH] = Default::default();\n\n version.copy_from_slice(&self.value[i..i + VERSION_LENGTH]);\n\n i += VERSION_LENGTH;\n\n\n", "file_path": "src/header/routing/nodes.rs", "rank": 63, "score": 59873.74301809441 }, { "content": " let pseudorandom_bytes = crypto::generate_pseudorandom_bytes(\n\n &key,\n\n &STREAM_CIPHER_INIT_VECTOR,\n\n STREAM_CIPHER_OUTPUT_LENGTH,\n\n );\n\n\n\n let encrypted_routing_info_vec = utils::bytes::xor(\n\n &routing_info_components,\n\n &pseudorandom_bytes[..ENCRYPTED_ROUTING_INFO_SIZE],\n\n );\n\n\n\n let mut encrypted_routing_info = [0u8; ENCRYPTED_ROUTING_INFO_SIZE];\n\n encrypted_routing_info.copy_from_slice(&encrypted_routing_info_vec);\n\n\n\n EncryptedRoutingInformation {\n\n value: encrypted_routing_info,\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/header/routing/nodes.rs", "rank": 64, "score": 59871.801449951716 }, { "content": " stream_cipher_key: StreamCipherKey,\n\n ) -> 
Result<ParsedRawRoutingInformation> {\n\n // we have to add padding to the encrypted routing information before decrypting, otherwise we gonna lose information\n\n self.add_zero_padding().decrypt(stream_cipher_key).parse()\n\n }\n\n}\n\n\n\npub struct PaddedEncryptedRoutingInformation {\n\n value: Vec<u8>,\n\n}\n\n\n\nimpl PaddedEncryptedRoutingInformation {\n\n pub fn decrypt(self, key: StreamCipherKey) -> RawRoutingInformation {\n\n let pseudorandom_bytes = crypto::generate_pseudorandom_bytes(\n\n &key,\n\n &crypto::STREAM_CIPHER_INIT_VECTOR,\n\n STREAM_CIPHER_OUTPUT_LENGTH,\n\n );\n\n\n\n assert_eq!(self.value.len(), pseudorandom_bytes.len());\n", "file_path": "src/header/routing/nodes.rs", "rank": 65, "score": 59871.331505334536 }, { "content": "// result of truncating encrypted beta before passing it to next 'layer'\n\ntype TruncatedRoutingInformation = [u8; TRUNCATED_ROUTING_INFO_SIZE];\n\n\n\n#[cfg(test)]\n\nmod preparing_header_layer {\n\n use super::*;\n\n use crate::constants::HeaderIntegrityHmacAlgorithm;\n\n use crate::{\n\n constants::HEADER_INTEGRITY_MAC_SIZE,\n\n test_utils::fixtures::{\n\n encapsulated_routing_information_fixture, node_address_fixture, routing_keys_fixture,\n\n },\n\n };\n\n\n\n #[test]\n\n fn it_returns_encrypted_truncated_address_and_flag_concatenated_with_inner_layer_and_mac_on_it()\n\n {\n\n let node_address = node_address_fixture();\n\n let delay = Delay::new_from_nanos(10);\n\n let previous_node_routing_keys = routing_keys_fixture();\n\n let inner_layer_routing = encapsulated_routing_information_fixture();\n", "file_path": "src/header/routing/nodes.rs", "rank": 66, "score": 58005.802017792266 }, { "content": " fn it_panic_if_empty_keys_are_provided() {\n\n let route = [random_node(), random_node()];\n\n let destination = destination_fixture();\n\n let delays = [\n\n Delay::new_from_nanos(10),\n\n Delay::new_from_nanos(20),\n\n Delay::new_from_nanos(30),\n\n ];\n\n let keys = vec![];\n\n let filler = filler_fixture(route.len() 
- 1);\n\n\n\n EncapsulatedRoutingInformation::new(&route, &destination, &delays, &keys, filler);\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod encapsulating_forward_routing_information {\n\n use super::*;\n\n use crate::test_utils::{\n\n fixtures::{destination_fixture, filler_fixture, routing_keys_fixture},\n", "file_path": "src/header/routing/mod.rs", "rank": 67, "score": 57652.491546372614 }, { "content": " };\n\n\n\n #[test]\n\n #[should_panic]\n\n fn it_panics_if_route_is_longer_than_keys() {\n\n let route = [random_node(), random_node(), random_node()];\n\n let destination = destination_fixture();\n\n let delays = [\n\n Delay::new_from_nanos(10),\n\n Delay::new_from_nanos(20),\n\n Delay::new_from_nanos(30),\n\n ];\n\n let keys = [routing_keys_fixture(), routing_keys_fixture()];\n\n let filler = filler_fixture(route.len() - 1);\n\n\n\n EncapsulatedRoutingInformation::new(&route, &destination, &delays, &keys, filler);\n\n }\n\n\n\n #[test]\n\n #[should_panic]\n", "file_path": "src/header/routing/mod.rs", "rank": 68, "score": 57644.2087318339 }, { "content": " ..HEADER_INTEGRITY_MAC_SIZE + ENCRYPTED_ROUTING_INFO_SIZE],\n\n );\n\n\n\n let integrity_mac = HeaderIntegrityMac::from_bytes(integrity_mac_bytes);\n\n let enc_routing_information =\n\n EncryptedRoutingInformation::from_bytes(enc_routing_info_bytes);\n\n\n\n Ok(EncapsulatedRoutingInformation {\n\n enc_routing_information,\n\n integrity_mac,\n\n })\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod encapsulating_all_routing_information {\n\n use super::*;\n\n use crate::test_utils::{\n\n fixtures::{destination_fixture, filler_fixture, routing_keys_fixture},\n\n random_node,\n", "file_path": "src/header/routing/mod.rs", "rank": 69, "score": 57643.547215549406 }, { "content": " fn it_panics_if_keys_are_longer_than_route() {\n\n let route = [random_node(), random_node()];\n\n let destination = destination_fixture();\n\n let delays = [\n\n Delay::new_from_nanos(10),\n\n Delay::new_from_nanos(20),\n\n Delay::new_from_nanos(30),\n\n 
];\n\n let keys = [\n\n routing_keys_fixture(),\n\n routing_keys_fixture(),\n\n routing_keys_fixture(),\n\n ];\n\n let filler = filler_fixture(route.len() - 1);\n\n\n\n EncapsulatedRoutingInformation::new(&route, &destination, &delays, &keys, filler);\n\n }\n\n\n\n #[test]\n\n #[should_panic]\n", "file_path": "src/header/routing/mod.rs", "rank": 70, "score": 57642.65997982414 }, { "content": "use crate::header::routing::nodes::{EncryptedRoutingInformation, RoutingInformation};\n\nuse crate::route::{Destination, Node, NodeAddressBytes};\n\nuse crate::{Error, ErrorKind, Result};\n\n\n\npub const TRUNCATED_ROUTING_INFO_SIZE: usize =\n\n ENCRYPTED_ROUTING_INFO_SIZE - (NODE_META_INFO_SIZE + HEADER_INTEGRITY_MAC_SIZE);\n\npub const ENCRYPTED_ROUTING_INFO_SIZE: usize =\n\n (NODE_META_INFO_SIZE + HEADER_INTEGRITY_MAC_SIZE) * MAX_PATH_LENGTH;\n\n\n\npub mod destination;\n\npub mod nodes;\n\n\n\npub const FORWARD_HOP: RoutingFlag = 1;\n\npub const FINAL_HOP: RoutingFlag = 2;\n\n\n\npub type RoutingFlag = u8;\n\n\n\n#[derive(Default)]\n\npub struct Version {\n\n major: u8,\n", "file_path": "src/header/routing/mod.rs", "rank": 71, "score": 57641.65239922287 }, { "content": " random_node,\n\n };\n\n\n\n #[test]\n\n fn it_correctly_generates_sphinx_routing_information_for_route_of_length_3() {\n\n // this is basically loop unwrapping, but considering the complex logic behind it, it's warranted\n\n let route = [random_node(), random_node(), random_node()];\n\n let destination = destination_fixture();\n\n let delay0 = Delay::new_from_nanos(10);\n\n let delay1 = Delay::new_from_nanos(20);\n\n let delay2 = Delay::new_from_nanos(30);\n\n let delays = [delay0.clone(), delay1.clone(), delay2].to_vec();\n\n let routing_keys = [\n\n routing_keys_fixture(),\n\n routing_keys_fixture(),\n\n routing_keys_fixture(),\n\n ];\n\n let filler = filler_fixture(route.len() - 1);\n\n let filler_copy = filler_fixture(route.len() - 1);\n\n assert_eq!(filler, filler_copy);\n", "file_path": 
"src/header/routing/mod.rs", "rank": 72, "score": 57639.96151299836 }, { "content": " fn it_panics_if_empty_route_is_provided() {\n\n let route = vec![];\n\n let destination = destination_fixture();\n\n let delays = [\n\n Delay::new_from_nanos(10),\n\n Delay::new_from_nanos(20),\n\n Delay::new_from_nanos(30),\n\n ];\n\n let keys = [\n\n routing_keys_fixture(),\n\n routing_keys_fixture(),\n\n routing_keys_fixture(),\n\n ];\n\n let filler = filler_fixture(route.len() - 1);\n\n\n\n EncapsulatedRoutingInformation::new(&route, &destination, &delays, &keys, filler);\n\n }\n\n\n\n #[test]\n\n #[should_panic]\n", "file_path": "src/header/routing/mod.rs", "rank": 73, "score": 57639.49265622352 }, { "content": "\n\n#[cfg(test)]\n\nmod converting_encapsulated_routing_info_to_bytes {\n\n use super::*;\n\n use crate::test_utils::fixtures::encapsulated_routing_information_fixture;\n\n\n\n #[test]\n\n fn it_is_possible_to_convert_back_and_forth() {\n\n let encapsulated_routing_info = encapsulated_routing_information_fixture();\n\n let encapsulated_routing_info_bytes = encapsulated_routing_info.to_bytes();\n\n\n\n let recovered_routing_info =\n\n EncapsulatedRoutingInformation::from_bytes(&encapsulated_routing_info_bytes).unwrap();\n\n assert_eq!(\n\n encapsulated_routing_info\n\n .enc_routing_information\n\n .get_value_ref()\n\n .to_vec(),\n\n recovered_routing_info\n\n .enc_routing_information\n", "file_path": "src/header/routing/mod.rs", "rank": 74, "score": 57638.683333165776 }, { "content": "// Copyright 2020 Nym Technologies SA\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, 
either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\nuse crate::constants::{HEADER_INTEGRITY_MAC_SIZE, MAX_PATH_LENGTH, NODE_META_INFO_SIZE};\n\nuse crate::header::delays::Delay;\n\nuse crate::header::filler::Filler;\n\nuse crate::header::keys::RoutingKeys;\n\nuse crate::header::mac::HeaderIntegrityMac;\n\nuse crate::header::routing::destination::FinalRoutingInformation;\n", "file_path": "src/header/routing/mod.rs", "rank": 75, "score": 57638.63733878643 }, { "content": "pub struct EncapsulatedRoutingInformation {\n\n pub(crate) enc_routing_information: EncryptedRoutingInformation,\n\n pub(crate) integrity_mac: HeaderIntegrityMac,\n\n}\n\n\n\nimpl EncapsulatedRoutingInformation {\n\n pub fn encapsulate(\n\n enc_routing_information: EncryptedRoutingInformation,\n\n integrity_mac: HeaderIntegrityMac,\n\n ) -> Self {\n\n Self {\n\n enc_routing_information,\n\n integrity_mac,\n\n }\n\n }\n\n\n\n pub fn new(\n\n route: &[Node],\n\n destination: &Destination,\n\n delays: &[Delay],\n", "file_path": "src/header/routing/mod.rs", "rank": 76, "score": 57637.7096153261 }, { "content": " routing_keys: &[RoutingKeys],\n\n filler: Filler,\n\n ) -> Self {\n\n assert_eq!(route.len(), routing_keys.len());\n\n assert_eq!(delays.len(), route.len());\n\n\n\n let final_keys = match routing_keys.last() {\n\n Some(k) => k,\n\n None => panic!(\"empty keys\"),\n\n };\n\n\n\n let encapsulated_destination_routing_info =\n\n Self::for_final_hop(destination, final_keys, filler, route.len());\n\n\n\n Self::for_forward_hops(\n\n encapsulated_destination_routing_info,\n\n &delays,\n\n route,\n\n routing_keys,\n\n )\n", "file_path": "src/header/routing/mod.rs", "rank": 77, "score": 57637.419508406376 }, { "content": " }\n\n #[test]\n\n fn it_correctly_generates_sphinx_routing_information_for_route_of_max_length() {\n\n // this is basically loop unwrapping, but considering the complex iterator, it's 
warranted\n\n assert_eq!(5, MAX_PATH_LENGTH); // make sure we catch it if we decided to change the constant\n\n\n\n /* since we're using max path length we expect literally:\n\n n4 || m4 || n3 || m3 || n2 || m2 || n1 || m1 || d || i || p\n\n // so literally no filler!\n\n where:\n\n {n1, n2, ...} are node addresses\n\n {m1, m2, ...} are macs on previous layers\n\n d is destination address\n\n i is destination identifier\n\n p is destination padding\n\n */\n\n // TODO: IMPLEMENT SPHINX HEADER LAYER UNWRAPPING\n\n // HOWEVER! to test it, we need to first wrap function to unwrap header layer because each consecutive (ni, mi) pair is encrypted\n\n }\n\n}\n", "file_path": "src/header/routing/mod.rs", "rank": 78, "score": 57637.364202227174 }, { "content": " routing_keys: &[RoutingKeys], // [Keys0, Keys1, Keys2, ..., Keys_{v-1}, Keys_v]\n\n ) -> Self {\n\n route\n\n .iter()\n\n .skip(1) // we don't want the first element as person creating the packet knows the address of the first hop\n\n .map(|node| node.address.to_bytes()) // we only care about the address field\n\n .zip(\n\n // we need both route (i.e. 
address field) and corresponding keys of the PREVIOUS hop\n\n routing_keys.iter().take(routing_keys.len() - 1), // we don't want last element - it was already used to encrypt the destination\n\n )\n\n .zip(delays.iter().take(delays.len() - 1)) // no need for the delay for the final node\n\n .rev() // we are working from the 'inside'\n\n // we should be getting here\n\n // [(Mix_v, Keys_{v-1}, Delay_{v-1}), (Mix_{v-1}, Keys_{v-2}, Delay_{v-2}), ..., (Mix2, Keys1, Delay1), (Mix1, Keys0, Delay0)]\n\n .fold(\n\n // we start from the already created encrypted final routing info and mac for the destination\n\n // (encrypted with Keys_v)\n\n encapsulated_destination_routing_info,\n\n |next_hop_encapsulated_routing_information,\n\n ((current_node_address, previous_node_routing_keys), delay)| {\n", "file_path": "src/header/routing/mod.rs", "rank": 79, "score": 57637.28710765112 }, { "content": " }\n\n\n\n fn for_final_hop(\n\n dest: &Destination,\n\n routing_keys: &RoutingKeys,\n\n filler: Filler,\n\n route_len: usize,\n\n ) -> Self {\n\n // personal note: I like how this looks so much.\n\n FinalRoutingInformation::new(dest, route_len)\n\n .add_padding(route_len) // add padding to obtain correct destination length\n\n .encrypt(routing_keys.stream_cipher_key, route_len) // encrypt with the key of final node (in our case service provider)\n\n .combine_with_filler(filler, route_len) // add filler to get header of correct length\n\n .encapsulate_with_mac(routing_keys.header_integrity_hmac_key) // combine the previous data with a MAC on the header (also calculated with the SPs key)\n\n }\n\n\n\n fn for_forward_hops(\n\n encapsulated_destination_routing_info: Self,\n\n delays: &[Delay],\n\n route: &[Node], // [Mix0, Mix1, Mix2, ..., Mix_{v-1}, Mix_v]\n", "file_path": "src/header/routing/mod.rs", "rank": 80, "score": 57633.788815893364 }, { "content": " );\n\n assert_eq!(\n\n destination_routing_info.integrity_mac.as_bytes().to_vec(),\n\n destination_routing_info_copy\n\n 
.integrity_mac\n\n .as_bytes()\n\n .to_vec()\n\n );\n\n\n\n let routing_info = EncapsulatedRoutingInformation::for_forward_hops(\n\n destination_routing_info,\n\n &delays,\n\n &route,\n\n &routing_keys,\n\n );\n\n\n\n let layer_1_routing =\n\n RoutingInformation::new(route[2].address, delay1, destination_routing_info_copy)\n\n .encrypt(routing_keys[1].stream_cipher_key)\n\n .encapsulate_with_mac(routing_keys[1].header_integrity_hmac_key);\n", "file_path": "src/header/routing/mod.rs", "rank": 81, "score": 57631.55740687194 }, { "content": " RoutingInformation::new(\n\n NodeAddressBytes::from_bytes(current_node_address),\n\n delay.to_owned(),\n\n next_hop_encapsulated_routing_information,\n\n )\n\n .encrypt(previous_node_routing_keys.stream_cipher_key)\n\n .encapsulate_with_mac(previous_node_routing_keys.header_integrity_hmac_key)\n\n },\n\n )\n\n }\n\n\n\n pub fn to_bytes(&self) -> Vec<u8> {\n\n self.integrity_mac\n\n .as_bytes()\n\n .iter()\n\n .cloned()\n\n .chain(self.enc_routing_information.get_value_ref().iter().cloned())\n\n .collect()\n\n }\n\n\n", "file_path": "src/header/routing/mod.rs", "rank": 82, "score": 57631.13706164825 }, { "content": "\n\n let destination_routing_info = EncapsulatedRoutingInformation::for_final_hop(\n\n &destination,\n\n &routing_keys.last().unwrap(),\n\n filler,\n\n route.len(),\n\n );\n\n\n\n let destination_routing_info_copy = destination_routing_info.clone();\n\n\n\n // sanity check to make sure our 'copy' worked\n\n assert_eq!(\n\n destination_routing_info\n\n .enc_routing_information\n\n .get_value_ref()\n\n .to_vec(),\n\n destination_routing_info_copy\n\n .enc_routing_information\n\n .get_value_ref()\n\n .to_vec()\n", "file_path": "src/header/routing/mod.rs", "rank": 83, "score": 57629.391908126476 }, { "content": " pub fn from_bytes(bytes: &[u8]) -> Result<Self> {\n\n if bytes.len() != HEADER_INTEGRITY_MAC_SIZE + ENCRYPTED_ROUTING_INFO_SIZE {\n\n return Err(Error::new(\n\n ErrorKind::InvalidRouting,\n\n format!(\n\n \"tried 
to recover routing information using {} bytes, expected {}\",\n\n bytes.len(),\n\n HEADER_INTEGRITY_MAC_SIZE + ENCRYPTED_ROUTING_INFO_SIZE\n\n ),\n\n ));\n\n }\n\n\n\n let mut integrity_mac_bytes = [0u8; HEADER_INTEGRITY_MAC_SIZE];\n\n let mut enc_routing_info_bytes = [0u8; ENCRYPTED_ROUTING_INFO_SIZE];\n\n\n\n // first bytes represent the mac\n\n integrity_mac_bytes.copy_from_slice(&bytes[..HEADER_INTEGRITY_MAC_SIZE]);\n\n // the rest are for the routing info\n\n enc_routing_info_bytes.copy_from_slice(\n\n &bytes[HEADER_INTEGRITY_MAC_SIZE\n", "file_path": "src/header/routing/mod.rs", "rank": 84, "score": 57626.72435979222 }, { "content": "\n\n // this is what first mix should receive\n\n let layer_0_routing = RoutingInformation::new(route[1].address, delay0, layer_1_routing)\n\n .encrypt(routing_keys[0].stream_cipher_key)\n\n .encapsulate_with_mac(routing_keys[0].header_integrity_hmac_key);\n\n\n\n assert_eq!(\n\n routing_info\n\n .enc_routing_information\n\n .get_value_ref()\n\n .to_vec(),\n\n layer_0_routing\n\n .enc_routing_information\n\n .get_value_ref()\n\n .to_vec()\n\n );\n\n assert_eq!(\n\n routing_info.integrity_mac.into_inner(),\n\n layer_0_routing.integrity_mac.into_inner()\n\n );\n", "file_path": "src/header/routing/mod.rs", "rank": 85, "score": 57625.10101476779 }, { "content": " .get_value_ref()\n\n .to_vec()\n\n );\n\n\n\n assert_eq!(\n\n encapsulated_routing_info.integrity_mac.into_inner(),\n\n recovered_routing_info.integrity_mac.into_inner()\n\n );\n\n }\n\n}\n", "file_path": "src/header/routing/mod.rs", "rank": 86, "score": 57624.502937501646 }, { "content": " minor: u8,\n\n patch: u8,\n\n}\n\n\n\nimpl Version {\n\n pub fn new() -> Self {\n\n Self {\n\n major: env!(\"CARGO_PKG_VERSION_MAJOR\").to_string().parse().unwrap(),\n\n minor: env!(\"CARGO_PKG_VERSION_MINOR\").to_string().parse().unwrap(),\n\n patch: env!(\"CARGO_PKG_VERSION_PATCH\").to_string().parse().unwrap(),\n\n }\n\n }\n\n\n\n pub fn to_bytes(&self) -> Vec<u8> {\n\n 
vec![self.major, self.minor, self.patch]\n\n }\n\n}\n\n\n\n// the derivation is only required for the tests. please remove it in production\n\n#[derive(Clone, Debug)]\n", "file_path": "src/header/routing/mod.rs", "rank": 87, "score": 57623.37420676761 }, { "content": "/// Compute keyed hmac\n\npub fn compute_keyed_hmac<D>(key: &[u8], data: &[u8]) -> HmacOutput<D>\n\nwhere\n\n D: Update + BlockInput + FixedOutput + Reset + Default + Clone,\n\n D::BlockSize: ArrayLength<u8>,\n\n D::OutputSize: ArrayLength<u8>,\n\n{\n\n let mut hmac =\n\n Hmac::<D>::new_varkey(key).expect(\"HMAC should be able to take key of any size!\");\n\n hmac.update(data);\n\n hmac.finalize()\n\n}\n\n\n\n#[cfg(test)]\n\nmod generating_pseudorandom_bytes {\n\n use super::*;\n\n\n\n // TODO: 10,000 is the wrong number, @aniap what is correct here?\n\n #[test]\n\n fn it_generates_output_of_size_10000() {\n\n let key: [u8; STREAM_CIPHER_KEY_SIZE] =\n\n [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16];\n\n let iv: [u8; STREAM_CIPHER_KEY_SIZE] = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0];\n\n\n\n let rand_bytes = generate_pseudorandom_bytes(&key, &iv, 10000);\n\n assert_eq!(10000, rand_bytes.len());\n\n }\n\n}\n", "file_path": "src/crypto/mod.rs", "rank": 88, "score": 46176.954625818755 } ]
Rust
src/main.rs
teru01/mio_webserver
6f873861e272096684f2f76ad93220e49c0d1c8b
extern crate mio; extern crate regex; use std::net::SocketAddr; use std::{ env, str, fs }; use std::io::{ Error, Read, BufReader, Write }; use std::collections::HashMap; use mio::*; use mio::tcp::{ TcpListener, TcpStream }; use regex::Regex; const SERVER: Token = Token(0); const WEBROOT: &str = "/webroot"; struct WebServer { address: SocketAddr, connections: HashMap<usize, TcpStream>, next_connection_id: usize } impl WebServer { fn new(addr: &str) -> Self { let address = addr.parse().unwrap(); WebServer { address, connections: HashMap::new(), next_connection_id: 1 } } fn run(&mut self) -> Result<(), Error> { let server = TcpListener::bind(&self.address).expect("Failed to bind address"); let poll = Poll::new().unwrap(); poll.register(&server, SERVER, Ready::readable(), PollOpt::edge()).unwrap(); let mut events = Events::with_capacity(1024); let mut response = Vec::new(); loop { poll.poll(&mut events, None).unwrap(); for event in &events { match event.token() { SERVER => { self.connection_handler(&server, &poll); } Token(conn_id) => { self.http_handler(conn_id, event, &poll, &mut response); } } } } } fn connection_handler(&mut self, server: &TcpListener, poll: &Poll) { let (stream, remote) = server.accept().expect("Failed to accept connection"); println!("Connection from {}", &remote); let token = Token(self.next_connection_id); poll.register(&stream, token, Ready::readable(), PollOpt::edge()).unwrap(); if let Some(_) = self.connections.insert(self.next_connection_id, stream){ panic!("Failed to register connection"); } self.next_connection_id += 1; } fn http_handler(&mut self, conn_id: usize, event: Event, poll: &Poll, response: &mut Vec<u8>) { if let Some(stream) = self.connections.get_mut(&conn_id) { if event.readiness().is_readable() { println!("conn_id: {}", conn_id); let mut buffer = [0u8; 512]; let nbytes = stream.read(&mut buffer).expect("Failed to read"); if nbytes != 0 { *response = WebServer::make_response(&buffer, &nbytes).unwrap(); poll.reregister(stream, 
Token(conn_id), Ready::writable(), PollOpt::edge()).unwrap(); } else { self.connections.remove(&conn_id); } } else if event.readiness().is_writable() { stream.write(&response).expect("Failed to write"); stream.flush().unwrap(); self.connections.remove(&conn_id); } } } fn make_response(buffer: &[u8], nbytes: &usize) -> Result<Vec<u8>, Error> { let http_pattern = Regex::new(r"(.*) (.*) HTTP/1.([0-1])\r\n.*").unwrap(); let captures = match http_pattern.captures(str::from_utf8(&buffer[..*nbytes]).unwrap()) { Some(cap) => cap, None => { let mut response = Vec::new(); response.append(&mut "HTTP/1.0 400 Bad Request\r\n".to_string().into_bytes()); response.append(&mut "Server: mio webserver\r\n".to_string().into_bytes()); response.append(&mut "\r\n".to_string().into_bytes()); return Ok(response); } }; let method = captures.get(1).unwrap().as_str(); let path = &format!("{}{}{}", env::current_dir().unwrap().display(), WEBROOT, captures.get(2).unwrap().as_str()); let _version = captures.get(3).unwrap().as_str(); if method == "GET" { let file = match fs::File::open(path) { Ok(file) => file, Err(_) => { let mut response = Vec::new(); response.append(&mut "HTTP/1.0 404 Not Found\r\n".to_string().into_bytes()); response.append(&mut "Server: mio webserver\r\n\r\n".to_string().into_bytes()); return Ok(response); } }; let mut reader = BufReader::new(file); let mut buf = Vec::new(); reader.read_to_end(&mut buf)?; let mut response = Vec::new(); response.append(&mut "HTTP/1.0 200 OK\r\n".to_string().into_bytes()); response.append(&mut "Server: mio webserver\r\n".to_string().into_bytes()); response.append(&mut "\r\n".to_string().into_bytes()); response.append(&mut buf); return Ok(response); } let mut response = Vec::new(); response.append(&mut "HTTP/1.0 501 Not Implemented\r\n".to_string().into_bytes()); response.append(&mut "Server: mio webserver\r\n".to_string().into_bytes()); response.append(&mut "\r\n".to_string().into_bytes()); return Ok(response); } } fn main() { let 
args:Vec<String> = env::args().collect(); if args.len() != 2 { eprintln!("Bad number of argments"); std::process::exit(1); } let mut server = WebServer::new(&args[1]); server.run().expect("Internal Server Error."); }
extern crate mio; extern crate regex; use std::net::SocketAddr; use std::{ env, str, fs }; use std::io::{ Error, Read, BufReader, Write }; use std::collections::HashMap; use mio::*; use mio::tcp::{ TcpListener, TcpStream }; use regex::Regex; const SERVER: Token = Token(0); const WEBROOT: &str = "/webroot"; struct WebServer { address: SocketAddr, connections: HashMap<usize, TcpStream>, next_connection_id: usize } impl WebServer { fn new(addr: &str) -> Self { let address = addr.parse().unwrap(); WebServer { address, connections: HashMap::new(), next_connection_id: 1 } } fn run(&mut self) -> Result<(), Error> { let server = TcpListener::bind(&self.address).expect("Failed to bind address"); let poll = Poll::new().unwrap(); poll.register(&server, SERVER, Ready::readable(), PollOpt::edge()).unwrap(); let mut events = Events::with_capacity(1024); let mut response = Vec::new(); loop { poll.poll(&mut events, None).unwrap(); for event in &events { match event.token() { SERVER => { self.connection_handler(&server, &poll); } Token(conn_id) => { self.http_handler(conn_id, event, &poll, &mut response); } } } } } fn connection_handler(&mut self, server: &TcpListener, poll: &Poll) { let (stream, remote) = server.accept().expect("Failed to accept connection"); println!("Connection from {}", &remote); let token = Token(self.next_connection_id); poll.register(&stream, token, Ready::readable(), PollOpt::edge()).unwrap(); if let Some(_) = self.connections.insert(self.next_connection_id, stream){ panic!("Failed to register connection"); } self.next_connection_id += 1; } fn http_handler(&mut self, conn_id: usize, event: Event, poll: &Poll, response: &mut Vec<u8>) { if let Some(stream) = self.connections.get_mut(&conn_id) {
} } fn make_response(buffer: &[u8], nbytes: &usize) -> Result<Vec<u8>, Error> { let http_pattern = Regex::new(r"(.*) (.*) HTTP/1.([0-1])\r\n.*").unwrap(); let captures = match http_pattern.captures(str::from_utf8(&buffer[..*nbytes]).unwrap()) { Some(cap) => cap, None => { let mut response = Vec::new(); response.append(&mut "HTTP/1.0 400 Bad Request\r\n".to_string().into_bytes()); response.append(&mut "Server: mio webserver\r\n".to_string().into_bytes()); response.append(&mut "\r\n".to_string().into_bytes()); return Ok(response); } }; let method = captures.get(1).unwrap().as_str(); let path = &format!("{}{}{}", env::current_dir().unwrap().display(), WEBROOT, captures.get(2).unwrap().as_str()); let _version = captures.get(3).unwrap().as_str(); if method == "GET" { let file = match fs::File::open(path) { Ok(file) => file, Err(_) => { let mut response = Vec::new(); response.append(&mut "HTTP/1.0 404 Not Found\r\n".to_string().into_bytes()); response.append(&mut "Server: mio webserver\r\n\r\n".to_string().into_bytes()); return Ok(response); } }; let mut reader = BufReader::new(file); let mut buf = Vec::new(); reader.read_to_end(&mut buf)?; let mut response = Vec::new(); response.append(&mut "HTTP/1.0 200 OK\r\n".to_string().into_bytes()); response.append(&mut "Server: mio webserver\r\n".to_string().into_bytes()); response.append(&mut "\r\n".to_string().into_bytes()); response.append(&mut buf); return Ok(response); } let mut response = Vec::new(); response.append(&mut "HTTP/1.0 501 Not Implemented\r\n".to_string().into_bytes()); response.append(&mut "Server: mio webserver\r\n".to_string().into_bytes()); response.append(&mut "\r\n".to_string().into_bytes()); return Ok(response); } } fn main() { let args:Vec<String> = env::args().collect(); if args.len() != 2 { eprintln!("Bad number of argments"); std::process::exit(1); } let mut server = WebServer::new(&args[1]); server.run().expect("Internal Server Error."); }
if event.readiness().is_readable() { println!("conn_id: {}", conn_id); let mut buffer = [0u8; 512]; let nbytes = stream.read(&mut buffer).expect("Failed to read"); if nbytes != 0 { *response = WebServer::make_response(&buffer, &nbytes).unwrap(); poll.reregister(stream, Token(conn_id), Ready::writable(), PollOpt::edge()).unwrap(); } else { self.connections.remove(&conn_id); } } else if event.readiness().is_writable() { stream.write(&response).expect("Failed to write"); stream.flush().unwrap(); self.connections.remove(&conn_id); }
if_condition
[ { "content": "# mio_webserver\n\nNon-Blocking I/O web server with rust/mio.\n\n\n\n# How to use\n\n\n\n```\n\n$ cargo run [addr]:[port]\n\n```\n\n\n\nthen connect via telnet, nc or browser\n\n\n\n![sheep](https://user-images.githubusercontent.com/27873650/54591218-24c08c00-4a6d-11e9-9ead-49494b0adffc.png \"sheep\")\n\n\n\n# Specification\n\n\n\n* Return responses as HTTP 1.0.\n\n* Accept only HTTP 1.0 or 1.1.\n\n* Only accept GET method.\n\n* Disconnect a connection when a request and the corresponding response are exchanged.\n\n* Security is not considered.(directory traversal etc.)\n", "file_path": "README.md", "rank": 2, "score": 11837.950231414276 } ]
Rust
src/lib.rs
tstellanova/st7789
b1fe2c7af1947a044f37665315ddb28d9845b3ec
#![no_std] #![allow(clippy::type_complexity)] pub mod instruction; use crate::instruction::Instruction; use num_derive::ToPrimitive; use num_traits::ToPrimitive; use display_interface_spi::SPIInterface; use display_interface::WriteOnlyDataCommand; use embedded_hal::blocking::delay::DelayUs; use embedded_hal::blocking::spi; use embedded_hal::digital::v2::OutputPin; #[cfg(feature = "graphics")] mod graphics; #[cfg(feature = "batch")] mod batch; pub fn new_display_driver<SPI, CSX, DC, RST>( spi: SPI, csx: CSX, dc: DC, rst: RST, size_x: u16, size_y: u16, ) -> ST7789<SPIInterface<SPI, DC, CSX>, RST> where SPI: spi::Write<u8>, CSX: OutputPin, DC: OutputPin, RST: OutputPin, { let interface = SPIInterface::new(spi, dc, csx); ST7789::new(interface, rst, size_x as u16, size_y as u16) } pub struct ST7789<DI, RST> where DI: WriteOnlyDataCommand<u8>, RST: OutputPin, { di: DI, rst: RST, size_x: u16, size_y: u16, } #[derive(ToPrimitive)] pub enum Orientation { Portrait = 0b0000_0000, Landscape = 0b0110_0000, PortraitSwapped = 0b1100_0000, LandscapeSwapped = 0b1010_0000, } #[derive(Debug)] pub enum Error<PinE> { DisplayError, Pin(PinE), } impl<DI, RST, PinE> ST7789<DI, RST> where DI: WriteOnlyDataCommand<u8>, RST: OutputPin<Error = PinE>, { pub fn new(di: DI, rst: RST, size_x: u16, size_y: u16) -> Self { Self { di, rst, size_x, size_y, } } pub fn init(&mut self, delay_source: &mut impl DelayUs<u32>) -> Result<(), Error<PinE>> { self.hard_reset(delay_source)?; self.write_command(Instruction::SWRESET, None)?; delay_source.delay_us(150_000); self.write_command(Instruction::SLPOUT, None)?; delay_source.delay_us(10_000); self.write_command(Instruction::INVOFF, None)?; self.write_command(Instruction::MADCTL, Some(&[0b0000_0000]))?; self.write_command(Instruction::COLMOD, Some(&[0b0101_0101]))?; self.write_command(Instruction::INVON, None)?; delay_source.delay_us(10_000); self.write_command(Instruction::NORON, None)?; delay_source.delay_us(10_000); self.write_command(Instruction::DISPON, 
None)?; delay_source.delay_us(10_000); Ok(()) } pub fn hard_reset(&mut self, delay_source: &mut impl DelayUs<u32>) -> Result<(), Error<PinE>> { self.rst.set_high().map_err(Error::Pin)?; delay_source.delay_us(10); self.rst.set_low().map_err(Error::Pin)?; delay_source.delay_us(10); self.rst.set_high().map_err(Error::Pin)?; delay_source.delay_us(10); Ok(()) } pub fn set_orientation(&mut self, orientation: &Orientation) -> Result<(), Error<PinE>> { let orientation = orientation.to_u8().unwrap_or(0); self.write_command(Instruction::MADCTL, Some(&[orientation]))?; Ok(()) } pub fn set_pixel(&mut self, x: u16, y: u16, color: u16) -> Result<(), Error<PinE>> { self.set_address_window(x, y, x, y)?; self.write_command(Instruction::RAMWR, None)?; self.write_word(color) } pub fn set_pixels<T>( &mut self, sx: u16, sy: u16, ex: u16, ey: u16, colors: T, ) -> Result<(), Error<PinE>> where T: IntoIterator<Item = u16>, { self.set_address_window(sx, sy, ex, ey)?; self.write_command(Instruction::RAMWR, None)?; self.write_pixels(colors) } #[cfg(not(feature = "buffer"))] fn write_pixels<T>(&mut self, colors: T) -> Result<(), Error<SPI, PinE>> where T: IntoIterator<Item = u16>, { for color in colors { self.write_word(color)?; } Ok(()) } #[cfg(feature = "buffer")] fn write_pixels<T>(&mut self, colors: T) -> Result<(), Error<PinE>> where T: IntoIterator<Item = u16>, { let mut buf = [0; 128]; let mut i = 0; for color in colors { let word = color.to_be_bytes(); buf[i] = word[0]; buf[i + 1] = word[1]; i += 2; if i == buf.len() { self.write_data(&buf)?; i = 0; } } if i > 0 { self.write_data(&buf[..i])?; } Ok(()) } fn write_command( &mut self, command: Instruction, params: Option<&[u8]>, ) -> Result<(), Error<PinE>> { self.di .send_commands(&[command.to_u8().unwrap()]) .map_err(|_| Error::DisplayError)?; if let Some(params) = params { self.di.send_data(params).map_err(|_| Error::DisplayError)?; } Ok(()) } fn write_data(&mut self, data: &[u8]) -> Result<(), Error<PinE>> { 
self.di.send_data(data).map_err(|_| Error::DisplayError)?; Ok(()) } fn write_word(&mut self, value: u16) -> Result<(), Error<PinE>> { self.write_data(&value.to_be_bytes()) } fn set_address_window( &mut self, sx: u16, sy: u16, ex: u16, ey: u16, ) -> Result<(), Error<PinE>> { self.write_command(Instruction::CASET, None)?; self.write_word(sx)?; self.write_word(ex)?; self.write_command(Instruction::RASET, None)?; self.write_word(sy)?; self.write_word(ey) } }
#![no_std] #![allow(clippy::type_complexity)] pub mod instruction; use crate::instruction::Instruction; use num_derive::ToPrimitive; use num_traits::ToPrimitive; use display_interface_spi::SPIInterface; use display_interface::WriteOnlyDataCommand; use embedded_hal::blocking::delay::DelayUs; use embedded_hal::blocking::spi; use embedded_hal::digital::v2::OutputPin; #[cfg(feature = "graphics")] mod graphics; #[cfg(feature = "batch")] mod batch; pub fn new_display_driver<SPI, CSX, DC, RST>( spi: SPI, csx: CSX, dc: DC, rst: RST, size_x: u16, size_y: u16, ) -> ST7789<SPIInterface<SPI, DC, CSX>, RST> where SPI: spi::Write<u8>, CSX: OutputPin, DC: OutputPin, RST: OutputPin, { let interface = SPIInterface::new(spi, dc, csx); ST7789::new(interface, rst, size_x as u16, size_y as u16) } pub struct ST7789<DI, RST> where DI: WriteOnlyDataCommand<u8>, RST: OutputPin, { di: DI, rst: RST, size_x: u16, size_y: u16, } #[derive(ToPrimitive)] pub enum Orientation { Portrait = 0b0000_0000, Landscape = 0b0110_0000, PortraitSwapped = 0b1100_0000, LandscapeSwapped = 0b1010_0000, } #[derive(Debug)] pub enum Error<PinE> { DisplayError, Pin(PinE), } impl<DI, RST, PinE> ST7789<DI, RST> where DI: WriteOnlyDataCommand<u8>, RST: OutputPin<Error = PinE>, { pub fn new(di: DI, rst: RST, size_x: u16, size_y: u16) -> Self { Self { di, rst, size_x, size_y, } } pub fn init(&mut self, delay_source: &mut impl DelayUs<u32>) -> Result<(), Error<PinE>> { self.hard_reset(delay_source)?; self.write_command(Instruction::SWRESET, None)?; delay_source.delay_us(150_000); self.write_command(Instruction::SLPOUT, None)?; delay_source.delay_us(10_000); self.write_command(Instruction::INVOFF, None)?; self.write_command(Instruction::MADCTL, Some(&[0b0000_0000]))?; self.write_command(Instruction::COLMOD, Some(&[0b0101_0101]))?; self.write_command(Instruction::INVON, None)?; delay_source.delay_us(10_000); self.write_command(Instruction::NORON, None)?; delay_source.delay_us(10_000); self.write_command(Instruction::DISPON, 
None)?; delay_source.delay_us(10_000); Ok(()) } pub fn hard_reset(&mut self, delay_source: &mut impl DelayUs<u32>) -> Result<(), Error<PinE>> { self.rst.set_high().map_err(Error::Pin)?; delay_source.delay_us(10); self.rst.set_low().map_err(Error::Pin)?; delay_source.delay_us(10); self.rst.set_high().map_err(Error::Pin)?; delay_source.delay_us(10); Ok(()) } pub fn set_orientation(&mut self, orientation: &Orientation) -> Result<(), Error<PinE>> { let orientation = orientation.to_u8().unwrap_or(0); self.write_command(Instruction::MADCTL, Some(&[orientation]))?; Ok(()) } pub fn set_pixel(&mut self, x: u16, y: u16, color: u16) -> Result<(), Error<PinE>> { self.set_address_window(x, y, x, y)?; self.write_command(Instruction::RAMWR, None)?; self.write_word(color) } pub fn set_pixels<T>( &mut self, sx: u16, sy: u16, ex: u16, ey: u16, colors: T, ) -> Result<(), Error<PinE>> where T: IntoIterator<Item = u16>, { self.set_address_window(sx, sy, ex, ey)?; self.write_command(Instruction::RAMWR, None)?; self.write_pixels(colors) } #[cfg(not(feature = "buffer"))] fn write_pixels<T>(&mut self, colors: T) -> Result<(), Error<SPI, PinE>> where T: IntoIterator<Item = u16>, { for color in colors { self.write_word(color)?; } Ok(()) } #[cfg(feature = "buffer")]
fn write_command( &mut self, command: Instruction, params: Option<&[u8]>, ) -> Result<(), Error<PinE>> { self.di .send_commands(&[command.to_u8().unwrap()]) .map_err(|_| Error::DisplayError)?; if let Some(params) = params { self.di.send_data(params).map_err(|_| Error::DisplayError)?; } Ok(()) } fn write_data(&mut self, data: &[u8]) -> Result<(), Error<PinE>> { self.di.send_data(data).map_err(|_| Error::DisplayError)?; Ok(()) } fn write_word(&mut self, value: u16) -> Result<(), Error<PinE>> { self.write_data(&value.to_be_bytes()) } fn set_address_window( &mut self, sx: u16, sy: u16, ex: u16, ey: u16, ) -> Result<(), Error<PinE>> { self.write_command(Instruction::CASET, None)?; self.write_word(sx)?; self.write_word(ex)?; self.write_command(Instruction::RASET, None)?; self.write_word(sy)?; self.write_word(ey) } }
fn write_pixels<T>(&mut self, colors: T) -> Result<(), Error<PinE>> where T: IntoIterator<Item = u16>, { let mut buf = [0; 128]; let mut i = 0; for color in colors { let word = color.to_be_bytes(); buf[i] = word[0]; buf[i + 1] = word[1]; i += 2; if i == buf.len() { self.write_data(&buf)?; i = 0; } } if i > 0 { self.write_data(&buf[..i])?; } Ok(()) }
function_block-full_function
[ { "content": "pub trait DrawBatch<DI, RST, T, PinE>\n\nwhere\n\n DI: WriteOnlyDataCommand<u8>,\n\n RST: OutputPin<Error = PinE>,\n\n T: IntoIterator<Item = Pixel<Rgb565>>,\n\n{\n\n fn draw_batch(&mut self, item_pixels: T) -> Result<(), Error<PinE>>;\n\n}\n\n\n\nimpl<DI, RST, T, PinE> DrawBatch<DI, RST, T, PinE> for ST7789<DI, RST>\n\nwhere\n\n DI: WriteOnlyDataCommand<u8>,\n\n RST: OutputPin<Error = PinE>,\n\n T: IntoIterator<Item = Pixel<Rgb565>>,\n\n{\n\n fn draw_batch(&mut self, item_pixels: T) -> Result<(), Error<PinE>> {\n\n // Get the pixels for the item to be rendered.\n\n let pixels = item_pixels.into_iter();\n\n // Batch the pixels into Pixel Rows.\n\n let rows = to_rows(pixels);\n", "file_path": "src/batch.rs", "rank": 1, "score": 89222.50077615282 }, { "content": "/// Consecutive color words for a Pixel Row\n\ntype RowColors = heapless::Vec<u16, MaxRowSize>;\n", "file_path": "src/batch.rs", "rank": 2, "score": 44885.12584984294 }, { "content": "/// Consecutive color words for a Pixel Block\n\ntype BlockColors = heapless::Vec<u16, MaxBlockSize>;\n\n\n\n/// Iterator for each Pixel Row in the pixel data. A Pixel Row consists of contiguous pixels on the same row.\n\n#[derive(Debug, Clone)]\n\npub struct RowIterator<P: Iterator<Item = Pixel<Rgb565>>> {\n\n /// Pixels to be batched into rows\n\n pixels: P,\n\n /// Start column number\n\n x_left: u8,\n\n /// End column number\n\n x_right: u8,\n\n /// Row number\n\n y: u8,\n\n /// List of pixel colours for the entire row\n\n colors: RowColors,\n\n /// True if this is the first pixel for the row\n\n first_pixel: bool,\n\n}\n\n\n\n/// Iterator for each Pixel Block in the pixel data. A Pixel Block consists of contiguous Pixel Rows with the same start and end column number.\n", "file_path": "src/batch.rs", "rank": 3, "score": 44885.12584984294 }, { "content": "/// Batch the pixels into Pixel Rows, which are contiguous pixels on the same row.\n\n/// P can be any Pixel Iterator (e.g. 
a rectangle).\n\nfn to_rows<P>(pixels: P) -> RowIterator<P>\n\nwhere\n\n P: Iterator<Item = Pixel<Rgb565>>,\n\n{\n\n RowIterator::<P> {\n\n pixels,\n\n x_left: 0,\n\n x_right: 0,\n\n y: 0,\n\n colors: RowColors::new(),\n\n first_pixel: true,\n\n }\n\n}\n\n\n", "file_path": "src/batch.rs", "rank": 4, "score": 32372.033812514484 }, { "content": "/// Batch the Pixel Rows into Pixel Blocks, which are contiguous Pixel Rows with the same start and end column number\n\n/// R can be any Pixel Row Iterator.\n\nfn to_blocks<R>(rows: R) -> BlockIterator<R>\n\nwhere\n\n R: Iterator<Item = PixelRow>,\n\n{\n\n BlockIterator::<R> {\n\n rows,\n\n x_left: 0,\n\n x_right: 0,\n\n y_top: 0,\n\n y_bottom: 0,\n\n colors: BlockColors::new(),\n\n first_row: true,\n\n }\n\n}\n\n\n\n/// Implement the Iterator for Pixel Rows.\n\n/// P can be any Pixel Iterator (e.g. a rectangle).\n\nimpl<P: Iterator<Item = Pixel<Rgb565>>> Iterator for RowIterator<P> {\n\n /// This Iterator returns Pixel Rows\n\n type Item = PixelRow;\n", "file_path": "src/batch.rs", "rank": 5, "score": 32371.85462367784 }, { "content": "use num_derive::ToPrimitive;\n\n\n\n/// ST7789 instructions.\n\n#[derive(ToPrimitive)]\n\npub enum Instruction {\n\n NOP = 0x00,\n\n SWRESET = 0x01,\n\n RDDID = 0x04,\n\n RDDST = 0x09,\n\n SLPIN = 0x10,\n\n SLPOUT = 0x11,\n\n PTLON = 0x12,\n\n NORON = 0x13,\n\n INVOFF = 0x20,\n\n INVON = 0x21,\n\n DISPOFF = 0x28,\n\n DISPON = 0x29,\n\n CASET = 0x2A,\n\n RASET = 0x2B,\n\n RAMWR = 0x2C,\n\n RAMRD = 0x2E,\n\n PTLAR = 0x30,\n\n COLMOD = 0x3A,\n\n MADCTL = 0x36,\n\n VCMOFSET = 0xC5,\n\n}\n", "file_path": "src/instruction.rs", "rank": 6, "score": 20096.500545290764 }, { "content": " &mut self,\n\n item: &dyn Dimensions,\n\n colors: &mut dyn Iterator<Item = u16>,\n\n ) -> Result<(), Error<PinE>> {\n\n let sx = item.top_left().x as u16;\n\n let sy = item.top_left().y as u16;\n\n let ex = item.bottom_right().x as u16;\n\n let ey = item.bottom_right().y as u16;\n\n\n\n self.set_pixels(sx, sy, ex, ey, 
colors)\n\n }\n\n}\n\n\n\nimpl<DI, RST, PinE> DrawTarget<Rgb565> for ST7789<DI, RST>\n\nwhere\n\n DI: WriteOnlyDataCommand<u8>,\n\n RST: OutputPin<Error = PinE>,\n\n{\n\n type Error = Error<PinE>;\n\n\n", "file_path": "src/graphics.rs", "rank": 7, "score": 19543.50008323182 }, { "content": "\n\n fn draw_image<'a, 'b, I>(&mut self, item: &'a Image<'b, I, Rgb565>) -> Result<(), Self::Error>\n\n where\n\n &'b I: IntoPixelIter<Rgb565>,\n\n I: ImageDimensions,\n\n {\n\n // TODO: this is inconsistent in embedded-graphics between Rectangle and Image\n\n // See: https://github.com/jamwaffles/embedded-graphics/issues/182\n\n let sx = item.top_left().x as u16;\n\n let sy = item.top_left().y as u16;\n\n let ex = (item.bottom_right().x - 1) as u16;\n\n let ey = (item.bottom_right().y - 1) as u16;\n\n let colors = item.into_iter().map(|p| RawU16::from(p.1).into_inner());\n\n\n\n self.set_pixels(sx, sy, ex, ey, colors)\n\n }\n\n\n\n fn size(&self) -> Size {\n\n Size::new(self.size_x.into(), self.size_y.into())\n\n }\n\n}\n", "file_path": "src/graphics.rs", "rank": 8, "score": 19540.6548195985 }, { "content": " fn draw_pixel(&mut self, pixel: Pixel<Rgb565>) -> Result<(), Self::Error> {\n\n let color = RawU16::from(pixel.1).into_inner();\n\n let x = pixel.0.x as u16;\n\n let y = pixel.0.y as u16;\n\n\n\n self.set_pixel(x, y, color)\n\n }\n\n\n\n #[cfg(feature = \"batch\")]\n\n fn draw_iter<T>(&mut self, item: T) -> Result<(), Self::Error>\n\n where\n\n T: IntoIterator<Item = Pixel<Rgb565>>,\n\n {\n\n use crate::batch::DrawBatch;\n\n\n\n self.draw_batch(item)\n\n }\n\n\n\n fn draw_rectangle(\n\n &mut self,\n", "file_path": "src/graphics.rs", "rank": 9, "score": 19538.919421115566 }, { "content": "use embedded_graphics::drawable::Pixel;\n\nuse embedded_graphics::geometry::Dimensions;\n\nuse embedded_graphics::image::{Image, ImageDimensions, IntoPixelIter};\n\nuse embedded_graphics::pixelcolor::raw::{RawData, RawU16};\n\nuse embedded_graphics::pixelcolor::Rgb565;\n\nuse 
embedded_graphics::prelude::{DrawTarget, Size};\n\nuse embedded_graphics::primitives::Rectangle;\n\nuse embedded_graphics::style::{PrimitiveStyle, Styled};\n\n\n\nuse embedded_hal::digital::v2::OutputPin;\n\n\n\nuse crate::{Error, ST7789};\n\nuse display_interface::WriteOnlyDataCommand;\n\n\n\nimpl<DI, RST, PinE> ST7789<DI, RST>\n\nwhere\n\n DI: WriteOnlyDataCommand<u8>,\n\n RST: OutputPin<Error = PinE>,\n\n{\n\n fn fill_rect(\n", "file_path": "src/graphics.rs", "rank": 10, "score": 19537.132939379455 }, { "content": " item: &Styled<Rectangle, PrimitiveStyle<Rgb565>>,\n\n ) -> Result<(), Self::Error> {\n\n // filled rect can be rendered into frame window directly\n\n if item.style.fill_color.is_some() {\n\n let mut colors = item.into_iter().map(|p| RawU16::from(p.1).into_inner());\n\n\n\n self.fill_rect(item, &mut colors)\n\n } else if let Some(_color) = item.style.stroke_color {\n\n if item.style.stroke_width == 0 {\n\n return Ok(()); // nothing to draw\n\n }\n\n // let sw = item.style.stroke_width as u16;\n\n\n\n // TODO: construct rectangle as 4 frames\n\n self.draw_iter(item)\n\n } else {\n\n // if we don't know what this rect is, draw individual pixels\n\n self.draw_iter(item)\n\n }\n\n }\n", "file_path": "src/graphics.rs", "rank": 11, "score": 19534.155043509196 }, { "content": "//! Original code from: https://github.com/lupyuen/piet-embedded/blob/master/piet-embedded-graphics/src/batch.rs\n\n//! Batch the pixels to be rendered into Pixel Rows and Pixel Blocks (contiguous Pixel Rows).\n\n//! 
This enables the pixels to be rendered efficiently as Pixel Blocks, which may be transmitted in a single Non-Blocking SPI request.\n\nuse crate::{Error, ST7789};\n\nuse display_interface::WriteOnlyDataCommand;\n\nuse embedded_graphics::{\n\n pixelcolor::{raw::RawU16, Rgb565},\n\n prelude::*,\n\n};\n\nuse embedded_hal::digital::v2::OutputPin;\n\n\n", "file_path": "src/batch.rs", "rank": 12, "score": 17062.365903104183 }, { "content": "\n\n /// Return the next Pixel Row of contiguous pixels on the same row\n\n fn next(&mut self) -> Option<Self::Item> {\n\n // Loop over all pixels until we have composed a Pixel Row, or we have run out of pixels.\n\n loop {\n\n // Get the next pixel.\n\n let next_pixel = self.pixels.next();\n\n match next_pixel {\n\n None => {\n\n // If no more pixels...\n\n if self.first_pixel {\n\n return None; // No pixels to group\n\n }\n\n // Else return previous pixels as row.\n\n let row = PixelRow {\n\n x_left: self.x_left,\n\n x_right: self.x_right,\n\n y: self.y,\n\n colors: self.colors.clone(),\n\n };\n", "file_path": "src/batch.rs", "rank": 13, "score": 17060.713728667622 }, { "content": " self.colors.clear();\n\n self.first_pixel = true;\n\n return Some(row);\n\n }\n\n Some(Pixel(coord, color)) => {\n\n // If there is a pixel...\n\n let x = coord.x as u8;\n\n let y = coord.y as u8;\n\n let color = RawU16::from(color).into_inner();\n\n // Save the first pixel as the row start and handle next pixel.\n\n if self.first_pixel {\n\n self.first_pixel = false;\n\n self.x_left = x;\n\n self.x_right = x;\n\n self.y = y;\n\n self.colors.clear();\n\n self.colors.push(color).expect(\"never\");\n\n continue;\n\n }\n\n // If this pixel is adjacent to the previous pixel, add to the row.\n", "file_path": "src/batch.rs", "rank": 14, "score": 17059.66750965326 }, { "content": " // Batch the Pixel Rows into Pixel Blocks.\n\n let blocks = to_blocks(rows);\n\n // For each Pixel Block...\n\n for PixelBlock {\n\n x_left,\n\n x_right,\n\n y_top,\n\n y_bottom,\n\n 
colors,\n\n ..\n\n } in blocks\n\n {\n\n // Render the Pixel Block.\n\n self.set_pixels(\n\n x_left as u16,\n\n y_top as u16,\n\n x_right as u16,\n\n y_bottom as u16,\n\n colors,\n\n )?;\n", "file_path": "src/batch.rs", "rank": 15, "score": 17059.613076640402 }, { "content": "#[derive(Debug, Clone)]\n\npub struct BlockIterator<R: Iterator<Item = PixelRow>> {\n\n /// Pixel Rows to be batched into blocks\n\n rows: R,\n\n /// Start column number\n\n x_left: u8,\n\n /// End column number\n\n x_right: u8,\n\n /// Start row number\n\n y_top: u8,\n\n /// End row number\n\n y_bottom: u8,\n\n /// List of pixel colours for the entire block, row by row\n\n colors: BlockColors,\n\n /// True if this is the first row for the block\n\n first_row: bool,\n\n}\n\n\n\n/// A row of contiguous pixels\n\npub struct PixelRow {\n", "file_path": "src/batch.rs", "rank": 16, "score": 17059.586870745243 }, { "content": " }) => {\n\n // If there is a Pixel Row...\n\n // Save the first row as the block start and handle next block.\n\n if self.first_row {\n\n self.first_row = false;\n\n self.x_left = x_left;\n\n self.x_right = x_right;\n\n self.y_top = y;\n\n self.y_bottom = y;\n\n self.colors.clear();\n\n self.colors.extend_from_slice(&colors).expect(\"never\");\n\n continue;\n\n }\n\n // If this row is adjacent to the previous row and same size, add to the block.\n\n if y == self.y_bottom + 1 && x_left == self.x_left && x_right == self.x_right {\n\n // Don't add row if too many pixels in the block.\n\n if self.colors.extend_from_slice(&colors).is_ok() {\n\n self.y_bottom = y;\n\n continue;\n\n }\n", "file_path": "src/batch.rs", "rank": 17, "score": 17059.100547172664 }, { "content": " return None; // No rows to group\n\n }\n\n // Else return previous rows as block.\n\n let row = PixelBlock {\n\n x_left: self.x_left,\n\n x_right: self.x_right,\n\n y_top: self.y_top,\n\n y_bottom: self.y_bottom,\n\n colors: self.colors.clone(),\n\n };\n\n self.colors.clear();\n\n self.first_row = true;\n\n 
return Some(row);\n\n }\n\n Some(PixelRow {\n\n x_left,\n\n x_right,\n\n y,\n\n colors,\n\n ..\n", "file_path": "src/batch.rs", "rank": 18, "score": 17059.075193540077 }, { "content": " }\n\n }\n\n}\n\n\n\n/// Implement the Iterator for Pixel Blocks.\n\n/// R can be any Pixel Row Iterator.\n\nimpl<R: Iterator<Item = PixelRow>> Iterator for BlockIterator<R> {\n\n /// This Iterator returns Pixel Blocks\n\n type Item = PixelBlock;\n\n\n\n /// Return the next Pixel Block of contiguous Pixel Rows with the same start and end column number\n\n fn next(&mut self) -> Option<Self::Item> {\n\n // Loop over all Pixel Rows until we have composed a Pixel Block, or we have run out of Pixel Rows.\n\n loop {\n\n // Get the next Pixel Row.\n\n let next_row = self.rows.next();\n\n match next_row {\n\n None => {\n\n // If no more Pixel Rows...\n\n if self.first_row {\n", "file_path": "src/batch.rs", "rank": 19, "score": 17058.927622700907 }, { "content": " /// Start column number\n\n pub x_left: u8,\n\n /// End column number\n\n pub x_right: u8,\n\n /// Row number\n\n pub y: u8,\n\n /// List of pixel colours for the entire row\n\n pub colors: RowColors,\n\n}\n\n\n\n/// A block of contiguous pixel rows with the same start and end column number\n\npub struct PixelBlock {\n\n /// Start column number\n\n pub x_left: u8,\n\n /// End column number\n\n pub x_right: u8,\n\n /// Start row number\n\n pub y_top: u8,\n\n /// End row number\n\n pub y_bottom: u8,\n\n /// List of pixel colours for the entire block, row by row\n\n pub colors: BlockColors,\n\n}\n\n\n", "file_path": "src/batch.rs", "rank": 20, "score": 17058.39937570185 }, { "content": " if x == self.x_right.wrapping_add(1) && y == self.y && self.colors.push(color).is_ok() {\n\n // Don't add pixel if too many pixels in the row.\n\n self.x_right = x;\n\n continue;\n\n }\n\n // Else return previous pixels as row.\n\n let row = PixelRow {\n\n x_left: self.x_left,\n\n x_right: self.x_right,\n\n y: self.y,\n\n colors: 
self.colors.clone(),\n\n };\n\n self.x_left = x;\n\n self.x_right = x;\n\n self.y = y;\n\n self.colors.clear();\n\n self.colors.push(color).expect(\"never\");\n\n return Some(row);\n\n }\n\n }\n", "file_path": "src/batch.rs", "rank": 21, "score": 17057.670943614907 }, { "content": " }\n\n // Else return previous rows as block.\n\n let row = PixelBlock {\n\n x_left: self.x_left,\n\n x_right: self.x_right,\n\n y_top: self.y_top,\n\n y_bottom: self.y_bottom,\n\n colors: self.colors.clone(),\n\n };\n\n self.x_left = x_left;\n\n self.x_right = x_right;\n\n self.y_top = y;\n\n self.y_bottom = y;\n\n self.colors.clear();\n\n self.colors.extend_from_slice(&colors).expect(\"never\");\n\n return Some(row);\n\n }\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/batch.rs", "rank": 22, "score": 17056.17934524558 }, { "content": "\n\n // Dump out the Pixel Blocks for the square in test_display()\n\n /* if x_left >= 60 && x_left <= 150 && x_right >= 60 && x_right <= 150 && y_top >= 60 && y_top <= 150 && y_bottom >= 60 && y_bottom <= 150 {\n\n console::print(\"pixel block (\"); console::printint(x_left as i32); console::print(\", \"); console::printint(y_top as i32); ////\n\n console::print(\"), (\"); console::printint(x_right as i32); console::print(\", \"); console::printint(y_bottom as i32); console::print(\")\\n\"); ////\n\n } */\n\n }\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "src/batch.rs", "rank": 23, "score": 17055.062204869566 }, { "content": "/// Max number of pixels per Pixel Block\n\ntype MaxBlockSize = heapless::consts::U100;\n\n\n", "file_path": "src/batch.rs", "rank": 24, "score": 11729.138194894713 }, { "content": "/// Max number of pixels per Pixel Row\n\ntype MaxRowSize = heapless::consts::U50;\n", "file_path": "src/batch.rs", "rank": 25, "score": 11729.138194894713 }, { "content": "# st7789\n\n\n\nThis is a Rust driver library for ST7789 displays using embedded_graphics, embedded_hal, and no_std, no_alloc support. 
\n\n- [Driver documentation](https://docs.rs/st7789). \n\n- [Examples](https://github.com/almindor/st7789-examples)\n\n- [Display datasheet](https://www.rhydolabz.com/documents/33/ST7789.pdf)\n\n\n\n[![ferris-demo](http://objdump.katona.me/ferris_fast.png)](http://objdump.katona.me/ferris_fast.mp4)\n\n\n\n## Features\n\n\n\nThese features are enabled by default:\n\n\n\n* `graphics` - embedded-graphics support: pulls in [embedded-graphics](https://crates.io/crates/embedded-graphics) dependency\n\n* `batch` - batch-drawing optimization: pulls in [heapless](https://crates.io/crates/heapless) dependency and allocates 300 bytes for frame buffer in the driver\n\n* `buffer` - use a 128 byte buffer for SPI data transfers\n\n\n\n## Status\n\n\n\n- [x] Communications via SPI\n\n- [x] Tested with PineTime watch\n\n- [ ] Offscreen Buffering\n\n- [ ] Hardware scrolling support\n\n\n\n## Changelog\n\n\n\n* `v0.2.2` - add buffering for SPI transfers\n\n* `v0.2.1` - use static dispatch for `set_pixels`\n\n* `v0.2.0` - batch support\n\n* `v0.1.0` - initial release\n\n\n", "file_path": "README.md", "rank": 38, "score": 9.13671503729305 } ]
Rust
src/day4.rs
xosdy/aoc2018
c4824b9fb7cc437cf29dbc83cdb10cef3e5ccdd7
use chrono::{NaiveDateTime, Timelike}; use std::collections::HashMap; use std::ops::Range; pub struct Record { id: u32, time_intervals: Vec<Range<u32>>, } #[aoc_generator(day4)] pub fn input_generator(input: &str) -> Vec<Record> { let mut sorted_raw_records = input .lines() .map(|line| { let mut parts = line.split(']'); let date_time = NaiveDateTime::parse_from_str(&parts.next().unwrap()[1..], "%F %R").unwrap(); (date_time, parts.next().unwrap().trim()) }) .collect::<Vec<(NaiveDateTime, &str)>>(); sorted_raw_records.sort_by(|a, b| a.0.cmp(&b.0)); let mut records = Vec::new(); sorted_raw_records .iter() .for_each(|(date_time, raw_record)| { if raw_record.ends_with("begins shift") { let id = raw_record.split(' ').nth(1).unwrap()[1..].parse().unwrap(); records.push(Record { id, time_intervals: Vec::new(), }); } else if *raw_record == "falls asleep" { records .last_mut() .unwrap() .time_intervals .push(date_time.minute()..60); } else if *raw_record == "wakes up" { records .last_mut() .unwrap() .time_intervals .last_mut() .unwrap() .end = date_time.minute(); } }); records } pub fn get_sleep_times_by_guard(records: &[Record]) -> HashMap<u32, [u32; 60]> { let mut sleep_times_by_guard = HashMap::<u32, [u32; 60]>::new(); for record in records { sleep_times_by_guard .entry(record.id) .and_modify(|sleep_times| { for interval in &record.time_intervals { for i in interval.clone() { sleep_times[i as usize] += 1; } } }) .or_insert_with(|| { let mut sleep_times = [0; 60]; for interval in &record.time_intervals { for i in interval.clone() { sleep_times[i as usize] = 1; } } sleep_times }); } sleep_times_by_guard } #[aoc(day4, part1)] pub fn solve_part1(records: &[Record]) -> u32 { let sleep_times_by_guard = get_sleep_times_by_guard(records); let guard_with_max_minutes = sleep_times_by_guard .iter() .max_by_key::<u32, _>(|(_, times)| times.iter().sum()) .unwrap() .0; let sleep_in_most_minute = sleep_times_by_guard[guard_with_max_minutes] .iter() .enumerate() .max_by_key(|x| x.1) 
.unwrap() .0 as u32; guard_with_max_minutes * sleep_in_most_minute } #[aoc(day4, part2)] pub fn solve_part2(records: &[Record]) -> u32 { let sleep_times_by_guard = get_sleep_times_by_guard(records); let (guard, times, _) = sleep_times_by_guard .iter() .map(|(guard, times)| { let minute_and_times = times.iter().enumerate().max_by_key(|x| x.1).unwrap(); (guard, minute_and_times.0 as u32, minute_and_times.1) }) .max_by_key(|x| x.2) .unwrap(); guard * times } #[cfg(test)] mod tests { use super::*; #[test] fn part1() { assert_eq!( solve_part1(&input_generator( r"[1518-11-01 00:00] Guard #10 begins shift [1518-11-01 00:05] falls asleep [1518-11-01 00:25] wakes up [1518-11-01 00:30] falls asleep [1518-11-01 00:55] wakes up [1518-11-01 23:58] Guard #99 begins shift [1518-11-02 00:40] falls asleep [1518-11-02 00:50] wakes up [1518-11-03 00:05] Guard #10 begins shift [1518-11-03 00:24] falls asleep [1518-11-03 00:29] wakes up [1518-11-04 00:02] Guard #99 begins shift [1518-11-04 00:36] falls asleep [1518-11-04 00:46] wakes up [1518-11-05 00:03] Guard #99 begins shift [1518-11-05 00:45] falls asleep [1518-11-05 00:55] wakes up" )), 240 ); } #[test] fn part2() { assert_eq!( solve_part2(&input_generator( r"[1518-11-01 00:00] Guard #10 begins shift [1518-11-01 00:05] falls asleep [1518-11-01 00:25] wakes up [1518-11-01 00:30] falls asleep [1518-11-01 00:55] wakes up [1518-11-01 23:58] Guard #99 begins shift [1518-11-02 00:40] falls asleep [1518-11-02 00:50] wakes up [1518-11-03 00:05] Guard #10 begins shift [1518-11-03 00:24] falls asleep [1518-11-03 00:29] wakes up [1518-11-04 00:02] Guard #99 begins shift [1518-11-04 00:36] falls asleep [1518-11-04 00:46] wakes up [1518-11-05 00:03] Guard #99 begins shift [1518-11-05 00:45] falls asleep [1518-11-05 00:55] wakes up" )), 4455 ); } }
use chrono::{NaiveDateTime, Timelike}; use std::collections::HashMap; use std::ops::Range; pub struct Record { id: u32, time_intervals: Vec<Range<u32>>, } #[aoc_generator(day4)]
pub fn get_sleep_times_by_guard(records: &[Record]) -> HashMap<u32, [u32; 60]> { let mut sleep_times_by_guard = HashMap::<u32, [u32; 60]>::new(); for record in records { sleep_times_by_guard .entry(record.id) .and_modify(|sleep_times| { for interval in &record.time_intervals { for i in interval.clone() { sleep_times[i as usize] += 1; } } }) .or_insert_with(|| { let mut sleep_times = [0; 60]; for interval in &record.time_intervals { for i in interval.clone() { sleep_times[i as usize] = 1; } } sleep_times }); } sleep_times_by_guard } #[aoc(day4, part1)] pub fn solve_part1(records: &[Record]) -> u32 { let sleep_times_by_guard = get_sleep_times_by_guard(records); let guard_with_max_minutes = sleep_times_by_guard .iter() .max_by_key::<u32, _>(|(_, times)| times.iter().sum()) .unwrap() .0; let sleep_in_most_minute = sleep_times_by_guard[guard_with_max_minutes] .iter() .enumerate() .max_by_key(|x| x.1) .unwrap() .0 as u32; guard_with_max_minutes * sleep_in_most_minute } #[aoc(day4, part2)] pub fn solve_part2(records: &[Record]) -> u32 { let sleep_times_by_guard = get_sleep_times_by_guard(records); let (guard, times, _) = sleep_times_by_guard .iter() .map(|(guard, times)| { let minute_and_times = times.iter().enumerate().max_by_key(|x| x.1).unwrap(); (guard, minute_and_times.0 as u32, minute_and_times.1) }) .max_by_key(|x| x.2) .unwrap(); guard * times } #[cfg(test)] mod tests { use super::*; #[test] fn part1() { assert_eq!( solve_part1(&input_generator( r"[1518-11-01 00:00] Guard #10 begins shift [1518-11-01 00:05] falls asleep [1518-11-01 00:25] wakes up [1518-11-01 00:30] falls asleep [1518-11-01 00:55] wakes up [1518-11-01 23:58] Guard #99 begins shift [1518-11-02 00:40] falls asleep [1518-11-02 00:50] wakes up [1518-11-03 00:05] Guard #10 begins shift [1518-11-03 00:24] falls asleep [1518-11-03 00:29] wakes up [1518-11-04 00:02] Guard #99 begins shift [1518-11-04 00:36] falls asleep [1518-11-04 00:46] wakes up [1518-11-05 00:03] Guard #99 begins shift [1518-11-05 
00:45] falls asleep [1518-11-05 00:55] wakes up" )), 240 ); } #[test] fn part2() { assert_eq!( solve_part2(&input_generator( r"[1518-11-01 00:00] Guard #10 begins shift [1518-11-01 00:05] falls asleep [1518-11-01 00:25] wakes up [1518-11-01 00:30] falls asleep [1518-11-01 00:55] wakes up [1518-11-01 23:58] Guard #99 begins shift [1518-11-02 00:40] falls asleep [1518-11-02 00:50] wakes up [1518-11-03 00:05] Guard #10 begins shift [1518-11-03 00:24] falls asleep [1518-11-03 00:29] wakes up [1518-11-04 00:02] Guard #99 begins shift [1518-11-04 00:36] falls asleep [1518-11-04 00:46] wakes up [1518-11-05 00:03] Guard #99 begins shift [1518-11-05 00:45] falls asleep [1518-11-05 00:55] wakes up" )), 4455 ); } }
pub fn input_generator(input: &str) -> Vec<Record> { let mut sorted_raw_records = input .lines() .map(|line| { let mut parts = line.split(']'); let date_time = NaiveDateTime::parse_from_str(&parts.next().unwrap()[1..], "%F %R").unwrap(); (date_time, parts.next().unwrap().trim()) }) .collect::<Vec<(NaiveDateTime, &str)>>(); sorted_raw_records.sort_by(|a, b| a.0.cmp(&b.0)); let mut records = Vec::new(); sorted_raw_records .iter() .for_each(|(date_time, raw_record)| { if raw_record.ends_with("begins shift") { let id = raw_record.split(' ').nth(1).unwrap()[1..].parse().unwrap(); records.push(Record { id, time_intervals: Vec::new(), }); } else if *raw_record == "falls asleep" { records .last_mut() .unwrap() .time_intervals .push(date_time.minute()..60); } else if *raw_record == "wakes up" { records .last_mut() .unwrap() .time_intervals .last_mut() .unwrap() .end = date_time.minute(); } }); records }
function_block-full_function
[ { "content": "pub fn find_largest_power(serial: u32) -> (Vector2<u32>, i32) {\n\n let mut powers = HashMap::new();\n\n for y in 1..=298 {\n\n for x in 1..=298 {\n\n let position = Vector2::new(x, y);\n\n let power = get_square_power(serial, position, Vector2::new(3, 3));\n\n powers.insert(position, power);\n\n }\n\n }\n\n\n\n powers.into_iter().max_by_key(|&(_, p)| p).unwrap()\n\n}\n\n\n", "file_path": "src/day11.rs", "rank": 3, "score": 88987.35157014104 }, { "content": "pub fn get_square_power(serial: u32, position: Vector2<u32>, size: Vector2<u32>) -> i32 {\n\n let mut total_power = 0;\n\n for y in position.y..position.y + size.y {\n\n for x in position.x..position.x + size.x {\n\n total_power += get_cell_power(serial, Vector2::new(x, y));\n\n }\n\n }\n\n\n\n total_power\n\n}\n\n\n", "file_path": "src/day11.rs", "rank": 4, "score": 88098.30725402923 }, { "content": "pub fn get_cell_power(serial: u32, position: Vector2<u32>) -> i32 {\n\n let rack_id = position.x + 10;\n\n let power_level = (((rack_id * position.y + serial) * rack_id) / 100 % 10) as i32;\n\n power_level - 5\n\n}\n\n\n", "file_path": "src/day11.rs", "rank": 5, "score": 86164.12473061802 }, { "content": "/// Use [Summed-area table](https://en.wikipedia.org/wiki/Summed-area_table) to calculate largest square\n\npub fn find_largest_power_any_square(serial: u32) -> (Vector2<u32>, usize, i32) {\n\n let mut sum = [[0; 301]; 301];\n\n for y in 1..=300 {\n\n for x in 1..=300 {\n\n let position = Vector2::new(x as u32, y as u32);\n\n let power = get_cell_power(serial, position);\n\n\n\n sum[y][x] = power + sum[y - 1][x] + sum[y][x - 1] - sum[y - 1][x - 1];\n\n }\n\n }\n\n\n\n let mut best = (Vector2::zeros(), 0, i32::min_value());\n\n for size in 1..=300 {\n\n for y in size..=300 {\n\n for x in size..=300 {\n\n let power =\n\n sum[y - size][x - size] + sum[y][x] - sum[y][x - size] - sum[y - size][x];\n\n\n\n if power > best.2 {\n\n best = (\n", "file_path": "src/day11.rs", "rank": 6, "score": 
84749.8744287442 }, { "content": "#[aoc(day3, part2)]\n\npub fn solve_part2(claims: &[Claim]) -> u32 {\n\n let grid = gen_grid(claims);\n\n\n\n let mut ids: HashSet<u32> = claims.iter().map(|claim| claim.id).collect();\n\n\n\n grid.iter()\n\n .filter_map(|(_, x)| match x {\n\n Status::Valid(_) => None,\n\n Status::Overlap(v) => Some(v),\n\n })\n\n .for_each(|v| {\n\n v.iter().for_each(|id| {\n\n ids.remove(id);\n\n })\n\n });\n\n\n\n *ids.iter().next().unwrap()\n\n}\n\n\n\n#[cfg(test)]\n", "file_path": "src/day3.rs", "rank": 7, "score": 80943.00427213937 }, { "content": "#[aoc(day2, part1)]\n\npub fn solve_part1(input: &str) -> u32 {\n\n let (mut twos, mut threes) = (0, 0);\n\n\n\n input.lines().for_each(|line| {\n\n let (mut found_two, mut found_three) = (false, false);\n\n\n\n for c in line.bytes() {\n\n if found_two && found_three {\n\n break;\n\n }\n\n\n\n let count = bytecount::count(line.as_bytes(), c);\n\n if count == 2 && !found_two {\n\n found_two = true;\n\n twos += 1;\n\n }\n\n\n\n if count == 3 && !found_three {\n\n found_three = true;\n\n threes += 1;\n\n }\n\n }\n\n });\n\n\n\n twos * threes\n\n}\n\n\n", "file_path": "src/day2.rs", "rank": 8, "score": 80943.00427213937 }, { "content": "#[aoc(day9, part2)]\n\npub fn solve_part2(game: &Game) -> u32 {\n\n let mut game = game.clone();\n\n game.last_marble *= 100;\n\n game.run()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn part1() {\n\n let cases = [\n\n (\"10 players; last marble is worth 1618 points\", 8317),\n\n (\"13 players; last marble is worth 7999 points\", 146373),\n\n (\"17 players; last marble is worth 1104 points\", 2764),\n\n (\"21 players; last marble is worth 6111 points\", 54718),\n\n (\"30 players; last marble is worth 5807 points\", 37305),\n\n ];\n\n\n\n for case in &cases {\n\n assert_eq!(solve_part1(&input_generator(case.0)), case.1);\n\n }\n\n }\n\n}\n", "file_path": "src/day9.rs", "rank": 9, "score": 80943.00427213937 }, { "content": "#[aoc(day11, 
part1)]\n\n#[allow(clippy::trivially_copy_pass_by_ref)]\n\npub fn solve_part1(serial: &u32) -> String {\n\n let pos = find_largest_power(*serial).0;\n\n format!(\"{},{}\", pos.x, pos.y)\n\n}\n\n\n", "file_path": "src/day11.rs", "rank": 10, "score": 80943.00427213937 }, { "content": "#[aoc(day11, part2)]\n\n#[allow(clippy::trivially_copy_pass_by_ref)]\n\npub fn solve_part2(serial: &u32) -> String {\n\n let (pos, size, _) = find_largest_power_any_square(*serial);\n\n format!(\"{},{},{}\", pos.x, pos.y, size)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_get_cell_power() {\n\n assert_eq!(get_cell_power(57, Vector2::new(122, 79)), -5);\n\n assert_eq!(get_cell_power(39, Vector2::new(217, 196)), 0);\n\n assert_eq!(get_cell_power(71, Vector2::new(101, 153)), 4);\n\n }\n\n\n\n #[test]\n\n fn test_find_largest_power() {\n\n assert_eq!(find_largest_power(18), (Vector2::new(33, 45), 29));\n\n assert_eq!(find_largest_power(42), (Vector2::new(21, 61), 30));\n", "file_path": "src/day11.rs", "rank": 11, "score": 80943.00427213937 }, { "content": "#[aoc(day9, part1)]\n\npub fn solve_part1(game: &Game) -> u32 {\n\n game.run()\n\n}\n\n\n", "file_path": "src/day9.rs", "rank": 12, "score": 80943.00427213937 }, { "content": "#[aoc_generator(day11)]\n\npub fn input_generator(input: &str) -> Box<u32> {\n\n Box::new(input.trim().parse().unwrap())\n\n}\n\n\n", "file_path": "src/day11.rs", "rank": 14, "score": 77396.80679984431 }, { "content": "#[aoc(day7, part2)]\n\npub fn solve_part2(step_tree: &StepTree) -> u32 {\n\n let mut workers = vec![Worker::Idle; 5];\n\n let mut finished_steps = HashSet::<char>::new();\n\n let mut working_steps = HashSet::<char>::new();\n\n let mut total_seconds = 0;\n\n\n\n while finished_steps.len() != step_tree.len() {\n\n // Load step to idle workers\n\n workers\n\n .iter_mut()\n\n .filter(|w| match w {\n\n Worker::Idle => true,\n\n Worker::Busy(_) => false,\n\n })\n\n .for_each(|w| {\n\n // Find next step\n\n for (name, 
dependencies) in step_tree.iter() {\n\n if !finished_steps.contains(name)\n\n && !working_steps.contains(name)\n\n && (dependencies.is_empty()\n", "file_path": "src/day7.rs", "rank": 15, "score": 77355.27869873337 }, { "content": "#[allow(clippy::or_fun_call)]\n\nfn gen_grid(claims: &[Claim]) -> HashMap<Vector2<u32>, Status> {\n\n let mut grid = HashMap::<Vector2<u32>, Status>::new();\n\n for claim in claims.iter() {\n\n for x in claim.position.x..claim.position.x + claim.size.x {\n\n for y in claim.position.y..claim.position.y + claim.size.y {\n\n grid.entry(Vector2::new(x, y))\n\n .and_modify(|e| match e {\n\n Status::Valid(id) => *e = Status::Overlap(vec![*id, claim.id]),\n\n Status::Overlap(ids) => ids.push(claim.id),\n\n })\n\n .or_insert(Status::Valid(claim.id));\n\n }\n\n }\n\n }\n\n\n\n grid\n\n}\n\n\n", "file_path": "src/day3.rs", "rank": 16, "score": 42111.41866404144 }, { "content": "#[aoc(day16, part1)]\n\npub fn part1(input: &Input) -> usize {\n\n let samples = &input.samples;\n\n samples\n\n .iter()\n\n .filter(|s| {\n\n Opcode::iter()\n\n .filter(|op| {\n\n let instruction = Instruction::new(op.clone(), &s.unknown_instruction.0[1..4]);\n\n let mut registers = s.before.clone();\n\n instruction.execute(&mut registers);\n\n\n\n registers == s.after\n\n })\n\n .count()\n\n >= 3\n\n })\n\n .count()\n\n}\n\n\n", "file_path": "src/day16.rs", "rank": 17, "score": 35009.34686886956 }, { "content": "#[aoc(day16, part2)]\n\npub fn part2(input: &Input) -> usize {\n\n let opcode_map = guess_opcode(&input.samples);\n\n let mut registers: Registers = Default::default();\n\n input\n\n .raw_program\n\n .iter()\n\n .map(|i| Instruction::new(opcode_map[&i.0[0]].clone(), &i.0[1..4]))\n\n .for_each(|i| i.execute(&mut registers));\n\n\n\n registers.0[0]\n\n}\n", "file_path": "src/day16.rs", "rank": 18, "score": 35009.34686886956 }, { "content": "pub fn collapse(input: &str) -> String {\n\n let mut polymer = String::new();\n\n for c in input.trim().chars() {\n\n match 
polymer.chars().last() {\n\n None => polymer.push(c),\n\n Some(last_c) => {\n\n if c != last_c && c.to_ascii_lowercase() == last_c.to_ascii_lowercase() {\n\n polymer.pop();\n\n } else {\n\n polymer.push(c);\n\n }\n\n }\n\n }\n\n }\n\n\n\n polymer\n\n}\n\n\n", "file_path": "src/day5.rs", "rank": 19, "score": 35009.34686886956 }, { "content": "#[aoc(day5, part2)]\n\npub fn solve_part2(input: &str) -> usize {\n\n (b'a'..=b'z')\n\n .map(|c| {\n\n let c = c as char;\n\n let polymer = input\n\n .to_owned()\n\n .replace(c, \"\")\n\n .replace(c.to_ascii_uppercase(), \"\");\n\n collapse(&polymer).len()\n\n })\n\n .min()\n\n .unwrap()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn part1() {\n\n assert_eq!(solve_part1(\"dabAcCaCBAcCcaDA\"), 10);\n\n }\n\n\n\n #[test]\n\n fn part2() {\n\n assert_eq!(solve_part2(\"dabAcCaCBAcCcaDA\"), 4);\n\n }\n\n}\n", "file_path": "src/day5.rs", "rank": 20, "score": 34029.466784148375 }, { "content": "#[aoc(day24, part1)]\n\npub fn solve_part1(groups: &[Group]) -> usize {\n\n battle(groups.to_owned()).1\n\n}\n\n\n", "file_path": "src/day24.rs", "rank": 21, "score": 34029.466784148375 }, { "content": "#[aoc(day3, part1)]\n\npub fn solve_part1(claims: &[Claim]) -> usize {\n\n let grid = gen_grid(claims);\n\n\n\n grid.iter()\n\n .filter(|(_, x)| match x {\n\n Status::Valid(_) => false,\n\n Status::Overlap(_) => true,\n\n })\n\n .count()\n\n}\n\n\n", "file_path": "src/day3.rs", "rank": 22, "score": 34029.466784148375 }, { "content": "#[aoc(day22, part1)]\n\npub fn solve_part1(cave: &Cave) -> usize {\n\n let mut cave = cave.to_owned();\n\n cave.risk_level(cave.target)\n\n}\n\n\n", "file_path": "src/day22.rs", "rank": 23, "score": 34029.466784148375 }, { "content": "#[aoc(day2, part2)]\n\npub fn solve_part2(input: &str) -> String {\n\n for (line_index, line1) in input.lines().enumerate() {\n\n for line2 in input.lines().skip(line_index + 1) {\n\n let mut diff_count = 0;\n\n let mut last_diff_index = 0;\n\n for (i, 
(c1, c2)) in line1.chars().zip(line2.chars()).enumerate() {\n\n if c1 != c2 {\n\n diff_count += 1;\n\n last_diff_index = i;\n\n }\n\n }\n\n\n\n if diff_count == 1 {\n\n let mut id = line1.to_owned();\n\n id.remove(last_diff_index);\n\n return id;\n\n }\n\n }\n\n }\n\n\n", "file_path": "src/day2.rs", "rank": 24, "score": 34029.466784148375 }, { "content": "#[aoc(day22, part2)]\n\npub fn solve_part2(cave: &Cave) -> usize {\n\n let mut cave = cave.to_owned();\n\n cave.spent_minutes()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn part1() {\n\n let mut cave = Cave::new(510, Vector2::new(10, 10));\n\n assert_eq!(cave.risk_level(cave.target), 114);\n\n }\n\n\n\n #[test]\n\n fn part2() {\n\n let mut cave = Cave::new(510, Vector2::new(10, 10));\n\n assert_eq!(cave.spent_minutes(), 45);\n\n }\n\n}\n", "file_path": "src/day22.rs", "rank": 25, "score": 34029.466784148375 }, { "content": "#[aoc(day5, part1)]\n\npub fn solve_part1(input: &str) -> usize {\n\n collapse(input).len()\n\n}\n\n\n", "file_path": "src/day5.rs", "rank": 26, "score": 34029.466784148375 }, { "content": "#[aoc(day17, part1)]\n\npub fn solve_part1(grid: &Grid) -> usize {\n\n let mut grid = grid.to_owned();\n\n fill(&mut grid, &Vec2::new(500, 0), WaterDirection::Both);\n\n grid.water_count()\n\n}\n\n\n", "file_path": "src/day17.rs", "rank": 27, "score": 34029.466784148375 }, { "content": "#[aoc(day12, part1)]\n\npub fn solve_part1(rule: &Rule) -> i64 {\n\n let mut rule = rule.clone();\n\n for _ in 0..20 {\n\n rule.next_generation();\n\n }\n\n\n\n rule.sum()\n\n}\n\n\n", "file_path": "src/day12.rs", "rank": 28, "score": 34029.466784148375 }, { "content": "#[aoc_generator(day16)]\n\npub fn input_generator(input: &str) -> Input {\n\n let mut samples = vec![];\n\n let mut sample: Sample = Default::default();\n\n let mut sample_end = 0;\n\n for (i, line) in input.lines().enumerate() {\n\n if i % 4 == 0 {\n\n if let Some(begin) = line.find('[') {\n\n sample.before = line[begin + 
1..line.len() - 1].parse().unwrap();\n\n } else {\n\n sample_end = i;\n\n break;\n\n }\n\n } else if i % 4 == 1 {\n\n sample.unknown_instruction = line.parse().unwrap();\n\n } else if i % 4 == 2 {\n\n if let Some(begin) = line.find('[') {\n\n sample.after = line[begin + 1..line.len() - 1].parse().unwrap();\n\n samples.push(sample.clone());\n\n } else {\n\n sample_end = i;\n", "file_path": "src/day16.rs", "rank": 29, "score": 34029.466784148375 }, { "content": "#[aoc_generator(day13)]\n\npub fn input_generator(input: &str) -> System {\n\n input.parse().unwrap()\n\n}\n\n\n", "file_path": "src/day13.rs", "rank": 30, "score": 34029.466784148375 }, { "content": "#[aoc(day1, part2)]\n\npub fn solve_part2(input: &[i32]) -> i32 {\n\n let mut records = HashSet::new();\n\n records.insert(0);\n\n let mut current_frequency = 0;\n\n\n\n for change in input.iter().cycle() {\n\n current_frequency += change;\n\n\n\n if !records.insert(current_frequency) {\n\n break;\n\n }\n\n }\n\n\n\n current_frequency\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n", "file_path": "src/day1.rs", "rank": 31, "score": 34029.466784148375 }, { "content": "#[aoc(day17, part2)]\n\npub fn solve_part2(grid: &Grid) -> usize {\n\n let mut grid = grid.to_owned();\n\n fill(&mut grid, &Vec2::new(500, 0), WaterDirection::Both);\n\n grid.rest_water_count()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n const TEST_INPUT: &str = r\"x=495, y=2..7\n\ny=7, x=495..501\n\nx=501, y=3..7\n\nx=498, y=2..4\n\nx=506, y=1..2\n\nx=498, y=10..13\n\nx=504, y=10..13\n\ny=13, x=498..504\";\n\n\n\n #[test]\n", "file_path": "src/day17.rs", "rank": 32, "score": 34029.466784148375 }, { "content": "#[aoc(day10, part1)]\n\npub fn solve_part1(points: &[Point]) -> String {\n\n let mut grid = Grid::new(points);\n\n loop {\n\n let height = grid.size().1.y;\n\n if height == 10 {\n\n break;\n\n }\n\n\n\n grid.update();\n\n }\n\n\n\n format!(\"\\n{}\", grid)\n\n}\n\n\n", "file_path": "src/day10.rs", "rank": 
33, "score": 34029.466784148375 }, { "content": "#[aoc(day10, part2)]\n\npub fn solve_part2(points: &[Point]) -> usize {\n\n let mut grid = Grid::new(points);\n\n for i in 0.. {\n\n let height = grid.size().1.y;\n\n if height == 10 {\n\n return i;\n\n }\n\n\n\n grid.update();\n\n }\n\n\n\n unreachable!();\n\n}\n", "file_path": "src/day10.rs", "rank": 34, "score": 34029.466784148375 }, { "content": "#[aoc_generator(day19)]\n\npub fn input_generator(input: &str) -> Vm {\n\n input.parse().unwrap()\n\n}\n\n\n", "file_path": "src/day19.rs", "rank": 35, "score": 34029.466784148375 }, { "content": "#[aoc(day21, part2)]\n\npub fn solve_part2(input: &str) -> usize {\n\n let mut vm: Vm = input.parse().unwrap();\n\n let mut numbers = HashSet::new();\n\n let mut last = 0;\n\n\n\n loop {\n\n vm.step();\n\n\n\n if *vm.ip() == 28 {\n\n if numbers.insert(vm.registers.0[vm.program[28].input1]) {\n\n last = vm.registers.0[vm.program[28].input1];\n\n } else {\n\n break;\n\n }\n\n }\n\n }\n\n\n\n last\n\n}\n", "file_path": "src/day21.rs", "rank": 36, "score": 34029.466784148375 }, { "content": "#[aoc(day20, part2)]\n\npub fn solve_part2(grid: &Grid) -> usize {\n\n grid.pass_doors_more_than(1000)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n const TEST_INPUTS: &[&(&str, usize)] = &[\n\n &(\"^WNE$\", 3),\n\n &(\"^ENWWW(NEEE|SSE(EE|N))$\", 10),\n\n &(\"^ENNWSWW(NEWS|)SSSEEN(WNSE|)EE(SWEN|)NNN$\", 18),\n\n &(\"^ESSWWN(E|NNENN(EESS(WNSE|)SSS|WWWSSSSE(SW|NNNE)))$\", 23),\n\n &(\n\n \"^WSSEESWWWNW(S|NENNEEEENN(ESSSSW(NWSW|SSEN)|WSWWN(E|WWS(E|SS))))$\",\n\n 31,\n\n ),\n\n ];\n\n\n\n #[test]\n\n fn part1() {\n\n for &&(input, expect) in TEST_INPUTS {\n\n let grid: Grid = input.parse().unwrap();\n\n assert_eq!(grid.pass_most_doors(), expect);\n\n }\n\n }\n\n}\n", "file_path": "src/day20.rs", "rank": 37, "score": 34029.466784148375 }, { "content": "#[aoc(day23, part1)]\n\npub fn solve_part1(bots: &[Nanobot]) -> usize {\n\n let max_radius_bot = bots.iter().max_by_key(|bot| 
bot.radius).unwrap();\n\n bots.iter()\n\n .filter(|bot| distance(max_radius_bot.position, bot.position) <= max_radius_bot.radius)\n\n .count()\n\n}\n\n\n", "file_path": "src/day23.rs", "rank": 38, "score": 34029.466784148375 }, { "content": "#[aoc(day13, part1)]\n\npub fn solve_part1(system: &System) -> String {\n\n let pos = system.clone().run_until_first_crash();\n\n format!(\"{},{}\", pos.x, pos.y)\n\n}\n\n\n", "file_path": "src/day13.rs", "rank": 39, "score": 34029.466784148375 }, { "content": "#[aoc(day19, part2)]\n\npub fn solve_part2(vm: &Vm) -> usize {\n\n // Base on my input, running instruction #0, #17-35, #1-2 will initialize registers like this:\n\n // ip\n\n // [0, 1, 10551376, 10550400, 3, 1]\n\n //\n\n // Instruction #3-15 equivalent to:\n\n // for r1 in 1..=r2 {\n\n // for r5 in 1..=r2 {\n\n // if r1 * r5 == r2 {\n\n // r0 += r1;\n\n // }\n\n // }\n\n // }\n\n // That's finding the sum of factors of r2.\n\n\n\n let mut vm = vm.to_owned();\n\n vm.registers.0[0] = 1;\n\n loop {\n\n vm.step();\n\n\n", "file_path": "src/day19.rs", "rank": 40, "score": 34029.466784148375 }, { "content": "#[aoc_generator(day22)]\n\npub fn input_generator(input: &str) -> Cave {\n\n let mut iter = input.lines();\n\n let depth = iter\n\n .next()\n\n .and_then(|s| s.split_whitespace().next_back())\n\n .and_then(|depth_str| depth_str.parse().ok())\n\n .unwrap();\n\n let target = iter\n\n .next()\n\n .and_then(|s| s.split_whitespace().next_back())\n\n .and_then(|p| {\n\n let xy: Vec<_> = p.split(',').map(|x| x.parse().unwrap()).collect();\n\n Some(Vector2::new(xy[0], xy[1]))\n\n })\n\n .unwrap();\n\n\n\n Cave::new(depth, target)\n\n}\n\n\n", "file_path": "src/day22.rs", "rank": 41, "score": 34029.466784148375 }, { "content": "#[aoc(day8, part1)]\n\npub fn solve_part1(root: &Node) -> usize {\n\n root.sum_metadata()\n\n}\n\n\n", "file_path": "src/day8.rs", "rank": 42, "score": 34029.466784148375 }, { "content": "#[aoc(day23, part2)]\n\npub fn solve_part2(bots: &[Nanobot]) -> u64 
{\n\n let mut pq = BinaryHeap::new();\n\n for bot in bots {\n\n let dist = distance(bot.position, Vector3::zeros());\n\n pq.push((Reverse(dist.saturating_sub(bot.radius)), 1));\n\n pq.push((Reverse(dist + bot.radius), -1));\n\n }\n\n\n\n let mut count = 0;\n\n let mut max_count = 0;\n\n let mut min_dist = 0;\n\n while let Some((dist, diff)) = pq.pop() {\n\n count += diff;\n\n\n\n if count > max_count {\n\n max_count = count;\n\n min_dist = dist.0;\n\n }\n\n }\n\n\n", "file_path": "src/day23.rs", "rank": 43, "score": 34029.466784148375 }, { "content": "#[aoc_generator(day20)]\n\npub fn input_generator(input: &str) -> Grid {\n\n input.trim().parse().unwrap()\n\n}\n\n\n", "file_path": "src/day20.rs", "rank": 44, "score": 34029.466784148375 }, { "content": "#[aoc_generator(day17)]\n\npub fn input_generator(input: &str) -> Grid {\n\n input.parse().unwrap()\n\n}\n\n\n", "file_path": "src/day17.rs", "rank": 45, "score": 34029.466784148375 }, { "content": "#[aoc(day13, part2)]\n\npub fn solve_part2(system: &System) -> String {\n\n let pos = system.clone().run_until_one_cart();\n\n format!(\"{},{}\", pos.x, pos.y)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn part1() {\n\n let mut system = input_generator(include_str!(\"../tests/day13_part1.txt\"));\n\n assert_eq!(system.run_until_first_crash(), Vector2::new(7, 3));\n\n }\n\n\n\n #[test]\n\n fn part2() {\n\n let mut system = input_generator(include_str!(\"../tests/day13_part2.txt\"));\n\n assert_eq!(system.run_until_one_cart(), Vector2::new(6, 4));\n\n }\n\n}\n", "file_path": "src/day13.rs", "rank": 46, "score": 34029.466784148375 }, { "content": "#[aoc(day24, part2)]\n\npub fn solve_part2(groups: &[Group]) -> usize {\n\n for boost in 0.. 
{\n\n let mut groups = groups.to_owned();\n\n groups\n\n .iter_mut()\n\n .filter(|x| x.clan == Clan::ImmuneSystem)\n\n .for_each(|x| x.attack_damage += boost);\n\n let (winner, remaining) = battle(groups);\n\n\n\n if winner == Clan::ImmuneSystem {\n\n return remaining;\n\n }\n\n }\n\n\n\n unreachable!()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n", "file_path": "src/day24.rs", "rank": 47, "score": 34029.466784148375 }, { "content": "#[aoc_generator(day12)]\n\npub fn input_generator(input: &str) -> Rule {\n\n let lines: Vec<_> = input.lines().collect();\n\n\n\n let states = lines[0][lines[0].find(':').unwrap() + 2..]\n\n .chars()\n\n .enumerate()\n\n .filter(|(_, c)| *c == '#')\n\n .map(|(i, _)| i as i64)\n\n .collect();\n\n\n\n let spread_rules: HashSet<_> = lines[2..]\n\n .iter()\n\n .filter(|line| line.ends_with('#'))\n\n .map(|line| line[0..5].to_owned())\n\n .collect();\n\n\n\n Rule {\n\n states,\n\n spread_rules,\n\n }\n\n}\n\n\n", "file_path": "src/day12.rs", "rank": 48, "score": 34029.466784148375 }, { "content": "#[aoc(day8, part2)]\n\npub fn solve_part2(root: &Node) -> usize {\n\n root.value()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n const INPUT: &'static str = r\"2 3 0 3 10 11 12 1 1 0 1 99 2 1 1 2\";\n\n\n\n #[test]\n\n fn part1() {\n\n assert_eq!(solve_part1(&input_generator(INPUT)), 138)\n\n }\n\n\n\n #[test]\n\n fn part2() {\n\n assert_eq!(solve_part2(&input_generator(INPUT)), 66)\n\n }\n\n}\n", "file_path": "src/day8.rs", "rank": 49, "score": 34029.466784148375 }, { "content": "#[aoc_generator(day18)]\n\npub fn input_generator(input: &str) -> Grid {\n\n input.parse().unwrap()\n\n}\n\n\n", "file_path": "src/day18.rs", "rank": 50, "score": 34029.466784148375 }, { "content": "#[aoc(day18, part1)]\n\npub fn solve_part1(grid: &Grid) -> usize {\n\n let mut grid = grid.to_owned();\n\n grid.run(10);\n\n grid.resource_value()\n\n}\n\n\n", "file_path": "src/day18.rs", "rank": 51, "score": 34029.466784148375 }, { 
"content": "#[aoc(day1, part1)]\n\npub fn solve_part1(input: &[i32]) -> i32 {\n\n input.iter().sum()\n\n}\n\n\n", "file_path": "src/day1.rs", "rank": 52, "score": 34029.466784148375 }, { "content": "#[aoc(day14, part2)]\n\npub fn solve_part2(digits: &str) -> usize {\n\n let digits: Vec<_> = digits\n\n .to_string()\n\n .chars()\n\n .map(|d| d.to_digit(10).unwrap())\n\n .collect();\n\n let mut recipes: Recipes = Default::default();\n\n\n\n loop {\n\n if recipes.scores.ends_with(&digits) {\n\n return recipes.scores.len() - digits.len();\n\n } else if recipes.scores[..recipes.scores.len() - 1].ends_with(&digits) {\n\n return recipes.scores.len() - digits.len() - 1;\n\n }\n\n\n\n recipes.tick();\n\n }\n\n}\n\n\n\n#[cfg(test)]\n", "file_path": "src/day14.rs", "rank": 53, "score": 34029.466784148375 }, { "content": "#[aoc(day20, part1)]\n\npub fn solve_part1(grid: &Grid) -> usize {\n\n grid.pass_most_doors()\n\n}\n\n\n", "file_path": "src/day20.rs", "rank": 54, "score": 34029.466784148375 }, { "content": "#[aoc(day21, part1)]\n\npub fn solve_part1(input: &str) -> usize {\n\n let mut vm: Vm = input.parse().unwrap();\n\n while *vm.ip() != 28 {\n\n vm.step();\n\n }\n\n\n\n vm.registers.0[vm.program[28].input1]\n\n}\n\n\n", "file_path": "src/day21.rs", "rank": 55, "score": 34029.466784148375 }, { "content": "#[aoc(day18, part2)]\n\npub fn solve_part2(grid: &Grid) -> usize {\n\n let mut grid = grid.to_owned();\n\n grid.run(1_000_000_000);\n\n\n\n grid.resource_value()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n const TEST_INPUT: &str = r\".#.#...|#.\n\n.....#|##|\n\n.|..|...#.\n\n..|#.....#\n\n#.#|||#|#|\n\n...#.||...\n\n.|....|...\n\n||...#|.#|\n\n|.||||..|.\n\n...#.|..|.\";\n\n\n\n #[test]\n\n fn part1() {\n\n let mut grid = input_generator(TEST_INPUT);\n\n grid.run(10);\n\n assert_eq!(grid.resource_value(), 1147);\n\n }\n\n}\n", "file_path": "src/day18.rs", "rank": 56, "score": 34029.466784148375 }, { "content": "#[aoc(day12, part2)]\n\npub fn 
solve_part2(rule: &Rule) -> i64 {\n\n let mut rule = rule.clone();\n\n\n\n for _i in 0..200 {\n\n // For my input, sum will increased by 80 after 99th generation\n\n // println!(\"{:3} {}\", _i, rule);\n\n // println!(\"{:3} {:5}\", _i, rule.sum());\n\n rule.next_generation();\n\n }\n\n\n\n let sum_200 = rule.sum();\n\n rule.next_generation();\n\n let sum_201 = rule.sum();\n\n let diff = sum_201 - sum_200;\n\n\n\n sum_201 + (50_000_000_000i64 - 201) * diff\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n", "file_path": "src/day12.rs", "rank": 57, "score": 34029.466784148375 }, { "content": "#[aoc(day19, part1)]\n\npub fn solve_part1(vm: &Vm) -> usize {\n\n let mut vm = vm.to_owned();\n\n vm.run();\n\n vm.registers.0[0]\n\n}\n\n\n", "file_path": "src/day19.rs", "rank": 58, "score": 34029.466784148375 }, { "content": "pub fn factorization(num: usize) -> Vec<usize> {\n\n let mut factors = vec![];\n\n for i in 1..=num {\n\n if num % i == 0 {\n\n factors.push(i);\n\n }\n\n }\n\n\n\n factors\n\n}\n\n\n", "file_path": "src/day19.rs", "rank": 59, "score": 33461.673869598635 }, { "content": "#[aoc(day14, part1)]\n\npub fn solve_part1(recipe_count: &str) -> String {\n\n let recipe_count = recipe_count.parse().unwrap();\n\n let mut recipes: Recipes = Default::default();\n\n while recipes.scores.len() < recipe_count + 10 {\n\n recipes.tick();\n\n }\n\n\n\n recipes.scores[recipe_count..recipe_count + 10]\n\n .iter()\n\n .map(|&s| std::char::from_digit(s, 10).unwrap())\n\n .collect()\n\n}\n\n\n", "file_path": "src/day14.rs", "rank": 60, "score": 33128.847785971695 }, { "content": "#[aoc(day6, part2)]\n\npub fn solve_part2(points: &[Vector2<i32>]) -> usize {\n\n in_distance_count(points, 10000)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn part1() {\n\n assert_eq!(\n\n solve_part1(&input_generator(\n\n r\"1, 1\n\n1, 6\n\n8, 3\n\n3, 4\n\n5, 5\n\n8, 9\"\n\n )),\n\n 17\n", "file_path": "src/day6.rs", "rank": 61, "score": 32561.05487142195 }, { "content": 
"#[aoc(day25, part1)]\n\npub fn solve_part1(points: &[Vector4<i32>]) -> usize {\n\n let mut constellations: Vec<Option<Vec<Vector4<i32>>>> =\n\n points.iter().map(|&p| Some(vec![p])).collect();\n\n\n\n loop {\n\n let mut has_merged = false;\n\n for i in 0..constellations.len() {\n\n if constellations[i].is_none() {\n\n continue;\n\n }\n\n\n\n for j in i + 1..constellations.len() {\n\n if constellations[j].is_none() {\n\n continue;\n\n }\n\n\n\n if can_merge(\n\n constellations[i].as_ref().unwrap(),\n\n constellations[j].as_ref().unwrap(),\n\n ) {\n", "file_path": "src/day25.rs", "rank": 62, "score": 32561.05487142195 }, { "content": "#[aoc_generator(day23)]\n\npub fn input_generator(input: &str) -> Vec<Nanobot> {\n\n let re = Regex::new(r\"pos=<(-?\\d+),(-?\\d+),(-?\\d+)>, r=(\\d+)\").unwrap();\n\n input\n\n .lines()\n\n .map(|line| {\n\n let caps = re.captures(line).unwrap();\n\n let pos: Vec<_> = (1..=3)\n\n .map(|i| caps.get(i).and_then(|s| s.as_str().parse().ok()).unwrap())\n\n .collect();\n\n\n\n Nanobot {\n\n position: Vector3::new(pos[0], pos[1], pos[2]),\n\n radius: caps.get(4).and_then(|s| s.as_str().parse().ok()).unwrap(),\n\n }\n\n })\n\n .collect()\n\n}\n\n\n", "file_path": "src/day23.rs", "rank": 63, "score": 32561.05487142195 }, { "content": "#[aoc_generator(day8)]\n\npub fn input_generator(input: &str) -> Box<Node> {\n\n Box::new(Node::from_flat(\n\n &mut input.split_whitespace().map(|s| s.parse().unwrap()),\n\n ))\n\n}\n\n\n", "file_path": "src/day8.rs", "rank": 64, "score": 32561.05487142195 }, { "content": "#[aoc_generator(day24)]\n\npub fn input_generator(input: &str) -> Vec<Group> {\n\n let mut groups = vec![];\n\n let armies_iter = input.split(\"\\n\\n\");\n\n\n\n for army in armies_iter {\n\n let mut iter = army.lines();\n\n let clan = iter.next().unwrap().trim_end_matches(\":\");\n\n let clan_groups = iter.map(move |line| {\n\n let mut group: Group = line.parse().unwrap();\n\n group.clan = match clan {\n\n \"Immune System\" => 
Clan::ImmuneSystem,\n\n \"Infection\" => Clan::Infection,\n\n _ => unreachable!(),\n\n };\n\n group\n\n });\n\n groups.extend(clan_groups);\n\n }\n\n\n\n groups\n\n}\n\n\n", "file_path": "src/day24.rs", "rank": 65, "score": 32561.05487142195 }, { "content": "#[aoc_generator(day3)]\n\npub fn input_generator(input: &str) -> Vec<Claim> {\n\n input\n\n .lines()\n\n .map(|line| {\n\n // Claim example\n\n // id x y w h\n\n // #123 @ 3,2: 5x4\n\n let sharp_index = line.find('#').unwrap();\n\n let at_index = line.find('@').unwrap();\n\n let id = line[sharp_index + 1..at_index - 1].parse().unwrap();\n\n let colon_index = line.find(':').unwrap();\n\n let mut offset = line[at_index + 2..colon_index]\n\n .split(',')\n\n .map(|n| n.parse().unwrap());\n\n let mut size = line[colon_index + 2..]\n\n .split('x')\n\n .map(|n| n.parse().unwrap());\n\n Claim {\n\n id,\n\n position: Vector2::new(offset.next().unwrap(), offset.next().unwrap()),\n\n size: Vector2::new(size.next().unwrap(), size.next().unwrap()),\n\n }\n\n })\n\n .collect()\n\n}\n\n\n", "file_path": "src/day3.rs", "rank": 66, "score": 32561.05487142195 }, { "content": "#[aoc_generator(day1)]\n\npub fn input_generator(input: &str) -> Vec<i32> {\n\n input.lines().map(|line| line.parse().unwrap()).collect()\n\n}\n\n\n", "file_path": "src/day1.rs", "rank": 67, "score": 32561.05487142195 }, { "content": "#[aoc_generator(day9)]\n\npub fn input_generator(input: &str) -> Box<Game> {\n\n let parts: Vec<_> = input.split_whitespace().collect();\n\n Box::new(Game {\n\n players: parts[0].parse().unwrap(),\n\n last_marble: parts[6].parse().unwrap(),\n\n })\n\n}\n\n\n", "file_path": "src/day9.rs", "rank": 68, "score": 32561.05487142195 }, { "content": "#[allow(clippy::needless_range_loop)]\n\n#[aoc(day6, part1)]\n\npub fn solve_part1(points: &[Vector2<i32>]) -> usize {\n\n let max_x = points.iter().max_by_key(|p| p.x).unwrap().x as usize + 1;\n\n let max_y = points.iter().max_by_key(|p| p.y).unwrap().y as usize + 1;\n\n\n\n let mut 
grid: Vec<Vec<Vec<usize>>> = vec![vec![vec![]; max_x]; max_y];\n\n\n\n for y in 0..max_y {\n\n for x in 0..max_x {\n\n let distances: Vec<_> = points\n\n .iter()\n\n .map(|p| (p.x - x as i32).abs() + (p.y - y as i32).abs())\n\n .collect();\n\n let min_distance = distances.iter().min().unwrap();\n\n let closest_points = distances\n\n .iter()\n\n .enumerate()\n\n .filter(|(_, d)| *d == min_distance)\n\n .map(|(i, _)| i)\n\n .collect();\n\n grid[y][x] = closest_points;\n", "file_path": "src/day6.rs", "rank": 69, "score": 32561.05487142195 }, { "content": "#[aoc_generator(day10)]\n\npub fn input_generator(input: &str) -> Vec<Point> {\n\n let re = Regex::new(r\"-?\\d+\").unwrap();\n\n input\n\n .lines()\n\n .map(|line| {\n\n let result: Vec<_> = re\n\n .captures_iter(line)\n\n .map(|cap| cap[0].parse().unwrap())\n\n .collect();\n\n\n\n Point {\n\n position: Vector2::new(result[0], result[1]),\n\n velocity: Vector2::new(result[2], result[3]),\n\n }\n\n })\n\n .collect()\n\n}\n\n\n", "file_path": "src/day10.rs", "rank": 70, "score": 32561.05487142195 }, { "content": "#[aoc(day7, part1)]\n\npub fn solve_part1(step_tree: &StepTree) -> String {\n\n let mut finished_steps = HashSet::<char>::new();\n\n\n\n let mut sequence = String::new();\n\n while finished_steps.len() != step_tree.len() {\n\n for (name, dependencies) in step_tree.iter() {\n\n if !finished_steps.contains(name)\n\n && (dependencies.is_empty()\n\n || dependencies.iter().all(|dep| finished_steps.contains(dep)))\n\n {\n\n finished_steps.insert(*name);\n\n sequence.push(*name);\n\n break;\n\n }\n\n }\n\n }\n\n\n\n sequence\n\n}\n\n\n", "file_path": "src/day7.rs", "rank": 71, "score": 32298.24673405085 }, { "content": "pub fn outcome(scene: &mut Scene) -> (i32, i32) {\n\n let mut round = 0;\n\n loop {\n\n let full_round = scene.tick();\n\n if !full_round {\n\n break;\n\n }\n\n\n\n round += 1;\n\n }\n\n\n\n let hp = scene.entities.iter().map(|e| e.hp).sum();\n\n (round, hp)\n\n}\n\n\n", "file_path": "src/day15.rs", 
"rank": 72, "score": 32045.559102224288 }, { "content": "#[allow(clippy::or_fun_call)]\n\n#[aoc_generator(day7)]\n\npub fn input_generator(input: &str) -> Box<StepTree> {\n\n let mut step_tree: StepTree = BTreeMap::new();\n\n\n\n input.lines().for_each(|line| {\n\n // Step example\n\n // Step C must be finished before step A can begin.\n\n let parts: Vec<_> = line.split(' ').collect();\n\n step_tree\n\n .entry(parts[1].chars().next().unwrap())\n\n .or_default();\n\n step_tree\n\n .entry(parts[7].chars().next().unwrap())\n\n .and_modify(|e| e.push(parts[1].chars().next().unwrap()))\n\n .or_insert(vec![parts[1].chars().next().unwrap()]);\n\n });\n\n\n\n Box::new(step_tree)\n\n}\n\n\n", "file_path": "src/day7.rs", "rank": 73, "score": 31730.453819501105 }, { "content": "#[aoc_generator(day6)]\n\npub fn input_generator(input: &str) -> Vec<Vector2<i32>> {\n\n input\n\n .lines()\n\n .map(|line| {\n\n let mut axes = line.split(',');\n\n Vector2::new(\n\n axes.next().unwrap().trim().parse().unwrap(),\n\n axes.next().unwrap().trim().parse().unwrap(),\n\n )\n\n })\n\n .collect()\n\n}\n\n\n", "file_path": "src/day6.rs", "rank": 74, "score": 31214.95805030344 }, { "content": "#[aoc_generator(day25)]\n\npub fn input_generator(input: &str) -> Vec<Vector4<i32>> {\n\n input\n\n .lines()\n\n .map(|line| {\n\n let coordinate: Vec<i32> = line.split(',').map(|x| x.parse().unwrap()).collect();\n\n Vector4::new(coordinate[0], coordinate[1], coordinate[2], coordinate[3])\n\n })\n\n .collect()\n\n}\n\n\n", "file_path": "src/day25.rs", "rank": 75, "score": 31214.95805030344 }, { "content": "pub fn distance(me: Vector3<i64>, other: Vector3<i64>) -> u64 {\n\n ((me.x - other.x).abs() + (me.y - other.y).abs() + (me.z - other.z).abs()) as u64\n\n}\n\n\n", "file_path": "src/day23.rs", "rank": 76, "score": 31144.995185137934 }, { "content": "pub fn battle(mut groups: Vec<Group>) -> (Clan, usize) {\n\n loop {\n\n groups.sort_by_key(|x| (Reverse(x.power()), Reverse(x.initiative)));\n\n let mut 
targets = vec![None; groups.len()];\n\n for i in 0..groups.len() {\n\n let mut max_damage = 0;\n\n for j in 0..groups.len() {\n\n if groups[i].clan == groups[j].clan\n\n || targets.contains(&Some(j))\n\n || groups[j].units == 0\n\n {\n\n continue;\n\n }\n\n\n\n let damage = groups[i].damage_to(&groups[j]);\n\n if damage > max_damage {\n\n max_damage = damage;\n\n targets[i] = Some(j);\n\n }\n\n }\n", "file_path": "src/day24.rs", "rank": 77, "score": 30744.85220208273 }, { "content": "pub fn guess_opcode(samples: &[Sample]) -> HashMap<usize, Opcode> {\n\n let mut match_opcodes: Vec<HashSet<Opcode>> = vec![HashSet::new(); OPCODE_COUNT];\n\n for (opcode, possible_opcodes) in match_opcodes.iter_mut().enumerate() {\n\n let sets: Vec<HashSet<Opcode>> = samples\n\n .iter()\n\n .filter(|s| s.unknown_instruction.0[0] == opcode)\n\n .map(|s| {\n\n Opcode::iter()\n\n .filter(|op| {\n\n let instruction =\n\n Instruction::new(op.clone(), &s.unknown_instruction.0[1..4]);\n\n let mut registers = s.before.clone();\n\n instruction.execute(&mut registers);\n\n\n\n registers == s.after\n\n })\n\n .collect::<HashSet<Opcode>>()\n\n })\n\n .collect();\n\n\n", "file_path": "src/day16.rs", "rank": 78, "score": 30446.515229642333 }, { "content": "pub fn distance(p1: Vector4<i32>, p2: Vector4<i32>) -> i32 {\n\n (p1.x - p2.x).abs() + (p1.y - p2.y).abs() + (p1.z - p2.z).abs() + (p1.w - p2.w).abs()\n\n}\n\n\n", "file_path": "src/day25.rs", "rank": 79, "score": 29545.95131255598 }, { "content": "pub fn can_merge(c1: &[Vector4<i32>], c2: &[Vector4<i32>]) -> bool {\n\n for &p1 in c1 {\n\n for &p2 in c2 {\n\n if distance(p1, p2) <= 3 {\n\n return true;\n\n }\n\n }\n\n }\n\n\n\n false\n\n}\n\n\n", "file_path": "src/day25.rs", "rank": 80, "score": 28832.940946250066 }, { "content": "pub fn fill(grid: &mut Grid, position: &Vec2, direction: WaterDirection) -> Option<usize> {\n\n let max_y = (grid.0.iter().next_back().unwrap().0).0.y;\n\n if position.0.y > max_y {\n\n return None;\n\n }\n\n\n\n match 
grid.0.get(position).unwrap_or(&Tile::Sand) {\n\n Tile::Sand => {\n\n grid.0.insert(position.clone(), Tile::FlowingWater);\n\n fill(\n\n grid,\n\n &Vec2::new(position.0.x, position.0.y + 1),\n\n WaterDirection::Both,\n\n )?;\n\n match direction {\n\n WaterDirection::Both => match (\n\n fill(\n\n grid,\n\n &Vec2::new(position.0.x - 1, position.0.y),\n\n WaterDirection::Left,\n", "file_path": "src/day17.rs", "rank": 81, "score": 26790.364520305775 }, { "content": "use nalgebra::Vector2;\n\nuse std::collections::{HashMap, HashSet};\n\n\n\npub struct Claim {\n\n id: u32,\n\n position: Vector2<u32>,\n\n size: Vector2<u32>,\n\n}\n\n\n\n#[derive(PartialEq)]\n\npub enum Status {\n\n Valid(u32),\n\n Overlap(Vec<u32>),\n\n}\n\n\n\n#[aoc_generator(day3)]\n", "file_path": "src/day3.rs", "rank": 83, "score": 10.982069606961362 }, { "content": "#[macro_use]\n\nextern crate aoc_runner_derive;\n\nextern crate nalgebra as na;\n\n#[macro_use]\n\nextern crate strum_macros;\n\n\n\npub mod vm;\n\n\n\npub mod day1;\n\npub mod day10;\n\npub mod day11;\n\npub mod day12;\n\npub mod day13;\n\npub mod day14;\n\npub mod day15;\n\npub mod day16;\n\npub mod day17;\n\npub mod day18;\n\npub mod day19;\n\npub mod day2;\n", "file_path": "src/lib.rs", "rank": 85, "score": 6.199304380874132 }, { "content": "use na::Vector3;\n\nuse regex::Regex;\n\nuse std::cmp::Reverse;\n\nuse std::collections::BinaryHeap;\n\n\n\npub struct Nanobot {\n\n position: Vector3<i64>,\n\n radius: u64,\n\n}\n\n\n", "file_path": "src/day23.rs", "rank": 86, "score": 6.021102302263214 }, { "content": "use std::collections::{BTreeMap, HashSet};\n\n\n\npub type StepTree = BTreeMap<char, Vec<char>>;\n\n\n\n#[derive(Debug, Clone)]\n\npub enum Worker {\n\n Idle,\n\n Busy(Work),\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub struct Work {\n\n handling_step: char,\n\n remaining_seconds: u32,\n\n}\n\n\n\n#[allow(clippy::or_fun_call)]\n\n#[aoc_generator(day7)]\n", "file_path": "src/day7.rs", "rank": 87, "score": 5.945236077783658 }, { 
"content": "use na::Vector2;\n\nuse regex::Regex;\n\nuse std::fmt;\n\n\n\n#[derive(Debug, Clone, PartialEq, Eq)]\n\npub struct Point {\n\n position: Vector2<i32>,\n\n velocity: Vector2<i32>,\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct Grid(Vec<Point>);\n\n\n\nimpl Grid {\n\n pub fn new(points: &[Point]) -> Grid {\n\n Grid(points.to_owned())\n\n }\n\n\n\n pub fn update(&mut self) {\n\n let mut new_grid = Vec::new();\n", "file_path": "src/day10.rs", "rank": 88, "score": 5.886516934525571 }, { "content": "use na::Vector2;\n\nuse regex::Regex;\n\nuse std::cmp::Ordering;\n\nuse std::collections::BTreeMap;\n\nuse std::fmt;\n\nuse std::str::FromStr;\n\n\n\n#[derive(Debug, Clone, PartialEq, Eq, PartialOrd)]\n\npub struct Vec2(Vector2<usize>);\n\n\n\nimpl Vec2 {\n\n pub fn new(x: usize, y: usize) -> Vec2 {\n\n Vec2(Vector2::new(x, y))\n\n }\n\n}\n\n\n\nimpl Ord for Vec2 {\n\n fn cmp(&self, other: &Self) -> Ordering {\n\n (self.0.y, self.0.x).cmp(&(other.0.y, other.0.x))\n\n }\n", "file_path": "src/day17.rs", "rank": 89, "score": 5.644309943102132 }, { "content": "use na::Vector2;\n\nuse std::collections::{HashMap, HashSet, VecDeque};\n\nuse std::fmt;\n\n\n\n#[derive(Debug, Clone, PartialEq, Eq)]\n\npub enum Tile {\n\n Wall,\n\n Cavern,\n\n}\n\n\n\n#[derive(Debug, Clone, PartialEq, Eq, Hash)]\n\npub enum Race {\n\n Goblin,\n\n Elf,\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub struct Scene {\n\n grid: Vec<Vec<Tile>>,\n\n entities: Vec<Entity>,\n", "file_path": "src/day15.rs", "rank": 90, "score": 5.481978112195188 }, { "content": "use regex::Regex;\n\nuse std::cmp::Reverse;\n\nuse std::collections::HashMap;\n\nuse std::error::Error;\n\nuse std::str::FromStr;\n\n\n\n#[derive(Debug, Clone, PartialEq, Eq, Hash)]\n\npub enum Clan {\n\n None,\n\n ImmuneSystem,\n\n Infection,\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub struct Group {\n\n clan: Clan,\n\n units: usize,\n\n hp_per_unit: usize,\n\n initiative: usize,\n\n weaknesses: Vec<String>,\n", "file_path": "src/day24.rs", "rank": 91, 
"score": 5.47418778404133 }, { "content": "use crate::vm::*;\n\nuse std::collections::{HashMap, HashSet};\n\nuse std::error::Error;\n\nuse std::str::FromStr;\n\nuse strum::IntoEnumIterator;\n\n\n\n#[derive(Debug, Default, Clone)]\n\npub struct Sample {\n\n before: Registers,\n\n unknown_instruction: UnknownInstruction,\n\n after: Registers,\n\n}\n\n\n", "file_path": "src/day16.rs", "rank": 92, "score": 5.463868919017672 }, { "content": "use na::Vector2;\n\nuse num_derive::FromPrimitive;\n\nuse std::cmp::Ordering;\n\nuse std::collections::{BinaryHeap, HashMap, HashSet};\n\nuse strum::{EnumCount, IntoEnumIterator};\n\n\n\n#[derive(Debug, Clone, Copy, EnumCount, FromPrimitive)]\n\npub enum RegionType {\n\n Rocky = 0,\n\n Wet = 1,\n\n Narrow = 2,\n\n}\n\n\n\n#[derive(Debug, Clone, Copy, EnumIter, PartialEq, Eq, Hash)]\n\npub enum Tool {\n\n Neither = 0,\n\n Torch = 1,\n\n ClimbingGear = 2,\n\n}\n\n\n", "file_path": "src/day22.rs", "rank": 93, "score": 5.309764270725429 }, { "content": "pub struct Recipes {\n\n elves: Vec<usize>,\n\n scores: Vec<u32>,\n\n}\n\n\n\nimpl Recipes {\n\n pub fn tick(&mut self) {\n\n let new_recipes: u32 = self.elves.iter().map(|&e| self.scores[e]).sum();\n\n self.scores.extend(\n\n new_recipes\n\n .to_string()\n\n .chars()\n\n .map(|c| c.to_digit(10).unwrap()),\n\n );\n\n\n\n for e in &mut self.elves {\n\n *e = (*e + self.scores[*e] as usize + 1) % self.scores.len();\n\n }\n\n }\n\n}\n", "file_path": "src/day14.rs", "rank": 94, "score": 5.150718388188815 }, { "content": "use na::Vector2;\n\nuse std::collections::HashMap;\n\nuse std::collections::HashSet;\n\nuse std::collections::VecDeque;\n\nuse std::fmt;\n\nuse std::str::FromStr;\n\n\n\n#[derive(Debug, Clone, PartialEq, Eq)]\n\npub enum Tile {\n\n Wall,\n\n Door,\n\n Room,\n\n}\n\n\n\nimpl fmt::Display for Tile {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n let c = match self {\n\n Tile::Wall => \"#\",\n\n Tile::Door => \"+\",\n\n Tile::Room => \" \",\n", "file_path": 
"src/day20.rs", "rank": 95, "score": 4.829746657081197 }, { "content": "\n\n#[derive(Clone)]\n\npub struct Game {\n\n players: usize,\n\n last_marble: u32,\n\n}\n\n\n\nimpl Game {\n\n pub fn run(&self) -> u32 {\n\n let mut scores: Vec<u32> = vec![0; self.players];\n\n\n\n let mut marbles = VecDeque::new();\n\n marbles.push_back(0);\n\n for (marble, player_index) in (1..=self.last_marble).zip((0..scores.len()).cycle()) {\n\n if marble % 23 != 0 {\n\n marbles.rotate_left(2);\n\n marbles.push_back(marble);\n\n } else {\n\n marbles.rotate_right(7);\n\n scores[player_index] += marble + marbles.pop_back().unwrap();\n\n }\n\n }\n\n\n\n *scores.iter().max().unwrap()\n\n }\n\n}\n\n\n", "file_path": "src/day9.rs", "rank": 96, "score": 4.529901404450225 }, { "content": "use std::collections::HashMap;\n\nuse std::fmt;\n\nuse std::str::FromStr;\n\n\n\n#[derive(Debug, Clone, PartialEq, Eq, Hash)]\n\npub enum Tile {\n\n Open,\n\n Trees,\n\n Lumberyard,\n\n}\n\n\n\nimpl FromStr for Tile {\n\n type Err = ();\n\n\n\n fn from_str(s: &str) -> Result<Self, Self::Err> {\n\n Ok(match s {\n\n \".\" => Tile::Open,\n\n \"|\" => Tile::Trees,\n\n \"#\" => Tile::Lumberyard,\n\n _ => unreachable!(),\n", "file_path": "src/day18.rs", "rank": 97, "score": 4.398400824586423 }, { "content": "pub mod day20;\n\npub mod day21;\n\npub mod day22;\n\npub mod day23;\n\npub mod day24;\n\npub mod day25;\n\npub mod day3;\n\npub mod day4;\n\npub mod day5;\n\npub mod day6;\n\npub mod day7;\n\npub mod day8;\n\npub mod day9;\n\n\n\naoc_lib! {year = 2018}\n", "file_path": "src/lib.rs", "rank": 98, "score": 4.30847050202029 }, { "content": "use lazy_static::lazy_static;\n\nuse na::{Matrix2, Vector2};\n\nuse std::cmp::Ordering;\n\nuse std::collections::HashSet;\n\nuse std::fmt;\n\nuse std::str::FromStr;\n\n\n", "file_path": "src/day13.rs", "rank": 99, "score": 4.149044263719674 } ]
Rust
src/webdata.rs
andete/show_bgs
4089d645c78ba7c6e98e8321aaee5862aec97228
use chrono::{Date,DateTime,Utc}; use data::{Allegiance, Government, State}; use data; use std::collections::{BTreeMap,HashMap,HashSet}; use serde::de::{self, Deserialize, Deserializer}; #[derive(Debug,Deserialize, Serialize)] pub struct Systems { pub report_name: String, pub systems: Vec<System>, pub dates: Vec<String>, pub dates10: Vec<String>, pub bgs_day: String, pub factions: HashMap<String, FactionGlobalState>, } #[derive(Debug,Deserialize, Serialize)] pub struct System { pub eddb_id:i64, pub name:String, pub population: i64, pub factions:HashMap<String, Faction>, pub factions_by_inf:Vec<Faction>, pub warnings:Vec<String>, pub controlling:String, } #[derive(Debug,Deserialize, Serialize, Clone)] pub struct Faction { pub name:String, pub government:Government, pub allegiance:Allegiance, pub evolution:Vec<FactionData>, pub evolution10:Vec<FactionData>, pub global:Option<FactionGlobalState>, pub color:String, pub at_home:bool, pub controlling:bool, pub is_player_faction:bool, } #[derive(Debug,Deserialize, Serialize, Clone)] pub struct FactionGlobalState { pub name:String, pub government:Government, pub allegiance:Allegiance, pub state_date:DateTime<Utc>, pub state_day:Option<u8>, pub state_max_length:u8, pub state_danger:bool, pub state:State, pub state_system:Option<String>, pub pending_state:Option<State>, pub pending_state_system:Option<String>, pub recovery_state:Option<State>, pub recovery_state_system:Option<String>, pub is_player_faction:bool, } #[derive(Debug,Deserialize, Serialize, Clone)] pub struct FactionData { pub date:DateTime<Utc>, pub label_date:String, pub influence:f64, pub state:State, pub state_day:u8, pub state_max_length:u8, pub state_danger:bool, pub pending_states:Vec<FactionState>, pub recovering_states:Vec<FactionState>, pub influence_danger:bool, } #[derive(Debug, Deserialize, Serialize, Clone)] pub struct FactionState { pub state:State, pub state_recovery_length:u8, pub state_pending_length:u8, pub trend:data::Trend, pub 
trend_display:String, pub state_day:u8, pub state_pending_danger:bool, } impl From<data::System> for System { fn from(s:data::System) -> System { System { eddb_id:s.eddb_id, name:s.name.clone(), population:s.population, factions:HashMap::new(), factions_by_inf:vec![], warnings:vec![], controlling:s.dynamic.controlling_minor_faction, } } } impl<'a> From<&'a data::Faction> for Faction { fn from(s:&'a data::Faction) -> Faction { Faction { name:s.name.clone(), government:s.government, allegiance:s.allegiance, evolution:vec![], evolution10:vec![], color:"".into(), global:None, at_home:false, controlling:false, is_player_faction:s.is_player_faction, } } } impl<'a> From<&'a data::Faction> for FactionGlobalState { fn from(s:&'a data::Faction) -> FactionGlobalState { let (state, system) = s.faction_state(); let state:State = state.into(); let (pending_state, pending_system) = s.faction_pending_single_system_state(); let (recovery_state, recovery_system) = s.faction_recovering_single_system_state(); FactionGlobalState { name:s.name.clone(), government:s.government, allegiance:s.allegiance, state:state, state_system:system, state_date:s.dynamic.eddbv3_updated_at, state_day:None, state_max_length:state.max_length(), state_danger:state.danger(), pending_state:pending_state, pending_state_system:pending_system, recovery_state:recovery_state, recovery_state_system:recovery_system, is_player_faction:s.is_player_faction, } } } impl From <data::FactionHistory> for FactionData { fn from(h:data::FactionHistory) -> FactionData { let state:State = h.presence.state.into(); FactionData { date:h.updated_at, label_date:format!("{}", h.updated_at.format("%d/%m")), influence:h.presence.influence, state:state, state_day:0, state_max_length:state.max_length(), pending_states:h.presence.pending_states.into_iter().map(|s| s.into()).collect(), recovering_states:h.presence.recovering_states.into_iter().map(|s| s.into()).collect(), state_danger:state.danger(), influence_danger:false, } } } impl From 
<data::StateTrend> for FactionState { fn from(s:data::StateTrend) -> FactionState { let d = match s.trend { data::Trend::Up => "&uarr;", data::Trend::Down => "&darr;", data::Trend::Level => "&harr;", }.into(); let state:State = s.state.into(); FactionState { state:state, trend:s.trend, trend_display:d, state_day:0, state_recovery_length:state.recovery(), state_pending_length:state.pending(), state_pending_danger:state.pending_danger(), } } } fn update_states(states:&mut Vec<FactionState>, h:&mut HashMap<State,u8>) { let mut seen = HashSet::new(); for state in states { seen.insert(state.state); if !h.contains_key(&state.state) { h.insert(state.state, 1); state.state_day = 1; } else { let n = h.get(&state.state).unwrap() + 1; h.insert(state.state, n); state.state_day = n; } } let keys:Vec<State> = h.keys().cloned().collect(); for k in keys { if !seen.contains(&k) { h.remove(&k); } } } impl Faction { pub fn cleanup_evolution(&mut self, dates:&Vec<Date<Utc>>) { let mut b = BTreeMap::new(); for e in &self.evolution { let date = e.date.date(); if !b.contains_key(&date) { b.insert(date, vec![e.clone()]); } else { b.get_mut(&date).unwrap().push(e.clone()) } } let mut v = vec![]; let mut prev_inf = 0.0; for (_day, values) in b { let mut found = false; for val in &values { if val.influence != prev_inf { v.push(val.clone()); prev_inf = val.influence; found = true; break; } } if !found { info!("{} INF stayed equal", self.name); v.push(values[0].clone()) } } let mut di = dates.iter(); let mut prev:Option<FactionData> = None; let mut v2 = vec![]; for e in v { let mut date = di.next().unwrap(); while *date != e.date.date() { if let Some(e2) = prev { let mut e3 = e2.clone(); e3.date = date.and_hms(12,30,0); e3.label_date = format!("{}", date.format("%d/%m")); v2.push(e3.clone()); prev = Some(e3); } else { } date = di.next().unwrap(); } v2.push(e.clone()); prev = Some(e.clone()); } self.evolution = v2; } pub fn fill_in_state_days(&mut self) { let mut prev_state = State::None; let 
mut recovery_states = HashMap::new(); let mut pending_states = HashMap::new(); let mut c:u8 = 1; for e in &mut self.evolution { if e.state != prev_state { prev_state = e.state; c = 1; e.state_day = c; } else { c += 1; e.state_day = c; } update_states(&mut e.pending_states, &mut pending_states); update_states(&mut e.recovering_states, &mut recovery_states); } } pub fn fill_in_evolution10(&mut self, dates: &Vec<Date<Utc>>) { let dates10 = dates.as_slice().windows(10).last().unwrap().to_vec(); let mut ev = vec![]; for e in &self.evolution { if dates10.contains(&e.date.date()) { ev.push(e.clone()) } } self.evolution10 = ev; } pub fn latest_inf(&self) -> i64 { (self.evolution.last().unwrap().influence * 1000.0) as i64 } pub fn fill_in_state_other_system(&mut self) { } } impl<'de> Deserialize<'de> for State { fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: Deserializer<'de>, { let s = String::deserialize(deserializer)?.to_lowercase(); let state = match s.as_str() { "expansion" => State::Expansion, "war" => State::War, "civil unrest" | "civilunrest" => State::CivilUnrest, "civil war" | "civilwar" => State::CivilWar, "election" => State::Election, "boom" => State::Boom, "bust" => State::Bust, "famine" => State::Famine, "lockdown" => State::Lockdown, "investment" => State::Investment, "retreat" => State::Retreat, "outbreak" => State::Outbreak, "none" => State::None, other => { return Err(de::Error::custom(format!("Invalid state '{}'", other))); }, }; Ok(state) } }
use chrono::{Date,DateTime,Utc}; use data::{Allegiance, Government, State}; use data; use std::collections::{BTreeMap,HashMap,HashSet}; use serde::de::{self, Deserialize, Deserializer}; #[derive(Debug,Deserialize, Serialize)] pub struct Systems { pub report_name: String, pub systems: Vec<System>, pub dates: Vec<String>, pub dates10: Vec<String>, pub bgs_day: String, pub factions: HashMap<String, FactionGlobalState>, } #[derive(Debug,Deserialize, Serialize)] pub struct System { pub eddb_id:i64, pub name:String, pub population: i64, pub factions:HashMap<String, Faction>, pub factions_by_inf:Vec<Faction>, pub warnings:Vec<String>, pub controlling:String, } #[derive(Debug,Deserialize, Serialize, Clone)] pub struct Faction { pub name:String, pub government:Government, pub allegiance:Allegiance, pub evolution:Vec<FactionData>, pub evolution10:Vec<FactionData>, pub global:Option<FactionGlobalState>, pub color:String, pub at_home:bool, pub controlling:bool, pub is_player_faction:bool, } #[derive(Debug,Deserialize, Serialize, Clone)] pub struct FactionGlobalState { pub name:String, pub government:Government, pub allegiance:Allegiance, pub state_date:DateTime<Utc>, pub state_day:Option<u8>, pub state_max_length:u8, pub state_danger:bool, pub state:State, pub state_system:Option<String>, pub pending_state:Option<State>, pub pending_state_system:Option<String>, pub recovery_state:Option<State>, pub recovery_state_system:Option<String>, pub is_player_faction:bool, } #[derive(Debug,Deserialize, Serialize, Clone)] pub struct FactionData { pub date:DateTime<Utc>, pub label_date:String, pub influence:f64, pub state:State, pub state_day:u8, pub state_max_length:u8, pub state_danger:bool, pub pending_states:Vec<FactionState>, pub recovering_states:Vec<FactionState>, pub influence_danger:bool, } #[derive(Debug, Deserialize, Serialize, Clone)] pub struct FactionState { pub state:State, pub state_recovery_length:u8, pub state_pending_length:u8, pub trend:data::Trend, pub 
trend_display:String, pub state_day:u8, pub state_pending_danger:bool, } impl From<data::System> for System { fn from(s:data::System) -> System { System { eddb_id:s.eddb_id, name:s.name.clone(), population:s.population, factions:HashMap::new(), factions_by_inf:vec![], warnings:vec![], controlling:s.dynamic.controlling_minor_faction, } } } impl<'a> From<&'a data::Faction> for Faction { fn from(s:&'a data::Faction) -> Faction { Faction { name:s.name.clone(), government:s.government, allegiance:s.allegiance, evolution:vec![], evolution10:vec![], color:"".into(), global:None, at_home:false, controlling:false, is_player_faction:s.is_player_faction, } } } impl<'a> From<&'a data::Faction> for FactionGlobalState { fn from(s:&'a data::Faction) -> FactionGlobalState { let (state, system) = s.faction_state(); let state:State = state.into(); let (pending_state, pending_system) = s.faction_pending_single_system_state(); let (recovery_state, recovery_system) = s.faction_recovering_single_system_state(); FactionGlobalState { name:s.name.clone(), government:s.government, allegiance:s.allegiance, state:state, state_system:system, state_date:s.dynamic.eddbv3_updated_at, state_day:None, state_max_length:state.max_length(), state_danger:state.danger(), pending_state:pending_state, pending_state_system:pending_system, recovery_state:recovery_state, recovery_state_system:recovery_system, is_player_faction:s.is_player_faction, } } } impl From <data::FactionHistory> for FactionData { fn from(h:data::FactionHistory) -> FactionData { let state:State = h.presence.state.into(); FactionData { date:h.updated_at, label_date:format!("{}", h.updated_at.format("%d/%m")), influence:h.presence.influence, state:state, state_day:0, state_max_length:state.max_length(), pending_states:h.presence.pending_states.into_iter().map(|s| s.into()).collect(), recovering_states:h.presence.recovering_states.into_iter().map(|s| s.into()).collect(), state_danger:state.danger(), influence_danger:false, } } } impl From 
<data::StateTrend> for FactionState { fn from(s:data::StateTrend) -> FactionState { let d = match s.trend { data::Trend::Up => "&uarr;", data::Trend::Down => "&darr;", data::Trend::Level => "&harr;", }.into(); let state:State = s.state.into(); FactionState { state:state, trend:s.trend, trend_display:d, state_day:0, state_recovery_length:state.recovery(), state_pending_length:state.pending(), state_pending_danger:state.pending_danger(), } } } fn update_states(states:&mut Vec<FactionState>, h:&mut HashMap<State,u8>) { let mut seen = HashSet::new(); for state in states { seen.insert(state.state); if !h.contains_key(&state.state) { h.insert(state.state, 1); state.state_day = 1; } else { let n = h.get(&state.state).unwrap() + 1; h.insert(state.state, n); state.state_day = n; } } let keys:Vec<State> = h.keys().cloned().collect(); for k in keys { if !seen.contains(&k) { h.remove(&k); } } } impl Faction { pub fn cleanup_evolution(&mut self, dates:&Vec<Date<Utc>>) { let mut b = BTreeMap::new(); for e in &self.evolution { let date = e.date.date(); if !b.contains_key(&date) { b.insert(date, vec![e.clone()]); } else { b.get_mut(&date).unwrap().push(e.clone()) } } let mut v = vec![]; let mut prev_inf = 0.0; for (_day, values) in b { let mut found = false; for val in &values { if val.influence != prev_inf { v.push(val.clone()); prev_inf = val.influence; found = true; break; } } if !found { info!("{} INF stayed equal", self.name); v.push(values[0].clone()) } } let mut di = dates.iter(); let mut prev:Option<FactionData> = None; let mut v2 = vec![]; for e in v { let mut date = di.next().unwrap(); while *date != e.date.date() { if let Some(e2) = prev { let mut e3 = e2.clone(); e3.date = date.and_hms(12,30,0); e3.label_date = format!("{}", date.format("%d/%m")); v2.push(e3.clone()); prev = Some(e3); } else { } date = di.next().unwrap(); } v2.push(e.clone()); prev = Some(e.clone()); } self.evolution = v2; } pub fn fill_in_state_days(&mut self) { let mut prev_state = State::None; let 
mut recovery_states = HashMap::new(); let mut pending_states = HashMap::new(); let mut c:u8 = 1; for e in &mut self.evolution { if e.state != prev_state { prev_state = e.state; c = 1; e.state_day = c; } else { c += 1; e.state_day = c; } update_states(&mut e.pending_states, &mut pending_states); update_states(&mut e.recovering_states, &mut recovery_states); } } pub fn fill_in_evolution10(&mut self, dates: &Vec<Date<Utc>>) { let dates10 = dates.as_slice().windows(10).last().unwrap().to_vec(); let mut ev = vec![]; for e in &self.evolution { if dates10.contains(&e.date.date()) { ev.push(e.clone()) } } self.evolution10 = ev; } pub fn latest_inf(&self) -> i64 { (self.evolution.last().unwrap().influence * 1000.0) as i64 } pub fn fill_in_state_other_system(&mut self) { } } impl<'de> Deserialize<'de> for State { fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: Deserializer<'de>, { let s = String::deserialize(deserializer)?.to_lowercase(); let state = match s.as_str() { "expansion" => Stat
}
e::Expansion, "war" => State::War, "civil unrest" | "civilunrest" => State::CivilUnrest, "civil war" | "civilwar" => State::CivilWar, "election" => State::Election, "boom" => State::Boom, "bust" => State::Bust, "famine" => State::Famine, "lockdown" => State::Lockdown, "investment" => State::Investment, "retreat" => State::Retreat, "outbreak" => State::Outbreak, "none" => State::None, other => { return Err(de::Error::custom(format!("Invalid state '{}'", other))); }, }; Ok(state) }
function_block-function_prefixed
[ { "content": "pub fn fetch(config: &Config, n_days:i64) {\n\n info!(\"Fetching system info for last {} days\", n_days);\n\n info!(\"and discovering minor factions\");\n\n\n\n let system_names = config.systems();\n\n info!(\"systems: {:?}\", system_names);\n\n\n\n let datadir = config.datadir();\n\n create_dir_all(&datadir).unwrap();\n\n\n\n let client = reqwest::Client::new();\n\n\n\n let mut minor_factions = BTreeSet::<String>::new();\n\n\n\n let now = (Utc::now().timestamp()+100)*1000;\n\n info!(\"now: {}\", now);\n\n let then = now - ((n_days+1)*24*60*60*1000);\n\n\n\n // fetch ebgs data for systems\n\n let mut all_dates = BTreeSet::new();\n", "file_path": "src/fetch.rs", "rank": 1, "score": 59176.7343626405 }, { "content": "pub fn fetch_fact(config:&Config, n_days:i64) {\n\n\n\n info!(\"Fetching Minor Faction data for last {} days\", n_days);\n\n let datadir = config.datadir();\n\n let client = reqwest::Client::builder().gzip(true).timeout(Duration::from_secs(20)).build().unwrap();\n\n let n = format!(\"{}/minor_factions.json\", datadir);\n\n let f = File::open(&n).unwrap();\n\n let minor_factions:BTreeSet<String> = serde_json::from_reader(&f).unwrap();\n\n\n\n let now = Utc::now().timestamp()*1000;\n\n let then = now - ((n_days+1)*24*60*60*1000);\n\n info!(\"now: {}\", now);\n\n \n\n for faction_name in &minor_factions {\n\n info!(\"Faction: {}\", faction_name);\n\n // first fetch eddb data\n\n let url = format!(\"{}factions?name={}\", eddbv3::URL, faction_name);\n\n let res = client.get(&url).send().unwrap().text().unwrap();\n\n let mut faction_page:eddbv3::FactionPage = serde_json::from_str(&res).unwrap();\n\n let eddb_faction = faction_page.docs.remove(0);\n", "file_path": "src/fetch_fact.rs", "rank": 2, "score": 54584.197362429586 }, { "content": "pub fn webpage(config:&Config) {\n\n info!(\"Generating webpage.\");\n\n let datadir = config.datadir();\n\n let n = format!(\"{}/report.json\", datadir);\n\n let f = File::open(&n).unwrap();\n\n let 
systems:Systems = serde_json::from_reader(&f).unwrap();\n\n let templates_fn = format!(\"{}/../../template/*.tera\", datadir);\n\n let templates = compile_templates!(&templates_fn);\n\n let outdir = format!(\"{}/out\", datadir);\n\n let outpath:PathBuf = outdir.clone().into();\n\n create_dir_all(outdir).unwrap();\n\n let page = templates.render(\"index.tera\", &systems).unwrap();\n\n let n = format!(\"{}/out/index.html\", datadir);\n\n let mut f = File::create(&n).unwrap();\n\n f.write_all(page.as_bytes()).unwrap();\n\n let dir = format!(\"{}/../../template/data\", datadir);\n\n for entry in read_dir(dir).unwrap() {\n\n let file = entry.unwrap().path();\n\n let filename:String = file.file_name().unwrap().to_str().unwrap().into();\n\n let mut op = outpath.clone();\n\n op.push(filename);\n\n copy(file, op).unwrap();\n\n }\n\n}\n", "file_path": "src/webpage.rs", "rank": 3, "score": 46592.791076478774 }, { "content": "pub fn read_config(filename: &str) -> Config {\n\n use std::fs::File;\n\n\n\n let n = format!(\"{}/{}\", env!(\"CARGO_MANIFEST_DIR\"), filename);\n\n debug!(\"config file: {}\", n);\n\n let f = File::open(&n).unwrap();\n\n let mut c: Config = serde_json::from_reader(f).unwrap();\n\n c.main_systems.sort();\n\n c.other_systems.sort();\n\n c\n\n}\n\n\n\npub mod calculate;\n\npub mod data;\n\npub mod extdata;\n\npub mod fetch;\n\npub mod fetch_fact;\n\npub mod webdata;\n\npub mod webpage;\n\n\n", "file_path": "src/lib.rs", "rank": 4, "score": 42257.12593127366 }, { "content": "pub fn calculate(config: &Config, yesterday: bool) {\n\n info!(\"Calculating data\");\n\n let mut system_warnings = HashMap::new();\n\n let wanted_system_names = config.systems();\n\n info!(\"systems to handle: {:?}\", wanted_system_names);\n\n let datadir = config.datadir();\n\n let mut systems = HashMap::new();\n\n let mut dates = BTreeSet::new();\n\n let mut system_dates = Vec::new();\n\n for system_name in &wanted_system_names {\n\n info!(\"Looking at {}...\", system_name);\n\n let 
n = format!(\"{}/systems/{}.json\", datadir, system_name);\n\n let f = File::open(&n).unwrap();\n\n let s: data::System = serde_json::from_reader(&f).unwrap();\n\n if let Some(d) = s.latest_day() {\n\n dates.insert(d);\n\n system_dates.push((system_name, d));\n\n }\n\n let system: System = s.into();\n\n systems.insert(system_name.clone(), system);\n", "file_path": "src/calculate.rs", "rank": 5, "score": 41725.75865075439 }, { "content": " (None, None)\n\n }\n\n}\n\n\n\n#[derive(Debug, Deserialize, Serialize)]\n\npub struct FactionPresence {\n\n pub system_name: String,\n\n pub state: State,\n\n pub pending_states: Vec<StateTrend>,\n\n pub recovering_states: Vec<StateTrend>,\n\n pub influence: f64,\n\n}\n\n\n\nimpl From<ebgsv4::FactionPresence> for FactionPresence {\n\n fn from(p: ebgsv4::FactionPresence) -> FactionPresence {\n\n FactionPresence {\n\n system_name: p.system_name,\n\n state: p.state.into(),\n\n pending_states: p.pending_states.into_iter().map(|x| x.into()).collect(),\n\n recovering_states: p.recovering_states.into_iter().map(|x| x.into()).collect(),\n", "file_path": "src/data.rs", "rank": 6, "score": 22011.469228624588 }, { "content": " pub fn faction_state(&self) -> (State, Option<String>) {\n\n let mut p_state = State::None;\n\n for system in self.systems() {\n\n let state = self.last_state_in_system(&system);\n\n if state.is_single_system_state() {\n\n info!(\"XXX {} {} {:?}\", self.name, system, state);\n\n return (state, Some(system));\n\n }\n\n p_state = state;\n\n }\n\n (p_state, None)\n\n }\n\n\n\n pub fn systems(&self) -> Vec<String> {\n\n self.dynamic.presence.iter().map(|x| x.system_name.clone()).collect()\n\n }\n\n\n\n /// last known state of a faction in a particular system the faction has a presence in\n\n pub fn last_state_in_system(&self, system_name: &str) -> State {\n\n let mut state = State::None;\n", "file_path": "src/data.rs", "rank": 7, "score": 22011.389303252017 }, { "content": " // function of controlling minor faction\n\n pub 
allegiance: Allegiance,\n\n // function of controlling minor faction\n\n pub government: Government,\n\n // function of controlling minor faction\n\n factions: Vec<String>,\n\n pub history: Vec<SystemHistory>,\n\n}\n\n\n\n#[derive(Debug, Deserialize, Serialize)]\n\npub struct SystemHistory {\n\n pub updated_at: DateTime<Utc>,\n\n // pub updated_by: String, // typically EDDN, we don't care at this point\n\n pub controlling_minor_faction: String,\n\n pub state: State,\n\n // function of controlling minor faction\n\n pub government: Government,\n\n // function of controlling minor faction\n\n pub allegiance: Allegiance,\n\n // function of controlling minor faction\n", "file_path": "src/data.rs", "rank": 8, "score": 22011.19394158209 }, { "content": " pub security: Security,\n\n // actually dynamic, but by all practical means static now\n\n pub population: i64,\n\n // actually dynamic, but by all practical means static now\n\n pub dynamic: SystemDynamic,\n\n}\n\n\n\nimpl System {\n\n pub fn latest_day(&self) -> Option<Date<Utc>> {\n\n use std::iter;\n\n iter::once(self.dynamic.updated_at).chain(\n\n self.dynamic.history.iter().map(|x| x.updated_at)).map(|x| x.date()).max()\n\n }\n\n}\n\n\n\n#[derive(Debug, Deserialize, Serialize)]\n\npub struct SystemDynamic {\n\n pub updated_at: DateTime<Utc>,\n\n pub controlling_minor_faction: String,\n\n pub state: State,\n", "file_path": "src/data.rs", "rank": 9, "score": 22011.181664357413 }, { "content": "}\n\n\n\nimpl From<ebgsv4::EBGSState> for StateTrend {\n\n fn from(s: ebgsv4::EBGSState) -> StateTrend {\n\n StateTrend {\n\n state: s.state.into(),\n\n trend: Trend::from_i64(s.trend),\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug, Deserialize, Serialize)]\n\npub struct FactionHistory {\n\n pub updated_at: DateTime<Utc>,\n\n pub updated_by: String,\n\n pub ebgs_id: String,\n\n pub presence: FactionPresence,\n\n}\n\n\n\n\n", "file_path": "src/data.rs", "rank": 10, "score": 22010.473649587948 }, { "content": "#[derive(Debug, 
Deserialize, Serialize)]\n\npub struct FactionDynamic {\n\n pub eddbv3_updated_at: DateTime<Utc>,\n\n pub ebgsv4_updated_at: DateTime<Utc>,\n\n pub state: State,\n\n pub presence: Vec<FactionPresence>,\n\n pub history: Vec<FactionHistory>,\n\n}\n\n\n\nimpl Faction {\n\n pub fn from(eddb: eddbv3::Faction, ebgs: ebgsv4::Faction) -> Faction {\n\n // BUSY\n\n Faction {\n\n ebgs_eddbv3_id: eddb._id,\n\n name: eddb.name,\n\n government: eddb.government,\n\n allegiance: eddb.allegiance,\n\n home_system_id: eddb.home_system_id,\n\n is_player_faction: eddb.is_player_faction,\n\n dynamic: FactionDynamic {\n", "file_path": "src/data.rs", "rank": 11, "score": 22009.851406733393 }, { "content": "impl From<ebgsv4::FactionHistory> for FactionHistory {\n\n fn from(h: ebgsv4::FactionHistory) -> FactionHistory {\n\n FactionHistory {\n\n updated_at: h.updated_at.clone(),\n\n updated_by: h.updated_by.clone(),\n\n ebgs_id: h._id.clone(),\n\n presence: h.into(),\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug, Deserialize, Serialize)]\n\npub struct System {\n\n pub eddb_id: i64,\n\n pub ebgs_id: String,\n\n pub name: String,\n\n pub primary_economy: Economy,\n\n pub x: f64,\n\n pub y: f64,\n\n pub z: f64,\n", "file_path": "src/data.rs", "rank": 12, "score": 22009.56573146364 }, { "content": " pub fn is_single_system_state(&self) -> bool {\n\n match *self {\n\n State::Boom => false,\n\n State::Bust => false,\n\n State::CivilUnrest => false,\n\n State::Outbreak => false,\n\n State::Famine => false,\n\n State::Lockdown => false,\n\n State::None => false,\n\n State::Retreat => false,\n\n _ => true,\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug, Deserialize, Serialize, Clone, Copy)]\n\n#[serde(rename_all = \"lowercase\")]\n\npub enum Security {\n\n Medium,\n\n Low,\n", "file_path": "src/data.rs", "rank": 13, "score": 22008.343492053446 }, { "content": " pub trend: Trend,\n\n}\n\n\n\n#[derive(Debug, Deserialize, Serialize, Clone)]\n\n#[serde(rename_all = \"lowercase\")]\n\npub enum Trend {\n\n Up,\n\n 
Level,\n\n Down,\n\n}\n\n\n\nimpl Trend {\n\n fn from_i64(i: i64) -> Trend {\n\n match i {\n\n -1 => Trend::Down,\n\n 1 => Trend::Up,\n\n 0 => Trend::Level,\n\n _ => unreachable!(),\n\n }\n\n }\n", "file_path": "src/data.rs", "rank": 14, "score": 22008.098804731813 }, { "content": "// (c) 2018 Joost Yervante Damad <[email protected]>\n\n\n\nuse std::collections::BTreeSet;\n\n\n\nuse chrono::{DateTime, Utc, Date};\n\n\n\nuse extdata::ebgsv4;\n\nuse extdata::eddbv3;\n\n\n\n/// `Allegiance` of a `Faction`\n\n#[derive(Debug, Deserialize, Serialize, Clone, Copy)]\n\n#[serde(rename_all = \"lowercase\")]\n\npub enum Allegiance {\n\n Independent,\n\n Federation,\n\n Alliance,\n\n Empire,\n\n}\n\n\n\n// for factions\n", "file_path": "src/data.rs", "rank": 15, "score": 22007.84702540466 }, { "content": " if let Some(state) = self.last_pending_single_system_state_in_system(&system) {\n\n info!(\"XXX {} {} {:?}\", self.name, system, state);\n\n if state.is_single_system_state() {\n\n return (Some(state), Some(system));\n\n }\n\n }\n\n }\n\n (None, None)\n\n }\n\n\n\n // TODO: this doesn't work correctly if some data is dated\n\n pub fn faction_recovering_single_system_state(&self) -> (Option<State>, Option<String>) {\n\n for system in self.systems() {\n\n if let Some(state) = self.last_recovering_single_system_state_in_system(&system) {\n\n info!(\"YYY {} {} {:?}\", self.name, system, state);\n\n if state.is_single_system_state() {\n\n return (Some(state), Some(system));\n\n }\n\n }\n\n }\n", "file_path": "src/data.rs", "rank": 16, "score": 22007.822071209514 }, { "content": " let mut date = None;\n\n for h in &self.dynamic.history {\n\n if &h.presence.system_name == system_name {\n\n if let Some(d) = date {\n\n if h.updated_at > d {\n\n state = h.presence.recovering_states.iter().filter(|x| x.state.is_single_system_state()).next().map(|x| x.state);\n\n date = Some(h.updated_at);\n\n }\n\n } else {\n\n state = h.presence.recovering_states.iter().filter(|x| 
x.state.is_single_system_state()).next().map(|x| x.state);\n\n date = Some(h.updated_at);\n\n }\n\n }\n\n }\n\n state\n\n }\n\n\n\n // TODO: this doesn't work correctly if some data is dated\n\n pub fn faction_pending_single_system_state(&self) -> (Option<State>, Option<String>) {\n\n for system in self.systems() {\n", "file_path": "src/data.rs", "rank": 17, "score": 22007.1400095338 }, { "content": " influence: p.influence,\n\n }\n\n }\n\n}\n\n\n\nimpl From<ebgsv4::FactionHistory> for FactionPresence {\n\n fn from(h: ebgsv4::FactionHistory) -> FactionPresence {\n\n FactionPresence {\n\n system_name: h.system,\n\n state: h.state.into(),\n\n pending_states: h.pending_states.into_iter().map(|x| x.into()).collect(),\n\n recovering_states: h.recovering_states.into_iter().map(|x| x.into()).collect(),\n\n influence: h.influence,\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug, Deserialize, Serialize)]\n\npub struct StateTrend {\n\n pub state: State,\n", "file_path": "src/data.rs", "rank": 18, "score": 22006.670476449654 }, { "content": " pub factions: Vec<String>,\n\n}\n\n\n\nimpl From<ebgsv4::System> for System {\n\n fn from(s: ebgsv4::System) -> System {\n\n System {\n\n eddb_id: s.eddb_id,\n\n ebgs_id: s._id,\n\n name: s.name,\n\n population: s.population,\n\n x: s.x,\n\n y: s.y,\n\n z: s.z,\n\n security: s.security.into(),\n\n primary_economy: s.primary_economy.into(),\n\n dynamic: SystemDynamic {\n\n updated_at: s.updated_at,\n\n controlling_minor_faction: s.controlling_minor_faction,\n\n state: s.state.into(),\n\n allegiance: s.allegiance,\n", "file_path": "src/data.rs", "rank": 19, "score": 22006.626440694024 }, { "content": " ebgsv4::Economy::Tourism => Economy::Tourism,\n\n ebgsv4::Economy::HighTech => Economy::HighTech,\n\n ebgsv4::Economy::Terraforming => Economy::Terraforming,\n\n ebgsv4::Economy::Refinery => Economy::Refinery,\n\n ebgsv4::Economy::Military => Economy::Military,\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug, Deserialize, Serialize)]\n\npub struct Faction 
{\n\n pub ebgs_eddbv3_id: String,\n\n pub name: String,\n\n pub government: Government,\n\n pub allegiance: Allegiance,\n\n pub home_system_id: Option<i64>,\n\n pub is_player_faction: bool,\n\n pub dynamic: FactionDynamic,\n\n}\n\n\n", "file_path": "src/data.rs", "rank": 20, "score": 22006.358073560237 }, { "content": " eddbv3_updated_at: eddb.updated_at,\n\n ebgsv4_updated_at: ebgs.updated_at,\n\n state: eddb.state.into(),\n\n presence: ebgs.faction_presence.into_iter().map(|x| x.into()).collect(),\n\n history: ebgs.history.into_iter().map(|h| h.into()).collect(),\n\n },\n\n }\n\n }\n\n\n\n pub fn latest_day(&self, system_name: &str) -> Date<Utc> {\n\n let mut dates = BTreeSet::new();\n\n for h in &self.dynamic.history {\n\n if &h.presence.system_name == system_name {\n\n dates.insert(h.updated_at.date());\n\n }\n\n }\n\n dates.iter().max().unwrap().clone()\n\n }\n\n\n\n // TODO: this doesn't work correctly if some data is dated\n", "file_path": "src/data.rs", "rank": 21, "score": 22005.490022417445 }, { "content": " let mut state = None;\n\n let mut date = None;\n\n for h in &self.dynamic.history {\n\n if &h.presence.system_name == system_name {\n\n if let Some(d) = date {\n\n if h.updated_at > d {\n\n state = h.presence.pending_states.iter().filter(|x| x.state.is_single_system_state()).next().map(|x| x.state);\n\n date = Some(h.updated_at);\n\n }\n\n } else {\n\n state = h.presence.pending_states.iter().filter(|x| x.state.is_single_system_state()).next().map(|x| x.state);\n\n date = Some(h.updated_at);\n\n }\n\n }\n\n }\n\n state\n\n }\n\n\n\n pub fn last_recovering_single_system_state_in_system(&self, system_name: &str) -> Option<State> {\n\n let mut state = None;\n", "file_path": "src/data.rs", "rank": 22, "score": 22005.478958372296 }, { "content": " let mut date = None;\n\n // walk through the faction history, look at entries for the system we're interested in\n\n // take the newest entry state\n\n for h in &self.dynamic.history {\n\n if 
&h.presence.system_name == system_name {\n\n if let Some(d) = date {\n\n if h.updated_at > d {\n\n state = h.presence.state;\n\n date = Some(h.updated_at);\n\n }\n\n } else {\n\n date = Some(h.updated_at);\n\n state = h.presence.state;\n\n }\n\n }\n\n }\n\n state\n\n }\n\n\n\n pub fn last_pending_single_system_state_in_system(&self, system_name: &str) -> Option<State> {\n", "file_path": "src/data.rs", "rank": 23, "score": 22005.22988474184 }, { "content": " government: s.government.into(),\n\n factions: s.factions.iter().map(|x| x.name.clone()).collect(),\n\n history: s.history.into_iter().map(|x| x.into()).collect(),\n\n },\n\n }\n\n }\n\n}\n\n\n\nimpl From<ebgsv4::SystemHistory> for SystemHistory {\n\n fn from(s: ebgsv4::SystemHistory) -> SystemHistory {\n\n SystemHistory {\n\n updated_at: s.updated_at,\n\n controlling_minor_faction: s.controlling_minor_faction,\n\n state: s.state.into(),\n\n government: s.government.into(),\n\n allegiance: s.allegiance,\n\n factions: s.factions.iter().map(|x| x.name.clone()).collect(),\n\n }\n\n }\n\n}", "file_path": "src/data.rs", "rank": 24, "score": 22004.89889464972 }, { "content": "#[derive(Debug, Deserialize, Serialize, Clone, Copy, PartialEq)]\n\n#[serde(rename_all = \"lowercase\")]\n\npub enum Government {\n\n Anarchy,\n\n Corporate,\n\n Patronage,\n\n Communism,\n\n Confederacy,\n\n Cooperative,\n\n Democracy,\n\n Dictatorship,\n\n Feudal,\n\n Imperial,\n\n PrisonColony,\n\n Theocracy,\n\n Workshop,\n\n None,\n\n Engineer,\n\n}\n\n\n", "file_path": "src/data.rs", "rank": 25, "score": 22003.98939305948 }, { "content": " pub fn danger(&self) -> bool {\n\n match *self {\n\n State::Expansion => true,\n\n State::Investment => true,\n\n State::Retreat => true,\n\n _ => false,\n\n }\n\n }\n\n\n\n /// if a pending state is a dangerous situation\n\n pub fn pending_danger(&self) -> bool {\n\n match *self {\n\n State::Expansion => true,\n\n State::Investment => true,\n\n State::Retreat => true,\n\n _ => false,\n\n }\n\n }\n\n\n\n 
/// is this a state that only gets active in a single system\n", "file_path": "src/data.rs", "rank": 26, "score": 22003.71040401946 }, { "content": "}\n\n\n\n/// `State` of a `Faction`\n\n#[derive(Debug, Serialize, Clone, Copy, PartialEq, Eq, Hash)]\n\n#[serde(rename_all = \"lowercase\")]\n\npub enum State {\n\n None,\n\n Expansion,\n\n War,\n\n CivilWar,\n\n Election,\n\n Boom,\n\n Bust,\n\n CivilUnrest,\n\n Famine,\n\n Outbreak,\n\n Lockdown,\n\n Investment,\n\n Retreat,\n\n}\n", "file_path": "src/data.rs", "rank": 27, "score": 22001.99642167668 }, { "content": "\n\nimpl State {\n\n /// maximum length in days\n\n pub fn max_length(&self) -> u8 {\n\n match *self {\n\n State::None => 0,\n\n State::Expansion => 6,\n\n State::War => 21,\n\n State::CivilWar => 21,\n\n State::Election => 4,\n\n State::Boom => 28,\n\n State::Bust => 28,\n\n State::CivilUnrest => 7,\n\n State::Famine => 28,\n\n State::Outbreak => 28,\n\n State::Lockdown => 14,\n\n State::Investment => 5,\n\n State::Retreat => 6,\n\n }\n\n }\n", "file_path": "src/data.rs", "rank": 28, "score": 22001.033113385638 }, { "content": " High,\n\n Anarchy,\n\n Anarchy2,\n\n Lawless,\n\n}\n\n\n\nimpl From<ebgsv4::Security> for Security {\n\n fn from(s: ebgsv4::Security) -> Security {\n\n match s {\n\n ebgsv4::Security::Anarchy => Security::Anarchy,\n\n ebgsv4::Security::Medium => Security::Medium,\n\n ebgsv4::Security::Low => Security::Low,\n\n ebgsv4::Security::High => Security::High,\n\n ebgsv4::Security::Anarchy2 => Security::Anarchy2,\n\n ebgsv4::Security::Lawless => Security::Lawless,\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug, Deserialize, Serialize, Clone, Copy)]\n", "file_path": "src/data.rs", "rank": 29, "score": 22000.135757919823 }, { "content": "\n\n /// days recovery\n\n pub fn recovery(&self) -> u8 {\n\n match *self {\n\n State::None => 0,\n\n State::Expansion => 2,\n\n State::War => 1,\n\n State::CivilWar => 1,\n\n State::Election => 2,\n\n State::Boom => 3,\n\n State::Bust => 3,\n\n State::CivilUnrest 
=> 3,\n\n State::Famine => 7,\n\n State::Outbreak => 8,\n\n State::Lockdown => 1,\n\n State::Investment => 1,\n\n State::Retreat => 1,\n\n }\n\n }\n\n\n", "file_path": "src/data.rs", "rank": 30, "score": 21999.606481908137 }, { "content": " /// days pending\n\n pub fn pending(&self) -> u8 {\n\n match *self {\n\n State::None => 0,\n\n State::Expansion => 5,\n\n State::War => 3,\n\n State::CivilWar => 3,\n\n State::Election => 3,\n\n State::Boom => 1,\n\n State::Bust => 2,\n\n State::CivilUnrest => 1,\n\n State::Famine => 2,\n\n State::Outbreak => 1,\n\n State::Lockdown => 1,\n\n State::Investment => 0,\n\n State::Retreat => 1,\n\n }\n\n }\n\n\n\n /// if a state is a danger state\n", "file_path": "src/data.rs", "rank": 31, "score": 21999.441509333734 }, { "content": "impl From<ebgsv4::Government> for Government {\n\n fn from(g: ebgsv4::Government) -> Government {\n\n match g {\n\n ebgsv4::Government::Anarchy => Government::Anarchy,\n\n ebgsv4::Government::Corporate => Government::Corporate,\n\n ebgsv4::Government::Patronage => Government::Patronage,\n\n ebgsv4::Government::Communism => Government::Communism,\n\n ebgsv4::Government::Confederacy => Government::Confederacy,\n\n ebgsv4::Government::Cooperative => Government::Cooperative,\n\n ebgsv4::Government::Democracy => Government::Democracy,\n\n ebgsv4::Government::Dictatorship => Government::Dictatorship,\n\n ebgsv4::Government::Feudal => Government::Feudal,\n\n ebgsv4::Government::Imperial => Government::Imperial,\n\n ebgsv4::Government::PrisonColony => Government::PrisonColony,\n\n ebgsv4::Government::Theocracy => Government::Theocracy,\n\n ebgsv4::Government::Workshop => Government::Workshop,\n\n ebgsv4::Government::None => Government::None,\n\n ebgsv4::Government::Engineer => Government::Engineer,\n\n }\n\n }\n", "file_path": "src/data.rs", "rank": 32, "score": 21998.945181602856 }, { "content": "\n\nimpl From<ebgsv4::State> for State {\n\n fn from(s: ebgsv4::State) -> State {\n\n match s {\n\n 
ebgsv4::State::None => State::None,\n\n ebgsv4::State::Expansion => State::Expansion,\n\n ebgsv4::State::War => State::War,\n\n ebgsv4::State::CivilWar => State::CivilWar,\n\n ebgsv4::State::Election => State::Election,\n\n ebgsv4::State::Boom => State::Boom,\n\n ebgsv4::State::Bust => State::Bust,\n\n ebgsv4::State::CivilUnrest => State::CivilUnrest,\n\n ebgsv4::State::Famine => State::Famine,\n\n ebgsv4::State::Outbreak => State::Outbreak,\n\n ebgsv4::State::Lockdown => State::Lockdown,\n\n ebgsv4::State::Investment => State::Investment,\n\n ebgsv4::State::Retreat => State::Retreat,\n\n }\n\n }\n\n}\n", "file_path": "src/data.rs", "rank": 33, "score": 21998.63936452484 }, { "content": "#[serde(rename_all = \"lowercase\")]\n\npub enum Economy {\n\n Industrial,\n\n Extraction,\n\n Colony,\n\n Agriculture,\n\n Tourism,\n\n HighTech,\n\n Terraforming,\n\n Refinery,\n\n Military,\n\n}\n\n\n\nimpl From<ebgsv4::Economy> for Economy {\n\n fn from(e: ebgsv4::Economy) -> Economy {\n\n match e {\n\n ebgsv4::Economy::Agriculture => Economy::Agriculture,\n\n ebgsv4::Economy::Extraction => Economy::Extraction,\n\n ebgsv4::Economy::Colony => Economy::Colony,\n\n ebgsv4::Economy::Industrial => Economy::Industrial,\n", "file_path": "src/data.rs", "rank": 34, "score": 21995.873353285817 }, { "content": " pub government: Government,\n\n pub factions: Vec<SystemPresence>,\n\n pub history: Vec<SystemHistory>,\n\n}\n\n\n\n#[derive(Debug, Deserialize, Serialize)]\n\npub struct SystemPresence {\n\n pub name: String,\n\n pub name_lower: String,\n\n}\n\n\n\n#[derive(Debug, Deserialize, Serialize)]\n\npub struct SystemHistory {\n\n pub controlling_minor_faction: String,\n\n pub security: Security,\n\n pub updated_at: DateTime<Utc>,\n\n pub state: State,\n\n pub government: Government,\n\n pub population: i64,\n\n pub updated_by: String,\n", "file_path": "src/extdata/ebgsv4.rs", "rank": 38, "score": 26.100022754107005 }, { "content": "\n\n#[derive(Debug, Deserialize, Serialize)]\n\npub 
struct Faction {\n\n pub eddb_id: i64,\n\n pub government: data::Government,\n\n pub name: String,\n\n pub _id: String,\n\n pub name_lower: String,\n\n //pub is_player_faction:bool,\n\n pub updated_at: DateTime<Utc>,\n\n pub faction_presence: Vec<FactionPresence>,\n\n pub allegiance: data::Allegiance,\n\n pub history: Vec<FactionHistory>,\n\n}\n\n\n\n#[derive(Debug, Deserialize, Serialize)]\n\npub struct FactionPresence {\n\n pub system_name: String,\n\n pub state: State,\n\n pub pending_states: Vec<EBGSState>,\n", "file_path": "src/extdata/ebgsv4.rs", "rank": 40, "score": 25.29890618404631 }, { "content": "// (c) 2018 Joost Yervante Damad <[email protected]>\n\n\n\nuse chrono::{DateTime, Utc};\n\n\n\nuse extdata::ebgsv4::{State, EBGSPage};\n\nuse data::Allegiance;\n\nuse data::Government;\n\n\n\npub const URL: &'static str = \"https://elitebgs.kodeblox.com/api/eddb/v3/\";\n\n\n\npub type FactionPage = EBGSPage<Faction>;\n\n\n\n#[derive(Debug, Deserialize, Serialize, Clone)]\n\npub struct Faction {\n\n pub _id: String,\n\n pub name_lower: String,\n\n pub name: String,\n\n pub updated_at: DateTime<Utc>,\n\n pub government_id: u8,\n\n pub government: Government,\n\n pub allegiance_id: u8,\n\n pub allegiance: Allegiance,\n\n pub state_id: u8,\n\n pub state: State,\n\n pub home_system_id: Option<i64>,\n\n pub is_player_faction: bool,\n\n}\n", "file_path": "src/extdata/eddbv3.rs", "rank": 44, "score": 22.634379690733137 }, { "content": " pub recovering_states: Vec<EBGSState>,\n\n pub influence: f64,\n\n pub system_name_lower: String,\n\n}\n\n\n\n#[derive(Debug, Deserialize, Serialize)]\n\npub struct EBGSState {\n\n pub state: State,\n\n pub trend: i64,\n\n}\n\n\n\n#[derive(Debug, Deserialize, Serialize)]\n\npub struct FactionHistory {\n\n pub system: String,\n\n pub state: State,\n\n pub updated_at: DateTime<Utc>,\n\n pub system_lower: String,\n\n pub updated_by: String,\n\n pub pending_states: Vec<EBGSState>,\n\n pub recovering_states: Vec<EBGSState>,\n", "file_path": 
"src/extdata/ebgsv4.rs", "rank": 45, "score": 22.132890402619235 }, { "content": " pub _id: String,\n\n pub influence: f64,\n\n}\n\n\n\n#[derive(Debug, Deserialize, Serialize)]\n\npub struct System {\n\n pub eddb_id: i64,\n\n pub name_lower: String,\n\n pub allegiance: data::Allegiance,\n\n pub _id: String,\n\n pub population: i64,\n\n pub x: f64,\n\n pub y: f64,\n\n pub z: f64,\n\n pub updated_at: DateTime<Utc>,\n\n pub state: State,\n\n pub security: Security,\n\n pub controlling_minor_faction: String,\n\n pub primary_economy: Economy,\n\n pub name: String,\n", "file_path": "src/extdata/ebgsv4.rs", "rank": 47, "score": 19.96670654433633 }, { "content": "// (c) 2018 Joost Yervante Damad <[email protected]>\n\n\n\nuse chrono::{DateTime, Utc};\n\nuse serde::de::{self, Deserialize, Deserializer};\n\n\n\nuse data;\n\n\n\npub const URL:&'static str = \"https://elitebgs.kodeblox.com/api/ebgs/v4/\";\n\n\n\n#[derive(Debug, Deserialize)]\n\npub struct EBGSPage<T> {\n\n pub docs: Vec<T>,\n\n pub page: i64,\n\n pub pages: i64,\n\n pub total: i64,\n\n pub limit: i64,\n\n}\n\n\n\npub type FactionsPage = EBGSPage<Faction>;\n\npub type SystemsPage = EBGSPage<System>;\n", "file_path": "src/extdata/ebgsv4.rs", "rank": 48, "score": 19.66125636466062 }, { "content": " for system in systems.values() {\n\n for faction in system.factions.values() {\n\n for e in &faction.evolution {\n\n dates.insert(e.date.date());\n\n }\n\n }\n\n }\n\n let dates_vec: Vec<Date<Utc>> = dates.iter().cloned().collect();\n\n\n\n for system in &mut systems.values_mut() {\n\n for faction in &mut system.factions.values_mut() {\n\n faction.cleanup_evolution(&dates_vec);\n\n faction.fill_in_state_days();\n\n faction.fill_in_evolution10(&dates_vec);\n\n faction.fill_in_state_other_system();\n\n faction.global = global_factions.get(&faction.name).map(|x| x.clone());\n\n // remove systemname from global if it is the local system\n\n /* TODO: don't special case...\n\n if let Some(ref mut gl) = faction.global {\n\n 
let mut go = false;\n", "file_path": "src/calculate.rs", "rank": 50, "score": 19.616447163205883 }, { "content": " }\n\n if system.controlling.to_lowercase() == faction_template.name.to_lowercase() {\n\n faction.controlling = true;\n\n }\n\n let inf = history.presence.influence * 100.0;\n\n let mut data: FactionData = history.into();\n\n if !at_home && inf < 2.5 {\n\n data.influence_danger = true;\n\n }\n\n if inf >= 75.0 {\n\n data.influence_danger = true;\n\n }\n\n faction.evolution.push(data);\n\n }\n\n }\n\n }\n\n let mut faction_colors: HashMap<String, String> = HashMap::new();\n\n faction_colors.insert(\"The Order of Mobius\".into(), \"black\".into());\n\n\n\n let mut dates = BTreeSet::new();\n", "file_path": "src/calculate.rs", "rank": 51, "score": 19.041904368944557 }, { "content": " if let Some(ref mut s2) = gl.state_system {\n\n if s2 == &system.name {\n\n go = true;\n\n }\n\n }\n\n if go {\n\n gl.state_system = None;\n\n }\n\n }\n\n */\n\n }\n\n let mut colors: BTreeSet<String> = vec![\"blue\", \"green\", \"cyan\", \"orange\",\n\n \"pink\", \"grey\", \"magenta\", \"yellow\", \"red\"].into_iter().map(|x| x.into()).collect();\n\n // first fill in using registered colors, but only if no duplicates\n\n for faction in &mut system.factions.values_mut() {\n\n if faction.name == \"The Order of Mobius\" {\n\n faction.color = \"black\".into();\n\n continue;\n\n }\n\n if let Some(color) = faction_colors.get(&faction.name) {\n", "file_path": "src/calculate.rs", "rank": 52, "score": 18.697154300601785 }, { "content": " system.factions_by_inf.sort_by(\n\n |a, b| b.latest_inf().cmp(&a.latest_inf())\n\n );\n\n }\n\n\n\n let n = format!(\"{}/report.json\", datadir);\n\n let f = File::create(&n).unwrap();\n\n\n\n // order by order in Config\n\n let mut s2: Vec<System> = vec![];\n\n for name in &wanted_system_names {\n\n let mut system = systems.remove(name).unwrap();\n\n system.warnings = system_warnings.remove(name).unwrap_or(vec![]);\n\n s2.push(system)\n\n }\n\n\n\n 
let dates: Vec<String> = dates_vec.iter().skip(1).map(|e| format!(\"{}\", e.format(\"%d/%m\"))).collect();\n\n let dates2 = dates.clone();\n\n let dates10 = dates.as_slice();\n\n let dates10 = dates10.windows(10).last().unwrap().to_vec();\n", "file_path": "src/calculate.rs", "rank": 53, "score": 18.204332154226577 }, { "content": "\n\nimpl Config {\n\n pub fn systems(&self) -> Vec<String> {\n\n let mut v = self.main_systems.clone();\n\n for s in &self.other_systems { v.push(s.clone()) }\n\n v\n\n }\n\n pub fn datadir(&self) -> String {\n\n format!(\"{}/data/{}\", env!(\"CARGO_MANIFEST_DIR\"), self.short_name)\n\n }\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 54, "score": 18.090800363729436 }, { "content": " //println!(\"Found existing color: {} for {}\", color, faction.name);\n\n if colors.contains(color) {\n\n //println!(\"Using existing\");\n\n faction.color = color.clone();\n\n colors.remove(color);\n\n } else {\n\n //println!(\"Skipping existing\");\n\n }\n\n }\n\n }\n\n let mut it = colors.into_iter();\n\n for faction in &mut system.factions.values_mut() {\n\n if faction.color.is_empty() {\n\n faction.color = it.next().unwrap().into();\n\n faction_colors.insert(faction.name.clone(), faction.color.clone());\n\n }\n\n }\n\n for faction in system.factions.values() {\n\n system.factions_by_inf.push(faction.clone())\n\n }\n", "file_path": "src/calculate.rs", "rank": 55, "score": 16.51767829920487 }, { "content": " pub allegiance: data::Allegiance,\n\n pub factions: Vec<SystemPresence>,\n\n}\n\n\n\n// `State` of a `Faction`\n\n#[derive(Debug, Serialize, Clone, Copy, PartialEq, Eq, Hash)]\n\npub enum State {\n\n None,\n\n Expansion,\n\n War,\n\n CivilWar,\n\n Election,\n\n Boom,\n\n Bust,\n\n CivilUnrest,\n\n Famine,\n\n Outbreak,\n\n Lockdown,\n\n Investment,\n\n Retreat,\n", "file_path": "src/extdata/ebgsv4.rs", "rank": 56, "score": 16.279964865341796 }, { "content": " }\n\n // info!(\"systems: {:?}\", systems);\n\n\n\n info!(\"dates: {:?}\", dates);\n\n let 
bgs_day = if !yesterday {\n\n dates.iter().max().unwrap().clone()\n\n } else {\n\n dates.iter().max().unwrap().pred()\n\n };\n\n info!(\"Current BGS day: {}\", bgs_day);\n\n for &(system, date) in &system_dates {\n\n if bgs_day != date {\n\n warn!(\"System is not up to date: {}: {} {}\", system, bgs_day, date);\n\n //warnings.push(format!(\"System is not up to date: {}\", system));\n\n }\n\n }\n\n\n\n let mut global_factions = HashMap::new();\n\n\n\n let minor_faction_names: BTreeSet<String> = {\n", "file_path": "src/calculate.rs", "rank": 60, "score": 15.60836607408843 }, { "content": " if !wanted_system_names.contains(&system_name) {\n\n continue\n\n }\n\n if data_faction.latest_day(&system_name) != bgs_day {\n\n warn!(\"Faction {} is not up to date in {}: {} {}\", minor_faction_name, system_name, bgs_day, data_faction.latest_day(&system_name));\n\n let v = system_warnings.entry(system_name.clone()).or_insert(vec![]);\n\n v.push(format!(\"Faction {} is not up to date in {}\", minor_faction_name, system_name));\n\n }\n\n }\n\n let faction_template: Faction = (&data_faction).into();\n\n for history in data_faction.dynamic.history {\n\n let mut at_home = false;\n\n // could be that the system is not in our system list...\n\n if let Some(system) = systems.get_mut(&history.presence.system_name) {\n\n let faction = system.factions.entry(faction_template.name.clone()).or_insert(faction_template.clone());\n\n if let Some(home_id) = data_faction.home_system_id {\n\n if system.eddb_id == home_id {\n\n faction.at_home = true;\n\n at_home = true;\n\n }\n", "file_path": "src/calculate.rs", "rank": 61, "score": 15.546270537603938 }, { "content": " #[serde(rename = \"$government_feudal;\")]\n\n Feudal,\n\n #[serde(rename = \"$government_imperial;\")]\n\n Imperial,\n\n #[serde(rename = \"$government_prison_colony;\")]\n\n PrisonColony,\n\n #[serde(rename = \"$government_theocracy;\")]\n\n Theocracy,\n\n #[serde(rename = \"$government_workshop;\")]\n\n Workshop,\n\n 
#[serde(rename = \"$government_none;\")]\n\n None,\n\n #[serde(rename = \"$government_engineer;\")]\n\n Engineer,\n\n}\n\n\n\n// custom deserializer needed for state to deal with civil unrest vs civilunrest\n\nimpl<'de> Deserialize<'de> for State {\n\n fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>\n\n where\n", "file_path": "src/extdata/ebgsv4.rs", "rank": 62, "score": 14.939128771077598 }, { "content": " for faction in &system_data.factions {\n\n minor_factions.insert(faction.name.clone());\n\n }\n\n // historical factions in system\n\n for history in &system_data.history {\n\n for faction in &history.factions {\n\n minor_factions.insert(faction.name.clone());\n\n }\n\n }\n\n\n\n let system_data:data::System = system_data.into();\n\n let n = format!(\"{}/systems/{}.json\", datadir, system_name);\n\n let mut f = File::create(&n).unwrap();\n\n serde_json::to_writer_pretty(&f, &system_data).unwrap();\n\n }\n\n // we assume the latest day is bgs day\n\n let day = all_dates.iter().max().unwrap();\n\n info!(\"Current BGS day: {}\", day);\n\n // report systems that are older\n\n for &(system,date) in &system_dates {\n", "file_path": "src/fetch.rs", "rank": 63, "score": 14.775959376014246 }, { "content": " }\n\n}\n\n\n\n#[derive(Debug, Deserialize, Serialize, Clone, Copy)]\n\npub enum Security {\n\n #[serde(rename = \"$system_security_medium;\")]\n\n Medium,\n\n #[serde(rename = \"$system_security_low;\")]\n\n Low,\n\n #[serde(rename = \"$system_security_high;\")]\n\n High,\n\n #[serde(rename = \"$system_security_anarchy;\")]\n\n Anarchy,\n\n #[serde(rename = \"$galaxy_map_info_state_anarchy;\")]\n\n Anarchy2,\n\n #[serde(rename = \"$system_security_lawless;\")]\n\n Lawless,\n\n}\n\n\n\n#[derive(Debug,Deserialize, Serialize, Clone, Copy)]\n", "file_path": "src/extdata/ebgsv4.rs", "rank": 64, "score": 14.76508238391091 }, { "content": "// (c) 2018 Joost Yervante Damad <[email protected]>\n\n\n\nextern crate chrono;\n\n#[macro_use]\n\nextern crate 
log;\n\nextern crate reqwest;\n\nextern crate serde;\n\n#[macro_use]\n\nextern crate serde_derive;\n\nextern crate serde_json;\n\n#[macro_use]\n\nextern crate tera;\n\n\n\n#[derive(Debug, Deserialize)]\n\npub struct Config {\n\n pub short_name: String,\n\n pub report_name: String,\n\n pub main_systems: Vec<String>,\n\n pub other_systems: Vec<String>,\n\n}\n", "file_path": "src/lib.rs", "rank": 66, "score": 14.19097521330027 }, { "content": " let mut system_dates = Vec::new();\n\n for system_name in &system_names {\n\n // fetch data\n\n let url = format!(\"{}systems?name={}&timemin={}&timemax={}\", ebgsv4::URL, system_name, then, now);\n\n //info!(\"url: {}\", url);\n\n let ebgs_system_data = client.get(&url).send().unwrap().text().unwrap();\n\n //info!(\"data: {}\", ebgs_system_data);\n\n let mut systems_page:ebgsv4::SystemsPage = serde_json::from_str(&ebgs_system_data).unwrap();\n\n let system_data = systems_page.docs.remove(0);\n\n let update_date = system_data.updated_at.date();\n\n\n\n // store to file\n\n create_dir_all(format!(\"{}/systems/ebgsv4\", datadir)).unwrap();\n\n let n = format!(\"{}/systems/ebgsv4/{}.json\", datadir, system_name);\n\n let mut f = File::create(&n).unwrap();\n\n serde_json::to_writer_pretty(&f, &system_data).unwrap();\n\n // collect dates\n\n all_dates.insert(update_date);\n\n system_dates.push((system_name, update_date));\n\n // present factions in system\n", "file_path": "src/fetch.rs", "rank": 67, "score": 13.496132661109792 }, { "content": " let n = format!(\"{}/minor_factions.json\", datadir);\n\n let f = File::open(&n).unwrap();\n\n serde_json::from_reader(&f).unwrap()\n\n };\n\n\n\n for minor_faction_name in minor_faction_names {\n\n info!(\"Looking at {}...\", minor_faction_name);\n\n let data_faction: data::Faction = {\n\n let n = format!(\"{}/factions/{}.json\", datadir, minor_faction_name);\n\n let f = File::open(&n).unwrap();\n\n serde_json::from_reader(&f).unwrap()\n\n };\n\n if data_faction.government == 
Government::Engineer {\n\n continue;\n\n }\n\n\n\n let fgs: FactionGlobalState = (&data_faction).into();\n\n global_factions.insert(minor_faction_name.clone(), fgs);\n\n\n\n for system_name in data_faction.systems() {\n", "file_path": "src/calculate.rs", "rank": 69, "score": 12.88362798076286 }, { "content": " D: Deserializer<'de>,\n\n {\n\n let s = String::deserialize(deserializer)?.to_lowercase();\n\n let state = match s.as_str() {\n\n \"expansion\" => State::Expansion,\n\n \"war\" => State::War,\n\n \"civil unrest\" | \"civilunrest\" => State::CivilUnrest,\n\n \"civil war\" | \"civilwar\" => State::CivilWar,\n\n \"election\" => State::Election,\n\n \"boom\" => State::Boom,\n\n \"bust\" => State::Bust,\n\n \"famine\" => State::Famine,\n\n \"lockdown\" => State::Lockdown,\n\n \"investment\" => State::Investment,\n\n \"retreat\" => State::Retreat,\n\n \"outbreak\" => State::Outbreak,\n\n \"none\" => State::None,\n\n other => { return Err(de::Error::custom(format!(\"Invalid state '{}'\", other))); },\n\n };\n\n Ok(state)\n", "file_path": "src/extdata/ebgsv4.rs", "rank": 70, "score": 12.170292548291451 }, { "content": "// (c) 2018 Joost Yervante Damad <[email protected]>\n\n\n\nuse chrono::{Date, Utc};\n\nuse serde_json;\n\n\n\nuse std::collections::{BTreeSet, HashMap};\n\nuse std::fs::File;\n\n\n\nuse data;\n\nuse webdata::*;\n\nuse data::Government;\n\nuse Config;\n\n\n", "file_path": "src/calculate.rs", "rank": 72, "score": 11.417151990865488 }, { "content": " let systems = Systems {\n\n report_name: config.report_name.clone(),\n\n systems: s2,\n\n dates: dates2,\n\n dates10: dates10,\n\n bgs_day: format!(\"{}\", bgs_day.format(\"%d/%m\")),\n\n factions: global_factions,\n\n };\n\n serde_json::to_writer_pretty(&f, &systems).unwrap();\n\n}\n", "file_path": "src/calculate.rs", "rank": 73, "score": 11.180554891608677 }, { "content": "}\n\n\n\n#[derive(Debug,Deserialize, Serialize, Clone, Copy)]\n\npub enum Government {\n\n #[serde(rename = 
\"$government_corporate;\")]\n\n Corporate,\n\n #[serde(rename = \"$government_cooperative;\")]\n\n Cooperative,\n\n #[serde(rename = \"$government_patronage;\")]\n\n Patronage,\n\n #[serde(rename = \"$government_democracy;\")]\n\n Democracy,\n\n #[serde(rename = \"$government_dictatorship;\")]\n\n Dictatorship,\n\n #[serde(rename = \"$government_anarchy;\")]\n\n Anarchy,\n\n #[serde(rename = \"$government_communism;\")]\n\n Communism,\n\n #[serde(rename = \"$government_confederacy;\")]\n\n Confederacy,\n", "file_path": "src/extdata/ebgsv4.rs", "rank": 74, "score": 10.547940999001387 }, { "content": " if *day != date {\n\n warn!(\"System is not up to date: {}\", system);\n\n }\n\n }\n\n\n\n // write list of minor factions to file\n\n info!(\"Minor factions involved: {:?}\", minor_factions); \n\n let n = format!(\"{}/minor_factions.json\", datadir);\n\n let f = File::create(&n).unwrap();\n\n serde_json::to_writer_pretty(&f, &minor_factions).unwrap();\n\n}\n", "file_path": "src/fetch.rs", "rank": 75, "score": 9.148067982750709 }, { "content": "// (c) 2018 Joost Yervante Damad <[email protected]>\n\n\n\nuse chrono::Utc;\n\nuse reqwest;\n\nuse serde_json;\n\n\n\nuse std::collections::BTreeSet;\n\nuse std::fs::{create_dir_all, File};\n\n\n\nuse extdata::ebgsv4;\n\nuse data;\n\nuse Config;\n\n\n", "file_path": "src/fetch.rs", "rank": 76, "score": 7.30112632993503 }, { "content": " create_dir_all(format!(\"{}/factions/eddbv3\", datadir)).unwrap();\n\n let n = format!(\"{}/factions/eddbv3/{}.json\", datadir, faction_name);\n\n let mut f = File::create(&n).unwrap();\n\n serde_json::to_writer_pretty(&f, &eddb_faction).unwrap();\n\n\n\n\n\n // then fetch ebgs data\n\n let url = format!(\"{}factions?name={}&timemin={}&timemax={}\", ebgsv4::URL, faction_name, then, now);\n\n let res = client.get(&url).send().unwrap().text().unwrap();\n\n let mut faction_page:ebgsv4::FactionsPage = serde_json::from_str(&res).unwrap();\n\n let ebgs_faction = faction_page.docs.remove(0);\n\n 
create_dir_all(format!(\"{}/factions/ebgsv4\", datadir)).unwrap();\n\n let n = format!(\"{}/factions/ebgsv4/{}.json\", datadir, faction_name);\n\n let mut f = File::create(&n).unwrap();\n\n serde_json::to_writer_pretty(&f, &ebgs_faction).unwrap();\n\n\n\n // combine to our data\n\n let faction:data::Faction = data::Faction::from(eddb_faction, ebgs_faction);\n\n let n = format!(\"{}/factions/{}.json\", datadir, faction_name);\n\n let mut f = File::create(&n).unwrap();\n\n serde_json::to_writer_pretty(&f, &faction).unwrap();\n\n }\n\n}\n", "file_path": "src/fetch_fact.rs", "rank": 77, "score": 7.089633734773406 }, { "content": "// (c) 2018 Joost Yervante Damad <[email protected]>\n\n\n\nuse reqwest;\n\n\n\nuse serde_json;\n\n\n\nuse chrono::Utc;\n\nuse std::time::Duration;\n\n\n\nuse std::collections::BTreeSet;\n\nuse std::fs::{create_dir_all, File};\n\n\n\nuse extdata::eddbv3;\n\nuse extdata::ebgsv4;\n\nuse data;\n\nuse Config;\n\n\n", "file_path": "src/fetch_fact.rs", "rank": 78, "score": 7.029701648738147 }, { "content": "// (c) 2018 Joost Yervante Damad <[email protected]>\n\n\n\nuse serde_json;\n\nuse std::fs::{create_dir_all, File, read_dir, copy};\n\nuse std::path::PathBuf;\n\nuse std::io::Write;\n\nuse webdata::*;\n\nuse Config;\n\n\n", "file_path": "src/webpage.rs", "rank": 79, "score": 3.510615375509019 }, { "content": "// (c) 2018 Joost Yervante Damad <[email protected]>\n\n\n\npub mod eddbv3;\n\npub mod ebgsv4;\n", "file_path": "src/extdata/mod.rs", "rank": 80, "score": 2.649989359384133 }, { "content": "pub enum Economy {\n\n #[serde(rename = \"$economy_industrial;\")]\n\n Industrial,\n\n #[serde(rename = \"$economy_extraction;\")]\n\n Extraction,\n\n #[serde(rename = \"$economy_colony;\")]\n\n Colony,\n\n #[serde(rename = \"$economy_agri;\")]\n\n Agriculture,\n\n #[serde(rename = \"$economy_tourism;\")]\n\n Tourism,\n\n #[serde(rename = \"$economy_hightech;\")]\n\n HighTech,\n\n #[serde(rename = \"$economy_terraforming;\")]\n\n Terraforming,\n\n 
#[serde(rename = \"$economy_refinery;\")]\n\n Refinery,\n\n #[serde(rename = \"$economy_military;\")]\n\n Military,\n\n}", "file_path": "src/extdata/ebgsv4.rs", "rank": 81, "score": 1.2940187546844895 } ]
Rust
crossterm_input/src/input/input.rs
defiori/crossterm
3d92f62be470973772e2d3d83fd672c2613f0f87
use super::*; use std::{thread, time::Duration}; use crossterm_utils::TerminalOutput; pub struct TerminalInput<'stdout> { terminal_input: Box<ITerminalInput + Sync + Send>, stdout: Option<&'stdout Arc<TerminalOutput>>, } impl<'stdout> TerminalInput<'stdout> { pub fn new() -> TerminalInput<'stdout> { #[cfg(target_os = "windows")] let input = Box::from(WindowsInput::new()); #[cfg(not(target_os = "windows"))] let input = Box::from(UnixInput::new()); TerminalInput { terminal_input: input, stdout: None, } } pub fn from_output(stdout: &'stdout Arc<TerminalOutput>) -> TerminalInput<'stdout> { #[cfg(target_os = "windows")] let input = Box::from(WindowsInput::new()); #[cfg(not(target_os = "windows"))] let input = Box::from(UnixInput::new()); TerminalInput { terminal_input: input, stdout: Some(stdout), } } pub fn read_line(&self) -> io::Result<String> { if let Some(stdout) = self.stdout { if stdout.is_in_raw_mode { return Err(Error::new(ErrorKind::Other, "Crossterm does not support readline in raw mode this should be done instead whit `read_async` or `read_async_until`")); } } let mut rv = String::new(); io::stdin().read_line(&mut rv)?; let len = rv.trim_right_matches(&['\r', '\n'][..]).len(); rv.truncate(len); Ok(rv) } pub fn read_char(&self) -> io::Result<char> { self.terminal_input.read_char(&self.stdout) } pub fn read_async(&self) -> AsyncReader { self.terminal_input.read_async(&self.stdout) } pub fn read_until_async(&self, delimiter: u8) -> AsyncReader { self.terminal_input .read_until_async(delimiter, &self.stdout) } pub fn wait_until(&self, key_event: KeyEvent) { let mut stdin = self.read_async().bytes(); loop { let pressed_key: Option<Result<u8, Error>> = stdin.next(); match pressed_key { Some(Ok(value)) => match key_event { KeyEvent::OnKeyPress(ascii_code) => { if value == ascii_code { break; } } KeyEvent::OnEnter => { if value == b'\r' { break; } } KeyEvent::OnAnyKeyPress => { break; } }, _ => {} } thread::sleep(Duration::from_millis(10)); } } } pub fn 
input<'stdout>() -> TerminalInput<'stdout> { TerminalInput::new() }
use super::*; use std::{thread, time::Duration}; use crossterm_utils::TerminalOutput; pub struct TerminalInput<'stdout> { terminal_input: Box<ITerminalInput + Sync + Send>, stdout: Option<&'stdout Arc<TerminalOutput>>, } impl<'stdout> TerminalInput<'stdout> { pub fn new() -> TerminalInput<'stdout> { #[cfg(target_os = "windows")] let input = Box::from(WindowsInput::new()); #[cfg(not(target_os = "windows"))] let input = Box::from(UnixInput::new()); TerminalInput { terminal_input: input, stdout: None, } } pub fn from_output(stdout: &'stdout Arc<TerminalOutput>) -> TerminalInput<'stdout> { #[cfg(target_os = "windows")] let input = Box::from(WindowsInput::new()); #[cfg(not(target_os = "windows"))] let input = Box::from(UnixInput::new()); TerminalInput { terminal_input: input, stdout: Some(stdout), } } pub fn read_line(&self) -> io::Result<String> { if let Some(stdout) = self.stdout { if stdout.is_in_raw_mode { return Err(Error::new(ErrorKind::Other, "Crossterm does not support readline in raw mode this should be done instead whit `read_async` or `read_async_until`")); } } let mut rv = String::new();
pub fn read_until_async(&self, delimiter: u8) -> AsyncReader { self.terminal_input .read_until_async(delimiter, &self.stdout) } pub fn wait_until(&self, key_event: KeyEvent) { let mut stdin = self.read_async().bytes(); loop { let pressed_key: Option<Result<u8, Error>> = stdin.next(); match pressed_key { Some(Ok(value)) => match key_event { KeyEvent::OnKeyPress(ascii_code) => { if value == ascii_code { break; } } KeyEvent::OnEnter => { if value == b'\r' { break; } } KeyEvent::OnAnyKeyPress => { break; } }, _ => {} } thread::sleep(Duration::from_millis(10)); } } } pub fn input<'stdout>() -> TerminalInput<'stdout> { TerminalInput::new() }
io::stdin().read_line(&mut rv)?; let len = rv.trim_right_matches(&['\r', '\n'][..]).len(); rv.truncate(len); Ok(rv) } pub fn read_char(&self) -> io::Result<char> { self.terminal_input.read_char(&self.stdout) } pub fn read_async(&self) -> AsyncReader { self.terminal_input.read_async(&self.stdout) }
random
[ { "content": "pub fn raw_modes() {\n\n // create a Screen instance who operates on the default output; io::stdout().\n\n let screen = Screen::default();\n\n\n\n // create a Screen instance who operates on the default output; io::stdout(). By passing in 'true' we make this screen 'raw'\n\n let screen = Screen::new(true);\n\n\n\n drop(screen); // <-- by dropping the screen raw modes will be disabled.\n\n}\n", "file_path": "crossterm_screen/examples/raw_mode.rs", "rank": 1, "score": 235689.42020383675 }, { "content": "// This trait provides an interface for switching to raw mode and back.\n\npub trait IRawScreenCommand: Sync + Send {\n\n fn enable(&mut self) -> io::Result<()>;\n\n fn disable(&self) -> io::Result<()>;\n\n}\n", "file_path": "crossterm_screen/src/sys/mod.rs", "rank": 2, "score": 235187.583929438 }, { "content": "// This trait provides an interface for switching to raw mode and back.\n\npub trait IRawScreenCommand: Sync + Send {\n\n fn enable(&mut self) -> io::Result<()>;\n\n fn disable(&self) -> io::Result<()>;\n\n}\n", "file_path": "crossterm_utils/src/commands/mod.rs", "rank": 3, "score": 235187.583929438 }, { "content": "pub fn print_wait_screen_on_alternate_window() {\n\n let screen = Screen::default();\n\n\n\n // by passing in 'true' the alternate screen will be in raw modes.\n\n if let Ok(ref mut alternate) = screen.enable_alternate_modes(true) {\n\n print_wait_screen(&mut alternate.screen);\n\n } // <- drop alternate screen; this will cause the alternate screen to drop.\n\n\n\n drop(screen); // <- drop screen; this will cause raw mode to be turned off.\n\n\n\n println!(\"Whe are back at the main screen\");\n\n}\n\n\n", "file_path": "examples/raw_mode.rs", "rank": 4, "score": 222243.47781186184 }, { "content": "/// Transform the given mode into an raw mode (non-canonical) mode.\n\npub fn make_raw(termios: &mut Termios) {\n\n extern \"C\" {\n\n pub fn cfmakeraw(termptr: *mut Termios);\n\n }\n\n unsafe { cfmakeraw(termios) }\n\n}\n\n\n", 
"file_path": "crossterm_utils/src/sys/unix.rs", "rank": 5, "score": 219247.22707575758 }, { "content": "/// this will capture the input until the given key.\n\n/// TODO: make sure terminal is in raw mode before this function is called.\n\n/// for more information checkout `crossterm_screen` or use crossterm with the `screen` feature flag.\n\npub fn read_async_until() {\n\n // TODO: make sure terminal is in raw mode.\n\n // for more information checkout `crossterm_screen` or use crossterm with the `screen` feature flag.\n\n\n\n // init some modules we use for this demo\n\n let input = input();\n\n\n\n let mut stdin = input.read_until_async(b'\\r').bytes();\n\n\n\n for _i in 0..100 {\n\n let a = stdin.next();\n\n\n\n println!(\"pressed key: {:?}\", a);\n\n\n\n if let Some(Ok(b'\\r')) = a {\n\n println!(\"The enter key is hit and program is not listening to input anymore.\");\n\n break;\n\n }\n\n\n\n if let Some(Ok(b'x')) = a {\n\n println!(\"The key: x was pressed and program is terminated.\");\n\n break;\n\n }\n\n\n\n thread::sleep(time::Duration::from_millis(100));\n\n }\n\n}\n\n\n", "file_path": "crossterm_input/examples/async_input.rs", "rank": 6, "score": 214634.0573652092 }, { "content": "/// this will read pressed characters async until `x` is typed.\n\n/// TODO: make sure terminal is in raw mode before this function is called.\n\n/// for more information checkout `crossterm_screen` or use crossterm with the `screen` feature flag.\n\npub fn read_async() {\n\n let input = input();\n\n\n\n let mut stdin = input.read_async().bytes();\n\n\n\n for _i in 0..100 {\n\n let a = stdin.next();\n\n\n\n println!(\"pressed key: {:?}\", a);\n\n\n\n if let Some(Ok(b'x')) = a {\n\n println!(\"The key: `x` was pressed and program is terminated.\");\n\n break;\n\n }\n\n\n\n thread::sleep(time::Duration::from_millis(50));\n\n }\n\n}\n\n\n", "file_path": "crossterm_input/examples/async_input.rs", "rank": 7, "score": 214630.0599759007 }, { "content": "pub fn into_raw_mode() -> 
io::Result<RawFd> {\n\n let tty_f;\n\n\n\n let fd = unsafe {\n\n if libc::isatty(libc::STDIN_FILENO) == 1 {\n\n libc::STDIN_FILENO\n\n } else {\n\n tty_f = fs::File::open(\"/dev/tty\")?;\n\n tty_f.as_raw_fd()\n\n }\n\n };\n\n\n\n let mut termios = Termios::from_fd(fd)?;\n\n let original = termios.clone();\n\n\n\n unsafe {\n\n if ORIGINAL_TERMINAL_MODE.is_none() {\n\n ORIGINAL_TERMINAL_MODE = Some(original.clone())\n\n }\n\n }\n\n\n\n make_raw(&mut termios);\n\n tcsetattr(fd, TCSADRAIN, &termios)?;\n\n\n\n Ok(fd)\n\n}\n\n\n", "file_path": "crossterm_utils/src/sys/unix.rs", "rank": 8, "score": 204838.18534213054 }, { "content": "/// Get a `TerminalColor` implementation whereon color related actions can be performed.\n\npub fn color<'stdout>() -> TerminalColor<'stdout> {\n\n TerminalColor::new()\n\n}\n", "file_path": "crossterm_style/src/color.rs", "rank": 9, "score": 203897.0982268382 }, { "content": "/// Get a `Terminal` instance whereon terminal related actions could performed.\n\npub fn terminal<'stdout>() -> Terminal<'stdout> {\n\n Terminal::new()\n\n}\n", "file_path": "crossterm_terminal/src/terminal/terminal.rs", "rank": 10, "score": 203897.09822683822 }, { "content": "pub fn disable_raw_mode() -> io::Result<()> {\n\n let tty_f;\n\n\n\n let fd = unsafe {\n\n if libc::isatty(libc::STDIN_FILENO) == 1 {\n\n libc::STDIN_FILENO\n\n } else {\n\n tty_f = fs::File::open(\"/dev/tty\")?;\n\n tty_f.as_raw_fd()\n\n }\n\n };\n\n\n\n if let Some(original) = unsafe { ORIGINAL_TERMINAL_MODE } {\n\n tcsetattr(fd, TCSADRAIN, &original)?;\n\n }\n\n Ok(())\n\n}\n", "file_path": "crossterm_utils/src/sys/unix.rs", "rank": 11, "score": 200607.7210383502 }, { "content": "// This trait provides an interface for switching to alternate screen and back.\n\npub trait IAlternateScreenCommand: Sync + Send {\n\n fn enable(&self, stdout: &mut TerminalOutput) -> io::Result<()>;\n\n fn disable(&self, stdout: &TerminalOutput) -> io::Result<()>;\n\n}\n\n\n", "file_path": 
"crossterm_utils/src/commands/mod.rs", "rank": 12, "score": 198968.77723865167 }, { "content": "// This trait provides an interface for switching to alternate screen and back.\n\npub trait IAlternateScreenCommand: Sync + Send {\n\n fn enable(&self, stdout: &mut TerminalOutput) -> io::Result<()>;\n\n fn disable(&self, stdout: &TerminalOutput) -> io::Result<()>;\n\n}\n\n\n", "file_path": "crossterm_screen/src/sys/mod.rs", "rank": 13, "score": 198968.77723865167 }, { "content": "pub fn read_line() {\n\n let input = input();\n\n\n\n match input.read_line() {\n\n Ok(s) => println!(\"string typed: {}\", s),\n\n Err(e) => println!(\"error: {}\", e),\n\n }\n\n}\n\n\n", "file_path": "crossterm_input/examples/input.rs", "rank": 14, "score": 190020.9454132357 }, { "content": "pub fn read_char() {\n\n let input = input();\n\n\n\n match input.read_char() {\n\n Ok(s) => println!(\"char typed: {}\", s),\n\n Err(e) => println!(\"char error : {}\", e),\n\n }\n\n}\n\n\n", "file_path": "crossterm_input/examples/input.rs", "rank": 15, "score": 190020.9454132357 }, { "content": "pub fn pause_terminal() {\n\n println!(\"Press 'x' to quit...\");\n\n let terminal_input = TerminalInput::new();\n\n terminal_input.wait_until(KeyEvent::OnKeyPress(b'x'));\n\n}\n\n\n", "file_path": "crossterm_input/examples/input.rs", "rank": 16, "score": 190020.9454132357 }, { "content": "/// TODO: make sure terminal is in raw mode before this function is called.\n\n/// for more information checkout `crossterm_screen` or use crossterm with the `screen` feature flag.\n\npub fn read_async_demo() {\n\n // init some modules we use for this demo\n\n let input = input();\n\n\n\n // this will setup the async reading.\n\n let mut stdin = input.read_async().bytes();\n\n\n\n // clear terminal and reset the cursor.\n\n terminal.clear(ClearType::All);\n\n cursor.goto(1, 1);\n\n\n\n // loop until the enter key (\\r) is pressed.\n\n loop {\n\n terminal.clear(ClearType::All);\n\n cursor.goto(1, 1);\n\n\n\n // get the next 
pressed key\n\n let pressed_key = stdin.next();\n\n terminal.write(format!(\"{:?} <- Character pressed\", pressed_key));\n\n\n\n // check if pressed key is enter (\\r)\n\n if let Some(Ok(b'\\r')) = pressed_key {\n\n break;\n\n }\n\n\n\n // wait 200 ms and reset cursor write\n\n thread::sleep(Duration::from_millis(200));\n\n }\n\n}\n\n\n", "file_path": "crossterm_input/examples/async_input.rs", "rank": 17, "score": 184646.83453924023 }, { "content": "fn print_wait_screen(screen: &mut Screen) {\n\n let crossterm = Crossterm::from_screen(screen);\n\n let terminal = crossterm.terminal();\n\n let cursor = crossterm.cursor();\n\n\n\n terminal.clear(ClearType::All);\n\n\n\n cursor.hide();\n\n cursor.goto(0, 0);\n\n screen.write(b\"Welcome to the wait screen.\");\n\n cursor.goto(0, 1);\n\n screen.write(b\"Please wait a few seconds until we arrive back at the main screen.\");\n\n cursor.goto(0, 2);\n\n screen.write(b\"Progress:\");\n\n cursor.goto(0, 3);\n\n\n\n // print some progress example.\n\n for i in 1..5 {\n\n // print the current counter at the line of `Seconds to Go: {counter}`\n\n cursor.goto(10, 2);\n", "file_path": "examples/raw_mode.rs", "rank": 18, "score": 184150.48020946293 }, { "content": "/// use the `Crossterm` to get an instance to the cursor module | demonstration.\n\npub fn crossterm() {\n\n // Create the crossterm type to access different modules.\n\n let crossterm = Crossterm::new();\n\n\n\n // pass a reference to the current screen.\n\n let cursor = crossterm.cursor();\n\n let color = crossterm.color();\n\n let terminal = crossterm.terminal();\n\n let terminal = crossterm.input();\n\n let style = crossterm.style(\"Black font on green background\").with(Color::Black).on(Color::Green);\n\n\n\n // TODO: perform some actions with the instances above.\n\n}\n", "file_path": "examples/crossterm.rs", "rank": 19, "score": 183400.90005144454 }, { "content": "/// TODO: make sure terminal is in raw mode before this function is called.\n\n/// for more 
information checkout `crossterm_screen` or use crossterm with the `screen` feature flag.\n\npub fn async_reading_on_alternate_screen() {\n\n let screen = Screen::new(false);\n\n\n\n // switch to alternate screen\n\n if let Ok(alternate) = screen.enable_alternate_modes(true) {\n\n let crossterm = Crossterm::from_screen(&alternate.screen);\n\n // init some modules we use for this demo\n\n let input = crossterm.input();\n\n let terminal = crossterm.terminal();\n\n let mut cursor = crossterm.cursor();\n\n\n\n // this will setup the async reading.\n\n let mut stdin = input.read_async().bytes();\n\n\n\n // loop until the enter key (\\r) is pressed.\n\n loop {\n\n terminal.clear(ClearType::All);\n\n cursor.goto(1, 1);\n\n\n\n // get the next pressed key\n", "file_path": "crossterm_input/examples/async_input.rs", "rank": 20, "score": 182096.66085411783 }, { "content": "/// this will capture the input until the given key.\n\npub fn read_async_until() {\n\n // create raw screen\n\n let screen = Screen::new(true);\n\n\n\n let input = TerminalInput::from_output(&screen.stdout);\n\n\n\n let mut stdin = input.read_until_async(b'\\r').bytes();\n\n\n\n for _i in 0..100 {\n\n let a = stdin.next();\n\n\n\n println!(\"pressed key: {:?}\", a);\n\n\n\n if let Some(Ok(b'\\r')) = a {\n\n println!(\"The enter key is hit and program is not listening to input anymore.\");\n\n break;\n\n }\n\n\n\n if let Some(Ok(b'x')) = a {\n\n println!(\"The key: x was pressed and program is terminated.\");\n\n break;\n\n }\n\n\n\n thread::sleep(time::Duration::from_millis(100));\n\n }\n\n}\n\n\n", "file_path": "examples/async_input.rs", "rank": 21, "score": 180587.84220758593 }, { "content": "/// this will read pressed characters async until `x` is typed.\n\npub fn read_async() {\n\n // create raw screen\n\n let screen = Screen::new(true);\n\n\n\n let input = TerminalInput::from_output(&screen.stdout);\n\n\n\n let mut stdin = input.read_async().bytes();\n\n\n\n for _i in 0..100 {\n\n let a = 
stdin.next();\n\n\n\n println!(\"pressed key: {:?}\", a);\n\n\n\n if let Some(Ok(b'x')) = a {\n\n println!(\"The key: `x` was pressed and program is terminated.\");\n\n break;\n\n }\n\n\n\n thread::sleep(time::Duration::from_millis(50));\n\n }\n\n}\n\n\n", "file_path": "examples/async_input.rs", "rank": 22, "score": 180583.4258543984 }, { "content": "/// Print all supported RGB colors, not supported for Windows systems < 10 | demonstration.\n\npub fn print_supported_colors() {\n\n let count = color().get_available_color_count().unwrap();\n\n\n\n for i in 0..count {\n\n println!(\n\n \"{}\",\n\n style(format!(\"White : \\t {}\", i)).on(Color::AnsiValue(i as u8))\n\n );\n\n }\n\n}\n\n\n", "file_path": "crossterm_style/examples/style.rs", "rank": 23, "score": 178985.0162698694 }, { "content": "/// Clear all lines from cursor position X:4, Y:7 up | demonstration\n\npub fn clear_until_new_line() {\n\n let terminal = terminal();\n\n\n\n print_test_data();\n\n\n\n // Set terminal cursor position (see example for more info).\n\n cursor().goto(4, 20);\n\n\n\n // Clear all the cells until next line.\n\n terminal.clear(ClearType::UntilNewLine);\n\n}\n\n\n", "file_path": "crossterm_terminal/examples/terminal.rs", "rank": 24, "score": 178963.75363875827 }, { "content": "pub fn change_console_mode() {\n\n let console_mode = ConsoleMode::new().unwrap();\n\n\n\n // get the current console mode:\n\n let mode: u32 = console_mode.mode().unwrap();\n\n\n\n // set the console mode (not sure if this is an actual value xp)\n\n console_mode.set_mode(10);\n\n}\n\n\n", "file_path": "crossterm_winapi/examples/console.rs", "rank": 25, "score": 178824.78810986388 }, { "content": "/// print wait screen on alternate screen, then switch back.\n\npub fn print_wait_screen_on_alternate_window() {\n\n let screen = Screen::default();\n\n\n\n // move to alternate screen, 'false' means if the alternate screen should be in raw modes.\n\n if let Ok(alternate) = screen.enable_alternate_modes(false) {\n\n // 
do some stuff on the alternate screen.\n\n } // <- alternate screen will be disabled when dropped.\n\n}\n", "file_path": "crossterm_screen/examples/alternate_screen.rs", "rank": 26, "score": 170701.20217595465 }, { "content": "pub fn read_char() -> io::Result<char> {\n\n let mut buf = [0u8; 20];\n\n\n\n let fd = unix::into_raw_mode()?;\n\n\n\n // read input and convert it to char\n\n let rv = unsafe {\n\n let read = libc::read(fd, buf.as_mut_ptr() as *mut libc::c_void, 20);\n\n\n\n if read < 0 {\n\n Err(io::Error::last_os_error())\n\n } else if buf[0] == b'\\x03' {\n\n Err(io::Error::new(\n\n io::ErrorKind::Interrupted,\n\n \"read interrupted\",\n\n ))\n\n } else {\n\n let mut pressed_char = Ok(' ');\n\n\n\n if let Ok(s) = ::std::str::from_utf8(&buf[..read as usize]) {\n", "file_path": "crossterm_input/src/sys/unix.rs", "rank": 27, "score": 161410.918727303 }, { "content": "/// Get the TTY device.\n\n///\n\n/// This allows for getting stdio representing _only_ the TTY, and not other streams.\n\npub fn get_tty() -> io::Result<fs::File> {\n\n let mut tty_f: fs::File = unsafe { ::std::mem::zeroed() };\n\n\n\n let _fd = unsafe {\n\n if libc::isatty(libc::STDIN_FILENO) == 1 {\n\n libc::STDIN_FILENO\n\n } else {\n\n tty_f = fs::File::open(\"/dev/tty\")?;\n\n tty_f.as_raw_fd()\n\n }\n\n };\n\n\n\n Ok(tty_f)\n\n}\n\n\n", "file_path": "crossterm_input/src/sys/unix.rs", "rank": 28, "score": 156856.04547813427 }, { "content": "pub fn read_char() {\n\n let input = input();\n\n\n\n match input.read_char() {\n\n Ok(s) => println!(\"char typed: {}\", s),\n\n Err(e) => println!(\"char error : {}\", e),\n\n }\n\n}\n\n\n", "file_path": "examples/input.rs", "rank": 29, "score": 155762.80637057798 }, { "content": "pub fn read_line() {\n\n let input = input();\n\n\n\n match input.read_line() {\n\n Ok(s) => println!(\"string typed: {}\", s),\n\n Err(e) => println!(\"error: {}\", e),\n\n }\n\n}\n\n\n", "file_path": "examples/input.rs", "rank": 30, "score": 155762.80637057798 }, { 
"content": "pub fn pause_terminal() {\n\n println!(\"Press 'x' to quit...\");\n\n let screen = Screen::new(true);\n\n let terminal_input = TerminalInput::from_output(&screen.stdout);\n\n terminal_input.wait_until(KeyEvent::OnKeyPress(b'x'));\n\n}\n\n\n", "file_path": "examples/input.rs", "rank": 31, "score": 155762.80637057798 }, { "content": "/// This function is used by 'ANSI' modules. Those modules are using an `Option` of `TerminalOutput`.\n\n/// Because it is an option it could be either 'None' or 'Some'.\n\n/// When the `TerminalOutput` is 'None' we write our 'ANSI' escape codes to the default `stdout()` if it is a `Some`\n\n/// - which means we are in alternate screen modes or we have raw screen enabled - we should write to the screen passed by the user.\n\n/// This way our commands or our writes will be done with the passed `TerminalOutput`.\n\npub fn write(stdout: &Option<&Arc<TerminalOutput>>, string: String) -> io::Result<usize> {\n\n match stdout {\n\n None => {\n\n print!(\"{}\", string.as_str());\n\n\n\n match io::stdout().flush() {\n\n Ok(_) => Ok(string.len()),\n\n Err(e) => Err(e),\n\n }\n\n }\n\n Some(output) => output.write_string(string),\n\n }\n\n}\n\n\n", "file_path": "crossterm_utils/src/functions.rs", "rank": 32, "score": 155574.16152725447 }, { "content": "#[cfg(unix)]\n\npub fn print_supported_colors() {\n\n let count = color().get_available_color_count().unwrap();\n\n\n\n for i in 0..count {\n\n println!(\n\n \"{}\",\n\n style(format!(\"White : \\t {}\", i)).on(Color::AnsiValue(i as u8))\n\n );\n\n }\n\n}\n\n\n", "file_path": "examples/style.rs", "rank": 33, "score": 154051.21006716695 }, { "content": "/// Clear all lines from cursor position X:4, Y:7 up | demonstration\n\npub fn clear_until_new_line() {\n\n let terminal = terminal();\n\n\n\n print_test_data();\n\n\n\n // Set terminal cursor position (see example for more info).\n\n cursor().goto(4, 20);\n\n\n\n // Clear all the cells until next line.\n\n 
terminal.clear(ClearType::UntilNewLine);\n\n}\n\n\n", "file_path": "examples/terminal.rs", "rank": 34, "score": 154038.7199694221 }, { "content": "///! This trait defines the actions that can be performed with the terminal cursor.\n\n///! This trait can be implemented so that a concrete implementation of the ITerminalCursor can fulfill\n\n///! the wishes to work on a specific platform.\n\n///!\n\n///! ## For example:\n\n///!\n\n///! This trait is implemented for `WinApi` (Windows specific) and `ANSI` (Unix specific),\n\n///! so that cursor related actions can be performed on both UNIX and Windows systems.\n\ntrait ITerminalCursor: Sync + Send {\n\n /// Goto some location (x,y) in the context.\n\n fn goto(&self, x: u16, y: u16, stdout: &Option<&Arc<TerminalOutput>>) -> Result<()>;\n\n /// Get the location (x,y) of the current cursor in the context\n\n fn pos(&self) -> (u16, u16);\n\n /// Move cursor n times up\n\n fn move_up(&self, count: u16, stdout: &Option<&Arc<TerminalOutput>>) -> Result<()>;\n\n /// Move the cursor `n` times to the right.\n\n fn move_right(&self, count: u16, stdout: &Option<&Arc<TerminalOutput>>) -> Result<()>;\n\n /// Move the cursor `n` times down.\n\n fn move_down(&self, count: u16, stdout: &Option<&Arc<TerminalOutput>>) -> Result<()>;\n\n /// Move the cursor `n` times left.\n\n fn move_left(&self, count: u16, stdout: &Option<&Arc<TerminalOutput>>) -> Result<()>;\n\n /// Save cursor position so that its saved position can be recalled later. 
Note that this position is stored program based not per instance of the cursor struct.\n\n fn save_position(&self, stdout: &Option<&Arc<TerminalOutput>>) -> Result<()>;\n\n /// Return to saved cursor position\n\n fn reset_position(&self, stdout: &Option<&Arc<TerminalOutput>>) -> Result<()>;\n\n /// Hide the terminal cursor.\n\n fn hide(&self, stdout: &Option<&Arc<TerminalOutput>>) -> Result<()>;\n\n /// Show the terminal cursor\n\n fn show(&self, stdout: &Option<&Arc<TerminalOutput>>) -> Result<()>;\n\n /// Enable or disable the blinking of the cursor.\n\n fn blink(&self, blink: bool, stdout: &Option<&Arc<TerminalOutput>>) -> Result<()>;\n\n}\n", "file_path": "crossterm_cursor/src/cursor/mod.rs", "rank": 35, "score": 153973.63164739066 }, { "content": "/// This function is used by 'ANSI' modules. Those modules are using an `Option` of `TerminalOutput`.\n\n/// Because it is an option it could be either 'None' or 'Some'.\n\n/// When the `TerminalOutput` is 'None' we write our 'ANSI' escape codes to the default `stdout()` if it is a `Some`\n\n/// - which means we are in alternate screen modes or we have raw screen enabled - we should write to the screen passed by the user.\n\n/// This way our commands or our writes will be done with the passed `TerminalOutput`.\n\npub fn write_str(stdout: &Option<&Arc<TerminalOutput>>, string: &str) -> io::Result<usize> {\n\n match stdout {\n\n None => match io::stdout().flush() {\n\n Ok(_) => {\n\n write!(io::stdout(), \"{}\", string)?;\n\n Ok(string.len())\n\n }\n\n Err(e) => Err(e),\n\n },\n\n Some(output) => output.write_str(string),\n\n }\n\n}\n\n\n", "file_path": "crossterm_utils/src/functions.rs", "rank": 36, "score": 153475.35370844614 }, { "content": "pub fn read_async_demo() {\n\n // create raw screen\n\n let screen = Screen::new(true);\n\n\n\n let crossterm = Crossterm::from_screen(&screen);\n\n\n\n let input = crossterm.input();\n\n let terminal = crossterm.terminal();\n\n let cursor = crossterm.cursor();\n\n\n\n // this 
will setup the async reading.\n\n let mut stdin = input.read_async().bytes();\n\n\n\n // clear terminal and reset the cursor.\n\n terminal.clear(ClearType::All);\n\n cursor.goto(1, 1);\n\n\n\n // loop until the enter key (\\r) is pressed.\n\n loop {\n\n terminal.clear(ClearType::All);\n", "file_path": "examples/async_input.rs", "rank": 37, "score": 150831.0841455151 }, { "content": "pub fn async_reading_on_alternate_screen() {\n\n // create raw screen\n\n let screen = Screen::new(true);\n\n\n\n let input = TerminalInput::from_output(&screen.stdout);\n\n\n\n // switch to alternate screen\n\n if let Ok(alternate) = screen.enable_alternate_modes(true) {\n\n let crossterm = Crossterm::from_screen(&alternate.screen);\n\n // init some modules we use for this demo\n\n let input = crossterm.input();\n\n let terminal = crossterm.terminal();\n\n let mut cursor = crossterm.cursor();\n\n\n\n // this will setup the async reading.\n\n let mut stdin = input.read_async().bytes();\n\n\n\n // loop until the enter key (\\r) is pressed.\n\n loop {\n\n terminal.clear(ClearType::All);\n", "file_path": "examples/async_input.rs", "rank": 38, "score": 148515.6378267812 }, { "content": "/// print wait screen on alternate screen, then switch back.\n\npub fn print_wait_screen_on_alternate_window() {\n\n let screen = Screen::default();\n\n\n\n if let Ok(alternate) = screen.enable_alternate_modes(false) {\n\n print_wait_screen(&alternate.screen);\n\n }\n\n}\n\n\n", "file_path": "examples/alternate_screen.rs", "rank": 39, "score": 146936.96124897886 }, { "content": "/// Scroll down 10 lines | demonstration.\n\npub fn scroll_up() {\n\n let terminal = terminal();\n\n\n\n print_test_data();\n\n\n\n // Scroll up 10 lines.\n\n terminal.scroll_up(5);\n\n}\n\n\n", "file_path": "crossterm_terminal/examples/terminal.rs", "rank": 40, "score": 144791.20275160257 }, { "content": "/// get the cursor position\n\npub fn pos() {\n\n // Get the cursor\n\n let cursor = cursor();\n\n // get the cursor 
position.\n\n let (x, y) = cursor.pos();\n\n\n\n println!(\"{} {}\", x, y);\n\n}\n\n\n", "file_path": "crossterm_cursor/examples/cursor.rs", "rank": 41, "score": 144791.20275160257 }, { "content": "/// Set the cursor to position X: 10, Y: 5 in the terminal.\n\npub fn goto() {\n\n // Get the cursor\n\n let cursor = cursor();\n\n // Set the cursor to position X: 10, Y: 5 in the terminal\n\n cursor.goto(10, 5);\n\n}\n\n\n", "file_path": "crossterm_cursor/examples/cursor.rs", "rank": 42, "score": 144791.20275160257 }, { "content": "/// Move the cursor 3 up | demonstration.\n\npub fn move_up() {\n\n // Get the cursor\n\n let mut cursor = cursor();\n\n\n\n // Move the cursor to position 3 times to the up in the terminal\n\n cursor.move_up(10);\n\n}\n\n\n", "file_path": "crossterm_cursor/examples/cursor.rs", "rank": 43, "score": 144791.20275160257 }, { "content": "/// exit the current proccess.\n\npub fn exit() {\n\n let terminal = terminal();\n\n terminal.exit();\n\n}\n\n\n", "file_path": "crossterm_terminal/examples/terminal.rs", "rank": 44, "score": 144791.20275160257 }, { "content": "/// Move the cursor 3 down | demonstration.\n\npub fn move_down() {\n\n let mut cursor = cursor();\n\n // Move the cursor to position 3 times to the down in the terminal\n\n cursor.move_down(3);\n\n}\n\n\n", "file_path": "crossterm_cursor/examples/cursor.rs", "rank": 45, "score": 144791.20275160257 }, { "content": "/// Scroll down 10 lines | demonstration.\n\npub fn scroll_down() {\n\n let terminal = terminal();\n\n\n\n print_test_data();\n\n\n\n // Scroll down 10 lines.\n\n terminal.scroll_down(10);\n\n}\n\n\n", "file_path": "crossterm_terminal/examples/terminal.rs", "rank": 46, "score": 144791.20275160257 }, { "content": "/// print some red font | demonstration.\n\npub fn paint_foreground() {\n\n // Create a styled object.\n\n // Call the method `with()` on the object given by `style()` and pass in any Color from the Color enum.\n\n let styledobject = style(\"Red 
foreground\").with(Color::Red);\n\n\n\n // Print the object to the given screen and.\n\n println!(\"Colored text: {}\", styledobject);\n\n\n\n // Or print inline\n\n println!(\n\n \"Colored text: {}\",\n\n style(\"Blue foreground\").with(Color::Blue)\n\n );\n\n}\n\n\n", "file_path": "crossterm_style/examples/style.rs", "rank": 47, "score": 142553.0569855548 }, { "content": "/// Show cursor display | demonstration.\n\npub fn show_cursor() {\n\n let cursor = cursor();\n\n cursor.show();\n\n}\n\n\n", "file_path": "crossterm_cursor/examples/cursor.rs", "rank": 48, "score": 142553.0569855548 }, { "content": "/// Show cursor display, only works on certain terminals.| demonstration\n\npub fn blink_cursor() {\n\n let cursor = cursor();\n\n cursor.blink(false);\n\n cursor.blink(false);\n\n}\n\n\n", "file_path": "crossterm_cursor/examples/cursor.rs", "rank": 49, "score": 142553.0569855548 }, { "content": "/// Clear all lines in terminal | demonstration\n\npub fn clear_all_lines() {\n\n let terminal = terminal();\n\n\n\n print_test_data();\n\n\n\n // Clear all lines in terminal;\n\n terminal.clear(ClearType::All);\n\n}\n\n\n", "file_path": "crossterm_terminal/examples/terminal.rs", "rank": 50, "score": 142553.0569855548 }, { "content": "/// print some font on red background | demonstration.\n\npub fn paint_background() {\n\n // Create a styled object.\n\n // Call the method `with()` on the object given by `style()` and pass in any Color from the Color enum.\n\n let styledobject = style(\"Red foreground\").on(Color::Red);\n\n\n\n // Print the object to the given screen and.\n\n println!(\"Colored text: {}\", styledobject);\n\n\n\n // Or print inline\n\n println!(\"Colored text: {}\", style(\"Red foreground\").on(Color::Blue));\n\n}\n\n\n", "file_path": "crossterm_style/examples/style.rs", "rank": 51, "score": 142553.0569855548 }, { "content": "/// Clear all lines from cursor position X:4, Y:4 down | demonstration\n\npub fn clear_from_cursor_down() {\n\n let terminal = 
terminal();\n\n\n\n print_test_data();\n\n\n\n // Set terminal cursor position (see example for more info).\n\n cursor().goto(4, 8);\n\n\n\n // Clear all cells from current cursor position down.\n\n terminal.clear(ClearType::FromCursorDown);\n\n}\n\n\n", "file_path": "crossterm_terminal/examples/terminal.rs", "rank": 52, "score": 142553.0569855548 }, { "content": "/// Clear all lines from cursor position X:4, Y:4 up | demonstration\n\npub fn clear_from_cursor_up() {\n\n let terminal = terminal();\n\n\n\n print_test_data();\n\n\n\n // Set terminal cursor position (see example for more info).\n\n cursor().goto(4, 4);\n\n\n\n // Clear all cells from current cursor position down.\n\n terminal.clear(ClearType::FromCursorUp);\n\n}\n\n\n", "file_path": "crossterm_terminal/examples/terminal.rs", "rank": 53, "score": 142553.0569855548 }, { "content": "/// Exit the current process.\n\npub fn exit() {\n\n ::std::process::exit(256);\n\n}\n\n\n", "file_path": "crossterm_terminal/src/sys/winapi.rs", "rank": 54, "score": 142553.0569855548 }, { "content": "/// Hide cursor display | demonstration.\n\npub fn hide_cursor() {\n\n let cursor = cursor();\n\n cursor.hide();\n\n}\n\n\n", "file_path": "crossterm_cursor/examples/cursor.rs", "rank": 55, "score": 142553.0569855548 }, { "content": "/// Resize the terminal to X: 10, Y: 10 | demonstration.\n\npub fn resize_terminal() {\n\n let terminal = terminal();\n\n\n\n // Get terminal size\n\n terminal.set_size(10, 10);\n\n}\n\n\n", "file_path": "crossterm_terminal/examples/terminal.rs", "rank": 56, "score": 142553.0569855548 }, { "content": "pub fn exit() {\n\n ::std::process::exit(0);\n\n}\n\n\n\n/// A representation of the size of the current terminal.\n\n#[repr(C)]\n\n#[derive(Debug)]\n\npub struct UnixSize {\n\n /// number of rows\n\n pub rows: c_ushort,\n\n /// number of columns\n\n pub cols: c_ushort,\n\n pub ws_xpixel: c_ushort,\n\n pub ws_ypixel: c_ushort,\n\n}\n\n\n", "file_path": "crossterm_terminal/src/sys/unix.rs", "rank": 57, 
"score": 142553.0569855548 }, { "content": "/// Move the cursor 3 to the right | demonstration.\n\npub fn move_right() {\n\n let mut cursor = cursor();\n\n // Move the cursor to position 3 times to the right in the terminal\n\n cursor.move_right(3);\n\n}\n\n\n", "file_path": "crossterm_cursor/examples/cursor.rs", "rank": 58, "score": 142553.0569855548 }, { "content": "#[cfg(windows)]\n\npub fn print_font_with_attributes() {\n\n println!(\"{}\", style(\"Normal text\"));\n\n println!(\"{}\", style(\"Bold text\").bold());\n\n println!(\"{}\", style(\"Underlined text\").underlined());\n\n println!(\"{}\", style(\"Negative text\").negative());\n\n}\n\n\n", "file_path": "crossterm_style/examples/style.rs", "rank": 59, "score": 140410.324064743 }, { "content": "/// Set the terminal size to width 10, height: 10 | demonstration.\n\npub fn set_terminal_size() {\n\n let terminal = terminal();\n\n\n\n terminal.set_size(10, 10);\n\n}\n\n\n", "file_path": "crossterm_terminal/examples/terminal.rs", "rank": 60, "score": 140405.9274929373 }, { "content": "/// Print all available foreground colors | demonstration.\n\npub fn print_all_foreground_colors() {\n\n println!(\n\n \"{}\",\n\n style(format!(\"Black : \\t\\t {} \\n\", \"■\")).with(Color::Black)\n\n );\n\n println!(\n\n \"{}\",\n\n style(format!(\"Red : \\t\\t {} \\n\", \"■\")).with(Color::Red)\n\n );\n\n println!(\n\n \"{}\",\n\n style(format!(\"Cyan : \\t\\t {} \\n\", \"■\")).with(Color::Cyan)\n\n );\n\n println!(\n\n \"{}\",\n\n style(format!(\"DarkCyan : \\t {} \\n\", \"■\")).with(Color::DarkCyan)\n\n );\n\n println!(\n\n \"{}\",\n\n style(format!(\"DarkRed : \\t {} \\n\", \"■\")).with(Color::DarkRed)\n", "file_path": "crossterm_style/examples/style.rs", "rank": 61, "score": 140405.9274929373 }, { "content": "/// Clear all lines from cursor position X:4, Y:4 up | demonstration\n\npub fn clear_current_line() {\n\n let terminal = terminal();\n\n\n\n print_test_data();\n\n\n\n // Set terminal cursor position (see example for 
more info).\n\n cursor().goto(4, 3);\n\n\n\n // Clear current line cells.\n\n terminal.clear(ClearType::CurrentLine);\n\n}\n\n\n", "file_path": "crossterm_terminal/examples/terminal.rs", "rank": 62, "score": 140405.9274929373 }, { "content": "/// Print the the current terminal size | demonstration.\n\npub fn print_terminal_size() {\n\n let terminal = terminal();\n\n\n\n // Get terminal size\n\n let (width, height) = terminal.terminal_size();\n\n\n\n // Print results\n\n print!(\"X: {}, y: {}\", width, height);\n\n}\n\n\n", "file_path": "crossterm_terminal/examples/terminal.rs", "rank": 63, "score": 140405.9274929373 }, { "content": "/// Save and reset cursor position | demonstration..\n\npub fn safe_and_reset_position() {\n\n let cursor = cursor();\n\n\n\n // Goto X: 5 Y: 5\n\n cursor.goto(5, 5);\n\n // Safe cursor position: X: 5 Y: 5\n\n cursor.save_position();\n\n // Goto X: 5 Y: 20\n\n cursor.goto(5, 20);\n\n // Print at X: 5 Y: 20.\n\n println!(\"Yea!\");\n\n // Reset back to X: 5 Y: 5.\n\n cursor.reset_position();\n\n // Print Back at X: 5 Y: 5.\n\n println!(\"Back\");\n\n\n\n println!()\n\n}\n\n\n", "file_path": "crossterm_cursor/examples/cursor.rs", "rank": 64, "score": 140405.9274929373 }, { "content": "/// Print all available foreground colors | demonstration.\n\npub fn print_all_background_colors() {\n\n println!(\n\n \"{}\",\n\n style(format!(\"Black : \\t {} \\n\", \"■\")).on(Color::Black)\n\n );\n\n println!(\n\n \"{}\",\n\n style(format!(\"Red : \\t\\t {} \\n\", \"■\")).on(Color::Red)\n\n );\n\n println!(\n\n \"{}\",\n\n style(format!(\"Cyan : \\t\\t {} \\n\", \"■\")).on(Color::Cyan)\n\n );\n\n println!(\n\n \"{}\",\n\n style(format!(\"DarkCyan : \\t {} \\n\", \"■\")).on(Color::DarkCyan)\n\n );\n\n println!(\n\n \"{}\",\n\n style(format!(\"DarkRed : \\t {} \\n\", \"■\")).on(Color::DarkRed)\n", "file_path": "crossterm_style/examples/style.rs", "rank": 65, "score": 140405.9274929373 }, { "content": "fn main() {\n\n 
print_wait_screen_on_alternate_window();\n\n}", "file_path": "examples/raw_mode.rs", "rank": 66, "score": 140273.46451025142 }, { "content": "pub fn clear_after_cursor(\n\n location: Coord,\n\n buffer_size: Size,\n\n current_attribute: u16,\n\n) -> Result<()> {\n\n let (mut x, mut y) = (location.x, location.y);\n\n\n\n // if cursor position is at the outer right position\n\n if x as i16 > buffer_size.width {\n\n y += 1;\n\n x = 0;\n\n }\n\n\n\n // location where to start clearing\n\n let start_location = Coord::new(x, y);\n\n\n\n // get sum cells before cursor\n\n let cells_to_write = buffer_size.width as u32 * buffer_size.height as u32;\n\n\n\n clear(start_location, cells_to_write, current_attribute)\n\n}\n\n\n", "file_path": "crossterm_terminal/src/terminal/winapi_terminal.rs", "rank": 67, "score": 138344.37301882583 }, { "content": "pub fn clear_before_cursor(\n\n location: Coord,\n\n buffer_size: Size,\n\n current_attribute: u16,\n\n) -> Result<()> {\n\n let (xpos, ypos) = (location.x, location.y);\n\n\n\n // one cell after cursor position\n\n let x = 0;\n\n // one at row of cursor position\n\n let y = 0;\n\n\n\n // location where to start clearing\n\n let start_location = Coord::new(x, y);\n\n\n\n // get sum cells before cursor\n\n let cells_to_write = (buffer_size.width as u32 * ypos as u32) + (xpos as u32 + 1);\n\n\n\n // clear everything before cursor position\n\n clear(start_location, cells_to_write, current_attribute)\n\n}\n\n\n", "file_path": "crossterm_terminal/src/terminal/winapi_terminal.rs", "rank": 68, "score": 138344.37301882583 }, { "content": "pub fn clear_current_line(\n\n location: Coord,\n\n buffer_size: Size,\n\n current_attribute: u16,\n\n) -> Result<()> {\n\n // location where to start clearing\n\n let start_location = Coord::new(0, location.y);\n\n\n\n // get sum cells before cursor\n\n let cells_to_write = buffer_size.width as u32;\n\n\n\n // clear the whole current line\n\n clear(start_location, cells_to_write, 
current_attribute)?;\n\n\n\n // put the cursor back at cell 1 on current row\n\n let cursor = Cursor::new()?;\n\n cursor.goto(0, location.y)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "crossterm_terminal/src/terminal/winapi_terminal.rs", "rank": 69, "score": 136363.37756230388 }, { "content": "fn main() {}\n", "file_path": "crossterm_input/examples/input.rs", "rank": 70, "score": 136097.95376532897 }, { "content": "fn main() {}\n", "file_path": "crossterm_input/examples/async_input.rs", "rank": 71, "score": 133972.61669050402 }, { "content": "/// Get a `TerminalCursor` instance whereon cursor related actions can be performed.\n\npub fn cursor() -> TerminalCursor<'static> {\n\n TerminalCursor::new()\n\n}\n", "file_path": "crossterm_cursor/src/cursor/cursor.rs", "rank": 72, "score": 130092.61787774885 }, { "content": "/// Parses the given integer to an bool by checking if the value is 0 or 1.\n\n/// This is currently used for checking if a WinApi called succeeded, this might be moved into a macro at some time.\n\n/// So please don't use this :(.\n\npub fn is_true(value: i32) -> bool {\n\n if value == 0 {\n\n return false;\n\n } else {\n\n return true;\n\n }\n\n}\n", "file_path": "crossterm_winapi/src/lib.rs", "rank": 73, "score": 129678.35423589447 }, { "content": "#[cfg(windows)]\n\npub fn get_terminal_size() -> (u16, u16) {\n\n if let Ok(buffer) = ScreenBuffer::current() {\n\n let size = buffer.info().unwrap().terminal_size();\n\n (size.width as u16, size.height as u16)\n\n } else {\n\n (0, 0)\n\n }\n\n}\n", "file_path": "crossterm_terminal/src/sys/winapi.rs", "rank": 74, "score": 128191.94651591236 }, { "content": "#[cfg(windows)]\n\npub fn get_cursor_position() -> (u16, u16) {\n\n if let Ok(cursor) = Cursor::new() {\n\n cursor.position().unwrap().into()\n\n } else {\n\n (0, 0)\n\n }\n\n}\n\n\n\npub use crossterm_winapi::{is_true, Coord, Handle, HandleType, ScreenBuffer};\n\n\n\nuse winapi::{\n\n shared::minwindef::{FALSE, TRUE},\n\n um::wincon::{SetConsoleCursorInfo, 
SetConsoleCursorPosition, CONSOLE_CURSOR_INFO, COORD},\n\n um::winnt::HANDLE,\n\n};\n\n\n\nuse std::io::{self, Result};\n\n\n\n/// This stores the cursor pos, at program level. So it can be recalled later.\n\nstatic mut SAVED_CURSOR_POS: (u16, u16) = (0, 0);\n", "file_path": "crossterm_cursor/src/sys/winapi.rs", "rank": 75, "score": 128191.94651591236 }, { "content": "/// Get the current terminal size.\n\npub fn get_terminal_size() -> (u16, u16) {\n\n // http://rosettacode.org/wiki/Terminal_control/Dimensions#Library:_BSD_libc\n\n let us = UnixSize {\n\n rows: 0,\n\n cols: 0,\n\n ws_xpixel: 0,\n\n ws_ypixel: 0,\n\n };\n\n\n\n let r = unsafe { ioctl(STDOUT_FILENO, TIOCGWINSZ, &us) };\n\n\n\n if r == 0 {\n\n // because crossterm works starts counting at 0 and unix terminal starts at cell 1 you have subtract one to get 0-based results.\n\n (us.cols, us.rows)\n\n } else {\n\n (0, 0)\n\n }\n\n}\n", "file_path": "crossterm_terminal/src/sys/unix.rs", "rank": 76, "score": 128187.5499441067 }, { "content": "#[cfg(unix)]\n\npub fn get_cursor_position() -> (u16, u16) {\n\n if let Ok(pos) = pos() {\n\n pos\n\n } else {\n\n (0, 0)\n\n }\n\n}\n\n\n", "file_path": "crossterm_cursor/src/sys/unix.rs", "rank": 77, "score": 128187.5499441067 }, { "content": "fn new_sync_flag(initial_state: bool) -> (SyncFlagTx, SyncFlagRx) {\n\n let state = Arc::new(Mutex::new(initial_state));\n\n let tx = SyncFlagTx {\n\n inner: state.clone(),\n\n };\n\n let rx = SyncFlagRx {\n\n inner: state.clone(),\n\n };\n\n\n\n return (tx, rx);\n\n}\n\n\n", "file_path": "examples/program_examples/logging.rs", "rank": 78, "score": 124523.8878470644 }, { "content": "pub fn pos() -> io::Result<(u16, u16)> {\n\n // if we enable raw modes with screen, this could cause problems if raw mode is already enabled in applicaition.\n\n // I am not completely happy with this approach so feel free to find an other way.\n\n\n\n unsafe {\n\n if !unix::RAW_MODE_ENABLED_BY_USER || !unix::RAW_MODE_ENABLED_BY_SYSTEM {\n\n // set 
this boolean so that we know that the systems has enabled raw mode.\n\n unix::RAW_MODE_ENABLED_BY_SYSTEM = true;\n\n unix::into_raw_mode()?;\n\n }\n\n }\n\n\n\n // Where is the cursor?\n\n // Use `ESC [ 6 n`.\n\n let mut stdout = io::stdout();\n\n\n\n // Write command\n\n stdout.write_all(b\"\\x1B[6n\")?;\n\n stdout.flush()?;\n\n\n", "file_path": "crossterm_cursor/src/sys/unix.rs", "rank": 79, "score": 123736.20472475424 }, { "content": "//! This is a WINDOWS specific implementation for input related action.\n\n\n\nuse super::*;\n\n\n\nuse crossterm_utils::TerminalOutput;\n\nuse std::char;\n\nuse std::thread;\n\nuse winapi::um::winnt::INT;\n\n\n\npub struct WindowsInput;\n\n\n\nimpl WindowsInput {\n\n pub fn new() -> WindowsInput {\n\n WindowsInput\n\n }\n\n}\n\n\n\nimpl ITerminalInput for WindowsInput {\n\n fn read_char(&self, stdout: &Option<&Arc<TerminalOutput>>) -> io::Result<char> {\n\n let is_raw_screen = match stdout {\n", "file_path": "crossterm_input/src/input/windows_input.rs", "rank": 80, "score": 122958.69243915108 }, { "content": "\n\n match char::from_u32(pressed_char as u32) {\n\n Some(c) => {\n\n return Ok(c);\n\n }\n\n None => Err(io::Error::new(\n\n io::ErrorKind::Other,\n\n \"Could not parse given input to char\",\n\n )),\n\n }\n\n }\n\n\n\n fn read_async(&self, stdout: &Option<&Arc<TerminalOutput>>) -> AsyncReader {\n\n let (tx, rx) = mpsc::channel();\n\n\n\n let is_raw_screen = match stdout {\n\n Some(output) => output.is_in_raw_mode,\n\n None => false,\n\n };\n\n\n", "file_path": "crossterm_input/src/input/windows_input.rs", "rank": 81, "score": 122946.1891400795 }, { "content": " Some(output) => output.is_in_raw_mode,\n\n None => false,\n\n };\n\n\n\n // _getwch is without echo and _getwche is with echo\n\n let pressed_char = unsafe {\n\n if is_raw_screen {\n\n _getwch()\n\n } else {\n\n _getwche()\n\n }\n\n };\n\n\n\n // we could return error but maybe option to keep listening until valid character is inputted.\n\n if pressed_char == 0 || 
pressed_char == 0xe0 {\n\n return Err(io::Error::new(\n\n io::ErrorKind::Other,\n\n \"Given input char is not a valid char, mostly occurs when pressing special keys\",\n\n ));\n\n }\n", "file_path": "crossterm_input/src/input/windows_input.rs", "rank": 82, "score": 122941.3032491605 }, { "content": " });\n\n\n\n AsyncReader { recv: rx }\n\n }\n\n\n\n fn read_until_async(\n\n &self,\n\n delimiter: u8,\n\n stdout: &Option<&Arc<TerminalOutput>>,\n\n ) -> AsyncReader {\n\n let (tx, rx) = mpsc::channel();\n\n\n\n let is_raw_screen = match stdout {\n\n Some(output) => output.is_in_raw_mode,\n\n None => false,\n\n };\n\n\n\n thread::spawn(move || {\n\n loop {\n\n // _getwch is without echo and _getwche is with echo\n", "file_path": "crossterm_input/src/input/windows_input.rs", "rank": 83, "score": 122938.79047683538 }, { "content": " let pressed_char = unsafe {\n\n if is_raw_screen {\n\n _getwch()\n\n } else {\n\n _getwche()\n\n }\n\n } as u8;\n\n\n\n let end_of_stream = pressed_char == delimiter;\n\n\n\n // we could return error but maybe option to keep listening until valid character is inputted.\n\n if pressed_char == 0 || pressed_char == 0xe0 || end_of_stream {\n\n return;\n\n }\n\n\n\n if let Err(_) = tx.send(Ok(pressed_char as u8)) {\n\n println!(\"Could not send pressed char to receiver.\")\n\n }\n\n }\n\n });\n", "file_path": "crossterm_input/src/input/windows_input.rs", "rank": 84, "score": 122934.40964160209 }, { "content": " thread::spawn(move || {\n\n loop {\n\n // _getwch is without echo and _getwche is with echo\n\n let pressed_char = unsafe {\n\n if is_raw_screen {\n\n _getwch()\n\n } else {\n\n _getwche()\n\n }\n\n };\n\n\n\n // we could return error but maybe option to keep listening until valid character is inputted.\n\n if pressed_char == 0 || pressed_char == 0xe0 {\n\n return;\n\n }\n\n\n\n if let Err(_) = tx.send(Ok(pressed_char as u8)) {\n\n println!(\"Could not send pressed char to receiver.\")\n\n }\n\n }\n", "file_path": 
"crossterm_input/src/input/windows_input.rs", "rank": 85, "score": 122934.22905539942 }, { "content": "\n\n AsyncReader { recv: rx }\n\n }\n\n}\n\n\n\nextern \"C\" {\n\n fn _getwche() -> INT;\n\n fn _getwch() -> INT;\n\n}\n", "file_path": "crossterm_input/src/input/windows_input.rs", "rank": 86, "score": 122922.86059188974 }, { "content": "#[derive(Clone)]\n\nstruct WorkQueue<T: Send + Clone> {\n\n inner: Arc<Mutex<VecDeque<T>>>,\n\n}\n\n\n\nimpl<T: Send + Clone> WorkQueue<T> {\n\n fn new() -> Self {\n\n Self {\n\n inner: Arc::new(Mutex::new(VecDeque::new())),\n\n }\n\n }\n\n\n\n // get an item from the que if exists\n\n fn get_work(&self) -> Option<T> {\n\n let maybe_queue = self.inner.lock();\n\n\n\n if let Ok(mut queue) = maybe_queue {\n\n queue.pop_front()\n\n } else {\n\n panic!(\"WorkQueue::get_work() tried to lock a poisoned mutex\");\n\n }\n", "file_path": "examples/program_examples/logging.rs", "rank": 87, "score": 120726.81991281931 }, { "content": "/// Move the cursor 3 down | demonstration.\n\npub fn move_down() {\n\n let mut cursor = cursor();\n\n // Move the cursor to position 3 times to the down in the terminal\n\n cursor.move_down(3);\n\n}\n\n\n", "file_path": "examples/cursor.rs", "rank": 88, "score": 117250.14489407097 }, { "content": "/// get the cursor position\n\npub fn pos() {\n\n // Get the cursor\n\n let cursor = cursor();\n\n // get the cursor position.\n\n let (x, y) = cursor.pos();\n\n\n\n println!(\"{} {}\", x, y);\n\n}\n\n\n", "file_path": "examples/cursor.rs", "rank": 89, "score": 117250.14489407097 }, { "content": "/// Set the cursor to position X: 10, Y: 5 in the terminal.\n\npub fn goto() {\n\n // Get the cursor\n\n let cursor = cursor();\n\n // Set the cursor to position X: 10, Y: 5 in the terminal\n\n cursor.goto(10, 5);\n\n}\n\n\n", "file_path": "examples/cursor.rs", "rank": 90, "score": 117250.14489407097 }, { "content": "/// Scroll down 10 lines | demonstration.\n\npub fn scroll_down() {\n\n let terminal = terminal();\n\n\n\n 
print_test_data();\n\n\n\n // Scroll down 10 lines.\n\n terminal.scroll_down(10);\n\n}\n\n\n", "file_path": "examples/terminal.rs", "rank": 91, "score": 117250.14489407097 }, { "content": "/// Scroll down 10 lines | demonstration.\n\npub fn scroll_up() {\n\n let terminal = terminal();\n\n\n\n print_test_data();\n\n\n\n // Scroll up 10 lines.\n\n terminal.scroll_up(5);\n\n}\n\n\n", "file_path": "examples/terminal.rs", "rank": 92, "score": 117250.14489407097 }, { "content": "/// exit the current proccess.\n\npub fn exit() {\n\n let terminal = terminal();\n\n terminal.exit();\n\n}\n\n\n", "file_path": "examples/terminal.rs", "rank": 93, "score": 117250.14489407097 }, { "content": "/// Move the cursor 3 up | demonstration.\n\npub fn move_up() {\n\n // Get the cursor\n\n let mut cursor = cursor();\n\n\n\n // Move the cursor to position 3 times to the up in the terminal\n\n cursor.move_up(10);\n\n}\n\n\n", "file_path": "examples/cursor.rs", "rank": 94, "score": 117250.14489407097 }, { "content": "/// This could be used to style a type who is implementing `Display` with colors and attributes.\n\n///\n\n/// # Example\n\n/// ```rust\n\n/// // get an styled object which could be painted to the terminal.\n\n/// let styled_object = style(\"Some Blue colored text on black background\")\n\n/// .with(Color::Blue)\n\n/// .on(Color::Black);\n\n///\n\n/// // print the styled font * times to the current screen.\n\n/// for i in 1..10\n\n/// {\n\n/// println!(\"{}\", styled_object);\n\n/// }\n\n/// ```\n\npub fn style<'a, D: 'a>(val: D) -> StyledObject<D>\n\nwhere\n\n D: Display,\n\n{\n\n ObjectStyle::new().apply_to(val)\n\n}\n\n\n\n/// Attributes that could be applied on some text. 
(*nix values)\n\n#[cfg(unix)]\n\n#[derive(Copy, Clone, Debug, PartialEq, Eq, Ord, PartialOrd)]\n\npub enum Attribute {\n\n Bold = 1,\n\n Dim = 2,\n\n Italic = 3,\n\n Underlined = 4,\n\n SlowBlink = 5,\n\n RapidBlink = 6,\n\n Reverse = 7,\n\n Hidden = 8,\n\n CrossedOut = 9,\n", "file_path": "crossterm_style/src/lib.rs", "rank": 95, "score": 117221.57492491174 }, { "content": "/// Clear all lines in terminal | demonstration\n\npub fn clear_all_lines() {\n\n let terminal = terminal();\n\n\n\n print_test_data();\n\n\n\n // Clear all lines in terminal;\n\n terminal.clear(ClearType::All);\n\n}\n\n\n", "file_path": "examples/terminal.rs", "rank": 96, "score": 115511.43949404953 }, { "content": "/// Clear all lines from cursor position X:4, Y:4 down | demonstration\n\npub fn clear_from_cursor_down() {\n\n let terminal = terminal();\n\n\n\n print_test_data();\n\n\n\n // Set terminal cursor position (see example for more info).\n\n cursor().goto(4, 8);\n\n\n\n // Clear all cells from current cursor position down.\n\n terminal.clear(ClearType::FromCursorDown);\n\n}\n\n\n", "file_path": "examples/terminal.rs", "rank": 97, "score": 115511.43949404953 }, { "content": "/// Clear all lines from cursor position X:4, Y:4 up | demonstration\n\npub fn clear_from_cursor_up() {\n\n let terminal = terminal();\n\n\n\n print_test_data();\n\n\n\n // Set terminal cursor position (see example for more info).\n\n cursor().goto(4, 4);\n\n\n\n // Clear all cells from current cursor position down.\n\n terminal.clear(ClearType::FromCursorUp);\n\n}\n\n\n", "file_path": "examples/terminal.rs", "rank": 98, "score": 115511.43949404953 } ]
Rust
src/transport.rs
balajijinnah/nilai-rs
22510cca078d4de0069491ccb745b95349460f77
/* * Copyright 2019 balajijinnah and Contributors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ use super::types::*; use failure::Error; use futures::channel::mpsc; use futures::channel::oneshot; use futures::SinkExt; use futures::StreamExt; use log::{info, warn}; use num_enum::TryFromPrimitive; use rmp_serde::{Deserializer, Serializer}; use serde::{Deserialize, Serialize}; use std::convert::TryFrom; use std::net::SocketAddr; use tokio::net::udp::split::{UdpSocketRecvHalf, UdpSocketSendHalf}; fn decode_msg(buf: &Vec<u8>) -> Result<Message, Error> { let mut deserializer = Deserializer::new(&buf[1..]); match MessageType::try_from(*&buf[0])? 
{ MessageType::PingMsg => { let msg: Ping = Deserialize::deserialize(&mut deserializer)?; return Ok(Message::PingMsg(msg)); } MessageType::IndirectPingMsg => { let msg: IndirectPing = Deserialize::deserialize(&mut deserializer)?; return Ok(Message::IndirectPingMsg(msg)); } MessageType::AckRespMsg => { let msg: AckRes = Deserialize::deserialize(&mut deserializer)?; return Ok(Message::Ack(msg)); } MessageType::SuspectMsg => { let msg: Suspect = Deserialize::deserialize(&mut deserializer)?; return Ok(Message::SuspectMsg(msg)); } MessageType::AliveMsg => { let msg: Alive = Deserialize::deserialize(&mut deserializer)?; return Ok(Message::Alive(msg)); } MessageType::DeadMsg => { let msg: Dead = Deserialize::deserialize(&mut deserializer)?; return Ok(Message::Dead(msg)); } MessageType::StateSync => { println!("got udp state sync"); let msg: Alive = Deserialize::deserialize(&mut deserializer)?; return Ok(Message::StateSync(msg)); } MessageType::StateSyncRes => { let msg: Alive = Deserialize::deserialize(&mut deserializer)?; return Ok(Message::StateSyncRes(msg)); } } } fn encode_msg(msg: Message, buf: &mut Vec<u8>) -> Result<(), Error> { match msg { Message::PingMsg(msg) => { buf.push(MessageType::PingMsg as u8); msg.serialize(&mut Serializer::new(buf))?; } Message::IndirectPingMsg(msg) => { buf.push(MessageType::IndirectPingMsg as u8); msg.serialize(&mut Serializer::new(buf))?; } Message::Ack(msg) => { buf.push(MessageType::AckRespMsg as u8); msg.serialize(&mut Serializer::new(buf))?; } Message::SuspectMsg(msg) => { buf.push(MessageType::SuspectMsg as u8); msg.serialize(&mut Serializer::new(buf))?; } Message::Alive(msg) => { buf.push(MessageType::AliveMsg as u8); msg.serialize(&mut Serializer::new(buf))?; } Message::Dead(msg) => { buf.push(MessageType::DeadMsg as u8); msg.serialize(&mut Serializer::new(buf))?; } Message::StateSync(msg) => { println!("sending state sync"); buf.push(MessageType::StateSync as u8); msg.serialize(&mut Serializer::new(buf))?; } 
Message::StateSyncRes(msg) => { buf.push(MessageType::StateSyncRes as u8); msg.serialize(&mut Serializer::new(buf))?; } _ => { unimplemented!(); } } Ok(()) } #[derive(Debug)] pub(crate) struct TransportReceiver { pub handler_ch: mpsc::Sender<UdpMessage>, pub udp_socket_receiver: UdpSocketRecvHalf, pub closer: oneshot::Receiver<i32>, } impl TransportReceiver { pub(crate) async fn listen(&mut self) { let mut buf = vec![0; 1024]; loop { if let Ok(opt) = self.closer.try_recv() { if let Some(_) = opt { info!("stopping transport receiver"); break; } } match self.udp_socket_receiver.recv_from(&mut buf).await { Ok((read_bytes, from)) => { info!("{} bytes received", read_bytes); if read_bytes == 0 { continue; } match decode_msg(&buf) { Ok(msg) => { self.send_msg(from, msg).await; } Err(err) => { println!("unable to decode"); warn!("unable to decode the message {}", err); } } } Err(err) => { warn!("{} error while receiving the packets", err); } } } } async fn send_msg(&mut self, from: SocketAddr, msg: Message) { if let Err(e) = self .handler_ch .send(UdpMessage { peer: Some(from), msg: msg, }) .await { warn!("unable to send to the nilai handler {}", e); } } } #[derive(Debug, TryFromPrimitive)] #[repr(u8)] pub(crate) enum MessageType { PingMsg = 0, IndirectPingMsg = 1, AckRespMsg = 2, SuspectMsg = 3, AliveMsg = 4, DeadMsg = 5, StateSync = 6, StateSyncRes = 7, } pub(crate) struct TransportSender { pub udp_socket_sender: UdpSocketSendHalf, pub handler_recv_ch: mpsc::Receiver<UdpMessage>, pub closer: oneshot::Receiver<i32>, } impl TransportSender { pub(crate) async fn listen(&mut self) { let mut buf = vec![0; 1024]; loop { if let Ok(opt) = self.closer.try_recv() { if let Some(_) = opt { info!("stopping transport sender"); break; } } buf.clear(); match self.handler_recv_ch.next().await { Some(udp_msg) => { let peer = udp_msg.peer.unwrap(); match encode_msg(udp_msg.msg, &mut buf) { Ok(_) => { match self .udp_socket_sender .send_to(&buf[..buf.len()], &peer) .await { Err(e) => { 
warn!("error while sending udp message {} {}", e, peer); continue; } Ok(bytes_sent) => { info!("bytes sent {}", bytes_sent); } } } Err(e) => { warn!("unable to decode the message {} ", e); } } } None => { info!("stopping to listen for handler message"); break; } } } } }
/* * Copyright 2019 balajijinnah and Contributors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ use super::types::*; use failure::Error; use futures::channel::mpsc; use futures::channel::oneshot; use futures::SinkExt; use futures::StreamExt; use log::{info, warn}; use num_enum::TryFromPrimitive; use rmp_serde::{Deserializer, Serializer}; use serde::{Deserialize, Serialize}; use std::convert::TryFrom; use std::net::SocketAddr; use tokio::net::udp::split::{UdpSocketRecvHalf, UdpSocketSendHalf}; fn decode_msg(buf: &Vec<u8>) -> Result<Message, Error> { let mut deserializer = Deserializer::new(&buf[1..]); match MessageType::try_from(*&buf[0])? 
{ MessageType::PingMsg => { let msg: Ping = Deserialize::deserialize(&mut deserializer)?; return Ok(Message::PingMsg(msg)); } MessageType::IndirectPingMsg => { let msg: IndirectPing = Deserialize::deserialize(&mut deserializer)?; return Ok(Message::IndirectPingMsg(msg)); } MessageType::AckRespMsg => { let msg: AckRes = Deserialize::deserialize(&mut deserializer)?; return Ok(Message::Ack(msg)); } MessageType::SuspectMsg => { let msg: Suspect = Deserialize::deserialize(&mut deserializer)?; return Ok(Message::SuspectMsg(msg)); } MessageType::AliveMsg => { let msg: Alive = Deserialize::deserialize(&mut deserializer)?; return Ok(Message::Alive(msg)); } MessageType::DeadMsg => { let msg: Dead = Deserialize::deserialize(&mut deserializer)?; return Ok(Message::Dead(msg)); } MessageType::StateSync => { println!("got udp state sync"); let msg: Alive = Deserialize::deserialize(&mut deserializer)?; return Ok(Message::StateSync(msg)); } MessageType::StateSyncRes => { let msg: Alive = Deserialize::deserialize(&mut deserializer)?; return Ok(Message::StateSyncRes(msg)); } } } fn encode_msg(msg: Message, buf: &mut Vec<u8>) -> Result<(), Error> { match msg { Message::PingMsg(msg) => { buf.push(MessageType::PingMsg as u8); msg.serialize(&mut Serializer::new(buf))?; } Message::IndirectPingMsg(msg) => { buf.push(MessageType::IndirectPingMsg as u8); msg.serialize(&mut Serializer::new(buf))?; } Message::Ack(msg) => { buf.push(MessageType::AckRespMsg as u8); msg.serialize(&mut Serializer::new(buf))?; } Message::SuspectMsg(msg) => { buf.push(MessageType::SuspectMsg as u8); msg.serialize(&mut Serializer::new(buf))?; } Message::Alive(msg) => { buf.push(MessageType::AliveMsg as u8); msg.serialize(&mut Serializer::new(buf))?; } Message::Dead(msg) => { buf.push(MessageType::DeadMsg as u8); msg.serialize(&mut Serializer::new(buf))?; } Message::StateSync(msg) => { println!("sending state sync"); buf.push(MessageType::StateSync as u8); msg.serialize(&mut Serializer::new(buf))?; } 
Message::StateSyncRes(msg) => { buf.push(MessageType::StateSyncRes as u8); msg.serialize(&mut Serializer::new(buf))?; } _ => { unimplemented!(); } } Ok(()) } #[derive(Debug)] pub(crate) struct TransportReceiver { pub handler_ch: mpsc::Sender<UdpMessage>, pub udp_socket_receiver: UdpSocketRecvHalf, pub closer: oneshot::Receiver<i32>, } impl TransportReceiver { pub(crate) async fn listen(&mut self) { let mut buf = vec![0; 1024]; loop { if let Ok(opt) = self.closer.try_recv() { if let Some(_) = opt { info!("stopping transport receiver"); break; } } match self.udp_socket_receiver.recv_from(&mut buf).await { Ok((read_bytes, from)) => { info!("{} bytes received", read_bytes); if read_bytes == 0 { continue; } match decode_msg(&buf) { Ok(msg) => { self.send_msg(from, msg).await; } Err(err) => { println!("unable to decode"); warn!("unable to decode the message {}", err); } } } Err(err) => { warn!("{} error while receiving the packets", err); } } } } async fn send_msg(&mut self, from: SocketAddr, msg: Message) { if let Err(e) = self .handler_ch .send(UdpMessage { peer: Some(from), msg: msg, }) .await { warn!("unable to send to the nilai handler {}", e); } } } #[derive(Debug, TryFromPrimitive)] #[repr(u8)] pub(crate) enum MessageType { PingMsg = 0, IndirectPingMsg = 1, AckRespMsg = 2, SuspectMsg = 3, AliveMsg = 4, DeadMsg = 5, StateSync = 6, StateSyncRes = 7, } pub(crate) struct TransportSender { pub udp_socket_sender: UdpSocketSendHalf, pub handler_recv_ch: mpsc::Receiver<UdpMessage>, pub closer: oneshot::Receiver<i32>, } impl TransportSender { pub(crate) async fn listen(&mut self) { let mut buf = vec![0; 1024]; loop {
buf.clear(); match self.handler_recv_ch.next().await { Some(udp_msg) => { let peer = udp_msg.peer.unwrap(); match encode_msg(udp_msg.msg, &mut buf) { Ok(_) => { match self .udp_socket_sender .send_to(&buf[..buf.len()], &peer) .await { Err(e) => { warn!("error while sending udp message {} {}", e, peer); continue; } Ok(bytes_sent) => { info!("bytes sent {}", bytes_sent); } } } Err(e) => { warn!("unable to decode the message {} ", e); } } } None => { info!("stopping to listen for handler message"); break; } } } } }
if let Ok(opt) = self.closer.try_recv() { if let Some(_) = opt { info!("stopping transport sender"); break; } }
if_condition
[ { "content": "fn do_main() -> Result<(), Error> {\n\n let nilai_builder = builder::NilaiBuilder::new(\"127.0.0.1:5001\".parse()?);\n\n let closer = nilai_builder\n\n .alive_delegate(Box::new(|_: types::Node| println!(\"new node joined\")))\n\n .execute()?;\n\n // nilai is running so block the current thread.\n\n thread::sleep(Duration::from_secs(5));\n\n closer.stop();\n\n Ok(())\n\n}\n\n\n", "file_path": "examples/nilai.rs", "rank": 2, "score": 86292.86082431856 }, { "content": "fn main() {\n\n CombinedLogger::init(vec![\n\n TermLogger::new(LevelFilter::Warn, Config::default(), TerminalMode::Mixed).unwrap(),\n\n TermLogger::new(LevelFilter::Info, Config::default(), TerminalMode::Mixed).unwrap(),\n\n ])\n\n .unwrap();\n\n let nilai_builder = builder::NilaiBuilder::new(\"127.0.0.1:5002\".parse().unwrap());\n\n let closer = nilai_builder\n\n .alive_delegate(Box::new(|_: types::Node| println!(\"new node joined\")))\n\n .peers(vec![\"127.0.0.1:5001\".parse().unwrap()])\n\n .execute()\n\n .unwrap();\n\n // nilai is running so block the current thread.\n\n thread::sleep(Duration::from_secs(20));\n\n}\n", "file_path": "examples/nilai_with_peers.rs", "rank": 3, "score": 65098.61887370613 }, { "content": "fn main() {\n\n match do_main() {\n\n Err(err) => {\n\n println!(\"not able to run nilai handler {:?}\", err);\n\n }\n\n _ => {}\n\n }\n\n}\n", "file_path": "examples/nilai.rs", "rank": 4, "score": 47280.283465923 }, { "content": "/*\n\n * Copyright 2019 balajijinnah and Contributors\n\n *\n\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n\n * you may not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n * http://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing, software\n\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n * See the 
License for the specific language governing permissions and\n\n * limitations under the License.\n\n */\n\n\n\nuse nilai::builder;\n\nuse nilai::types;\n\nuse simplelog::*;\n\nuse std::thread;\n\nuse std::time::Duration;\n", "file_path": "examples/nilai_with_peers.rs", "rank": 5, "score": 39531.393694104736 }, { "content": "/*\n\n * Copyright 2019 balajijinnah and Contributors\n\n *\n\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n\n * you may not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n * http://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing, software\n\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n * See the License for the specific language governing permissions and\n\n * limitations under the License.\n\n */\n\n\n\nuse super::delegate;\n\nuse super::types::*;\n\nuse super::utils;\n\nuse futures::channel::mpsc;\n", "file_path": "src/nilai_handler.rs", "rank": 6, "score": 35511.43312673174 }, { "content": "\n\n pub(crate) async fn listen(&mut self, peers: Vec<SocketAddr>) {\n\n info!(\"nilai started listening\");\n\n // initialize the handlers and send state sync to all the peers.\n\n let (sender, closer) = oneshot::channel();\n\n self.init(peers, closer).await;\n\n\n\n // listen for incoming messages.\n\n loop {\n\n if let Ok(opt) = self.closer.try_recv() {\n\n if let Some(_) = opt {\n\n if let Err(_) = sender.send(1) {\n\n warn!(\"error while closing probe handler\");\n\n }\n\n info!(\"stoping nilai handler\");\n\n break;\n\n }\n\n }\n\n\n\n match self.msg_rcv.next().await {\n", "file_path": "src/nilai_handler.rs", "rank": 7, "score": 35495.83019987427 }, { "content": " self.send_msg(UdpMessage {\n\n peer: Some(peers[i]),\n\n msg: Message::StateSync(alive_msg),\n\n })\n\n .await;\n\n }\n\n\n\n // Initialize 
the probing.\n\n let mut sender = self.timeout_sender.clone();\n\n let probe_interval = self.probe_interval;\n\n tokio::spawn(async move {\n\n loop {\n\n if let Ok(opt) = closer.try_recv() {\n\n if let Some(_) = opt {\n\n info!(\"stopping probe\");\n\n break;\n\n }\n\n }\n\n match Delay::new(probe_interval).await {\n\n Ok(_) => {\n", "file_path": "src/nilai_handler.rs", "rank": 8, "score": 35487.58765395966 }, { "content": " })\n\n .await;\n\n }\n\n\n\n async fn send_msg(&mut self, msg: UdpMessage) {\n\n match self.msg_sender.send(msg).await {\n\n Err(err) => warn!(\"unable to push to channel {}\", err),\n\n _ => info!(\"pushed to channel successfully\"),\n\n }\n\n }\n\n\n\n async fn handle_probe(&mut self) {\n\n let to_be_probed: String;\n\n loop {\n\n // we don't probe if there is no nodes to probe.\n\n if self.nodes.len() == 1 {\n\n return;\n\n }\n\n if self.probe_id >= self.nodes.len() {\n\n self.probe_id = 0;\n", "file_path": "src/nilai_handler.rs", "rank": 9, "score": 35475.973918115116 }, { "content": "use futures::channel::oneshot;\n\nuse futures::prelude::*;\n\nuse futures_timer::Delay;\n\nuse log::{info, warn};\n\nuse runtime::native::Native;\n\nuse std::cmp::Ordering;\n\nuse std::collections::HashMap;\n\nuse std::net::SocketAddr;\n\nuse std::time::Duration;\n\n\n\n// TODO: sync the state for name change.\n\n// not important right now. But if we needed we'll do it in future.\n\n\n\n/// NilaiHandler is responsible for handling the state. Basically, it is the heart of the nilai.\n\n/// It receives message from channel and updates it's state. 
It also probes all the nodes in\n\n/// the cluster for failure detection and send udp message via channel.\n\npub(crate) struct NilaiHandler {\n\n pub(crate) msg_rcv: mpsc::Receiver<UdpMessage>,\n\n pub(crate) msg_sender: mpsc::Sender<UdpMessage>,\n\n pub(crate) nodes: HashMap<String, Node>,\n", "file_path": "src/nilai_handler.rs", "rank": 10, "score": 35475.002411940404 }, { "content": " // but still okay.\n\n self.handle_alive(msg).await;\n\n }\n\n\n\n async fn handle_state_sync(&mut self, msg: Alive) {\n\n let peer_addr: String = msg.addr.parse().unwrap();\n\n // any ways we'll send our local state to the peer to update itself.\n\n self.send_msg(UdpMessage {\n\n peer: Some(msg.addr.parse().unwrap()),\n\n msg: Message::StateSyncRes(Alive {\n\n addr: self.addr.clone(),\n\n incarnation: self.local_incarnation,\n\n name: self.name.clone(),\n\n }),\n\n })\n\n .await;\n\n // gossip this state sync to the rest of the cluster to have faster\n\n // convergence.\n\n self.gossip(Message::StateSync(msg.clone()), self.gossip_nodes)\n\n .await;\n", "file_path": "src/nilai_handler.rs", "rank": 11, "score": 35474.00772318549 }, { "content": " self.gossip(Message::Alive(alive_msg), self.gossip_nodes)\n\n .await;\n\n }\n\n\n\n async fn gossip(&mut self, msg: Message, k: usize) {\n\n let k_nodes = utils::k_random_nodes(&self.nodes, &self.node_ids, k, |n: &Node| {\n\n match n.state {\n\n State::Dead => true,\n\n // we'll gossip to the both suspect and alive nodes.\n\n _ => false,\n\n }\n\n });\n\n let mut msgs = Vec::new();\n\n for n in k_nodes {\n\n // gossip should be a separate queue and have to\n\n // compound batching based on udp MTU.\n\n msgs.push(UdpMessage {\n\n msg: msg.clone(),\n\n peer: Some(n.addr.parse().unwrap()),\n\n });\n", "file_path": "src/nilai_handler.rs", "rank": 12, "score": 35473.43686178452 }, { "content": " self.handle_suspect(msg).await;\n\n }\n\n Message::SuspectMsgTimeout(msg) => {\n\n info!(\"got suspicious timeout for the node {}\", msg.node_id);\n\n 
self.handle_suspect_timeout(msg).await;\n\n }\n\n Message::Alive(msg) => {\n\n info!(\"got alive message from node {} {}\", msg.name, msg.addr);\n\n self.handle_alive(msg).await;\n\n }\n\n Message::Dead(msg) => {\n\n info!(\"got dead message for the node {}\", msg.node);\n\n self.handle_dead(msg).await;\n\n }\n\n Message::StateSync(msg) => {\n\n info!(\"got state sync request from {} {}\", msg.name, msg.addr);\n\n // It'll update the local state from the message.\n\n self.handle_state_sync(msg).await;\n\n }\n\n Message::StateSyncRes(msg) => {\n", "file_path": "src/nilai_handler.rs", "rank": 13, "score": 35473.4160289258 }, { "content": " break;\n\n }\n\n },\n\n _ => {\n\n break;\n\n }\n\n }\n\n }\n\n assert!(dead_msg_exist);\n\n });\n\n }\n\n\n\n #[test]\n\n fn test_handle_state_sync_res() {\n\n let (mut send, recv) = mpsc::channel(100);\n\n let (send_udp, mut recv_udp) = mpsc::channel(100);\n\n let mut nl = get_mock_nilai(recv, send_udp, send.clone());\n\n let rt = Runtime::new().unwrap();\n\n rt.block_on(async {\n\n nl.handle_state_sync_res(Alive {\n", "file_path": "src/nilai_handler.rs", "rank": 14, "score": 35473.32245916187 }, { "content": " incarnation: 0,\n\n })\n\n .await;\n\n let mut dead_msg_exist = false;\n\n // expect dead msg from the nilai.\n\n loop {\n\n match recv_udp.try_next() {\n\n Ok(msg) => match msg {\n\n Some(udp_msg) => match udp_msg.msg {\n\n Message::Dead(msg) => {\n\n dead_msg_exist = true;\n\n assert_eq!(msg.incarnation, 0);\n\n assert_eq!(msg.node, String::from(\"127.1.1.1:8000\"));\n\n assert_eq!(msg.from, nl.addr);\n\n }\n\n _ => {\n\n continue;\n\n }\n\n },\n\n None => {\n", "file_path": "src/nilai_handler.rs", "rank": 15, "score": 35473.20261000006 }, { "content": " /// init initialize the current node and sends statesync to all the node.\n\n async fn init(&mut self, peers: Vec<SocketAddr>, mut closer: oneshot::Receiver<i32>) {\n\n // initialize local state.\n\n self.nodes.insert(\n\n self.addr.clone(),\n\n Node {\n\n addr: 
self.addr.clone(),\n\n name: self.name.clone(),\n\n state: State::Alive,\n\n incarnation: 0,\n\n },\n\n );\n\n // send state sync message to all the peers.\n\n self.node_ids.push(self.addr.clone());\n\n for i in 0..peers.len() {\n\n let alive_msg = Alive {\n\n name: self.name.clone(),\n\n addr: self.addr.clone(),\n\n incarnation: 0,\n\n };\n", "file_path": "src/nilai_handler.rs", "rank": 16, "score": 35472.90167457417 }, { "content": " // Now you got indirect ack and nl send ack with seq_no 1.\n\n nl.handle_ack(nl.seq_no - 1).await;\n\n\n\n let mut ack_exist = false;\n\n let mut ack_seq_no = 0;\n\n loop {\n\n match recv_udp.try_next() {\n\n Ok(msg) => match msg {\n\n Some(udp_msg) => match udp_msg.msg {\n\n Message::Ack(msg) => {\n\n ack_exist = true;\n\n ack_seq_no = msg.seq_no;\n\n }\n\n _ => {\n\n continue;\n\n }\n\n },\n\n None => {\n\n break;\n\n }\n", "file_path": "src/nilai_handler.rs", "rank": 17, "score": 35472.24167718941 }, { "content": "\n\n fn increment_seq_no(&mut self) {\n\n self.seq_no = self.seq_no + 1;\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n // Note this useful idiom: importing names from outer (for mod tests) scope.\n\n use super::*;\n\n use tokio::runtime::Runtime;\n\n\n\n fn get_mock_nilai(\n\n recv: mpsc::Receiver<UdpMessage>,\n\n send_udp: mpsc::Sender<UdpMessage>,\n\n send: mpsc::Sender<UdpMessage>,\n\n ) -> NilaiHandler {\n\n let (handler_closer_signal, closer) = oneshot::channel();\n\n let mut nl = NilaiHandler {\n\n msg_rcv: recv,\n", "file_path": "src/nilai_handler.rs", "rank": 18, "score": 35472.19492521142 }, { "content": " let to_addr: SocketAddr = node_id.parse().unwrap();\n\n info!(\"sending ping to {}\", node_id);\n\n self.send_msg(UdpMessage {\n\n peer: Some(to_addr),\n\n msg: Message::PingMsg(ping),\n\n })\n\n .await;\n\n }\n\n\n\n async fn spawn_timeout_msg(&mut self, msg: UdpMessage, duration: Duration) {\n\n let mut sender = self.timeout_sender.clone();\n\n tokio::spawn(async move {\n\n 
Delay::new(duration).await;\n\n sender.send(msg).await;\n\n });\n\n }\n\n\n\n fn increment_probe_id(&mut self) {\n\n self.probe_id = self.probe_id + 1;\n\n }\n", "file_path": "src/nilai_handler.rs", "rank": 19, "score": 35472.18994559072 }, { "content": "\n\n if let Some(node) = self.nodes.get(&peer_addr) {\n\n if node.state == State::Dead || node.state == State::Suspect {\n\n // let the node node that it is dead. so that it can refute and update it's\n\n // state and gossip alive state.\n\n self.send_msg(UdpMessage {\n\n peer: Some(msg.addr.parse().unwrap()),\n\n msg: Message::Dead(Dead {\n\n from: self.addr.clone(),\n\n node: msg.addr.clone(),\n\n incarnation: node.incarnation,\n\n }),\n\n })\n\n .await;\n\n return;\n\n }\n\n }\n\n // It's not a dead restart so handle this alive\n\n self.handle_alive(msg).await;\n\n }\n", "file_path": "src/nilai_handler.rs", "rank": 20, "score": 35472.12603484548 }, { "content": " Some(msg) => {\n\n self.handle_msg(msg.peer, msg.msg).await;\n\n }\n\n // Channels are closed so simply quit.\n\n None => break,\n\n }\n\n }\n\n }\n\n\n\n async fn handle_msg(&mut self, from: Option<SocketAddr>, msg: Message) {\n\n match msg {\n\n Message::PingMsg(ping_msg) => {\n\n info!(\"got ping message from {}\", ping_msg.node);\n\n self.handle_ping(from.unwrap(), ping_msg).await;\n\n }\n\n Message::Probe => {\n\n info!(\"got probe interval\");\n\n self.handle_probe().await;\n\n }\n\n Message::PingTimeOut(seq_no) => {\n", "file_path": "src/nilai_handler.rs", "rank": 21, "score": 35471.66787262292 }, { "content": " assert_eq!(nl.ack_checker.len(), 1);\n\n // need some smart way to test it.\n\n // rustaceans please help me here.\n\n let mut indirect_ping_exist = false;\n\n let mut indirect_ping_node = String::from(\"\");\n\n loop {\n\n match recv_udp.try_next() {\n\n Ok(msg) => match msg {\n\n Some(udp_msg) => match udp_msg.msg {\n\n Message::IndirectPingMsg(msg) => {\n\n indirect_ping_node = msg.to;\n\n indirect_ping_exist = true;\n\n }\n\n _ => 
{\n\n continue;\n\n }\n\n },\n\n None => {\n\n break;\n\n }\n", "file_path": "src/nilai_handler.rs", "rank": 22, "score": 35471.56982033695 }, { "content": " None => {\n\n // here you don't care. You just try to remove\n\n // it.\n\n self.ack_checker.remove(&seq_no);\n\n }\n\n }\n\n }\n\n async fn handle_ping(&mut self, from: SocketAddr, msg: Ping) {\n\n self.send_ack(from, msg.seq_no).await;\n\n }\n\n\n\n async fn send_ack(&mut self, from: SocketAddr, seq_no: u32) {\n\n let res = AckRes { seq_no: seq_no };\n\n info!(\n\n \"sending ack response to the peer {} with seqno {}\",\n\n from, res.seq_no\n\n );\n\n self.send_msg(UdpMessage {\n\n msg: Message::Ack(res),\n\n peer: Some(from),\n", "file_path": "src/nilai_handler.rs", "rank": 23, "score": 35471.0926685871 }, { "content": "\n\n // It should mark the node as suspect, because suspect has msg higher incarnation.\n\n assert_eq!(\n\n nl.nodes.get(&String::from(\"127.1.1.1:8000\")).unwrap().state,\n\n State::Suspect\n\n );\n\n });\n\n }\n\n\n\n #[test]\n\n fn test_indirect_ping() {\n\n let (mut send, recv) = mpsc::channel(100);\n\n let (send_udp, mut recv_udp) = mpsc::channel(100);\n\n let mut nl = get_mock_nilai(recv, send_udp, send.clone());\n\n let rt = Runtime::new().unwrap();\n\n rt.block_on(async {\n\n nl.handle_alive(Alive {\n\n name: String::from(\"node 2\"),\n\n addr: String::from(\"127.1.1.1:8000\"),\n\n incarnation: 2,\n", "file_path": "src/nilai_handler.rs", "rank": 24, "score": 35469.81282811471 }, { "content": " self.increment_probe_id();\n\n continue;\n\n }\n\n _ => {\n\n // send ping message.\n\n to_be_probed = node_id.to_string();\n\n break;\n\n }\n\n }\n\n }\n\n self.increment_probe_id();\n\n }\n\n\n\n let seq_no = self.seq_no.clone();\n\n self.ack_checker.insert(seq_no, to_be_probed.to_string());\n\n // send ping message and spawn the ping timeout, so that if there is no ack,\n\n // we can mark this node as suspect.\n\n self.send_ping(seq_no, &to_be_probed).await;\n\n let msg = UdpMessage {\n\n 
peer: None,\n", "file_path": "src/nilai_handler.rs", "rank": 25, "score": 35469.79264073122 }, { "content": " UdpMessage {\n\n msg: suspect_timeout,\n\n peer: None,\n\n },\n\n Duration::from_millis(self.probe_timeout.as_secs() * self.suspicious_multiplier),\n\n )\n\n .await;\n\n }\n\n\n\n async fn handle_ack(&mut self, seq_no: u32) {\n\n // first you check wether it is indirect ping or our ping.\n\n // if it is indirect ping send ack res to from node.\n\n // other wise clear ouetrself.\n\n // removing the checker so that when probe timeout come.\n\n // we know that ack have come so no need to send indirect ping.\n\n match self.indirect_ack_checker.get(&seq_no) {\n\n Some(indirect_msg) => {\n\n let from = indirect_msg.from.parse().unwrap();\n\n self.send_ack(from, indirect_msg.seq_no).await;\n\n }\n", "file_path": "src/nilai_handler.rs", "rank": 26, "score": 35469.38646040739 }, { "content": " if let Err(e) = sender\n\n .send(UdpMessage {\n\n peer: None,\n\n msg: Message::Probe,\n\n })\n\n .await\n\n {\n\n // need to understand how channel work in rust. will this block?\n\n // or we only get here only if the channel is closed.\n\n warn!(\"I think channel is closed. so breaking the probe {}\", e);\n\n break;\n\n }\n\n }\n\n Err(e) => {\n\n warn!(\"something went wrong on probing future {}\", e);\n\n }\n\n }\n\n }\n\n });\n\n }\n", "file_path": "src/nilai_handler.rs", "rank": 27, "score": 35469.013192214996 }, { "content": " name: suspect_node.name.to_string(),\n\n incarnation: suspect_node.incarnation,\n\n state: State::Dead,\n\n });\n\n }\n\n self.gossip(Message::Dead(dead_msg), self.gossip_nodes)\n\n .await;\n\n }\n\n None => return,\n\n }\n\n }\n\n\n\n async fn handle_suspect(&mut self, msg: Suspect) {\n\n let suspect_node = self.nodes.get(&msg.node);\n\n if !suspect_node.is_some() {\n\n // unknown node. 
So, let's not talk about it.\n\n return;\n\n }\n\n let suspect_node = suspect_node.unwrap();\n\n match suspect_node.state {\n", "file_path": "src/nilai_handler.rs", "rank": 28, "score": 35468.79016109812 }, { "content": " let local_state = self.nodes.get_mut(&msg.addr);\n\n match local_state {\n\n Some(local_state) => {\n\n // we'll just ignore if the state matches with the local state\n\n // so we don't do any thing. So that gossip will get avoided.\n\n // since we're going any queueing. This will work for us time\n\n // being.\n\n if local_state.incarnation >= msg.incarnation {\n\n // This is some old message.\n\n // May be node crashed and restarted. So send a dead\n\n return;\n\n }\n\n // we'll send notification only if the previous state is dead or suspect.\n\n if local_state.state == State::Dead || local_state.state == State::Suspect {\n\n // send notification to delegate if any. cuz it's\n\n // restarted\n\n if let Some(delegate) = &self.alive_delegate {\n\n delegate(Node {\n\n addr: msg.addr.clone(),\n\n name: msg.name.clone(),\n", "file_path": "src/nilai_handler.rs", "rank": 29, "score": 35468.71728705121 }, { "content": " )\n\n .await;\n\n }\n\n None => {\n\n // This means, we got ping no need to do any thing.\n\n return;\n\n }\n\n }\n\n let msg = UdpMessage {\n\n msg: Message::IndirectPingTimeout(seq_no),\n\n peer: None,\n\n };\n\n self.spawn_timeout_msg(msg, self.probe_timeout).await;\n\n }\n\n\n\n async fn send_ping(&mut self, seq_no: u32, node_id: &String) {\n\n let ping = Ping {\n\n seq_no: seq_no,\n\n node: node_id.to_string(),\n\n };\n", "file_path": "src/nilai_handler.rs", "rank": 30, "score": 35468.37191942127 }, { "content": " info!(\"got state sync response from {}\", msg.addr);\n\n self.handle_state_sync_res(msg).await;\n\n }\n\n }\n\n }\n\n\n\n async fn handle_state_sync_res(&mut self, msg: Alive) {\n\n if let Some(node) = self.nodes.get_mut(&msg.addr) {\n\n if node.incarnation > msg.incarnation {\n\n // ignore if it is old message\n\n 
return;\n\n }\n\n // update the local state\n\n node.incarnation = msg.incarnation;\n\n node.state = State::Alive;\n\n node.name = msg.name;\n\n return;\n\n }\n\n // just update the local state and gossip.\n\n // actually no need to gossip here.\n", "file_path": "src/nilai_handler.rs", "rank": 31, "score": 35468.33017317442 }, { "content": " },\n\n _ => {\n\n break;\n\n }\n\n }\n\n }\n\n assert!(ack_exist);\n\n assert_eq!(ack_seq_no, 5);\n\n });\n\n }\n\n #[test]\n\n fn test_handle_state_sync() {\n\n let (mut send, recv) = mpsc::channel(100);\n\n let (send_udp, mut recv_udp) = mpsc::channel(100);\n\n let mut nl = get_mock_nilai(recv, send_udp, send.clone());\n\n let rt = Runtime::new().unwrap();\n\n rt.block_on(async {\n\n nl.handle_state_sync(Alive {\n\n addr: String::from(\"127.1.1.1:8000\"),\n\n name: String::from(\"node 2\"),\n", "file_path": "src/nilai_handler.rs", "rank": 32, "score": 35468.138493319515 }, { "content": " let (send_udp, mut recv_udp) = mpsc::channel(100);\n\n let mut nl = get_mock_nilai(recv, send_udp, send.clone());\n\n let rt = Runtime::new().unwrap();\n\n rt.block_on(async {\n\n nl.handle_alive(Alive {\n\n name: String::from(\"node 2\"),\n\n addr: String::from(\"127.1.1.1:8000\"),\n\n incarnation: 2,\n\n })\n\n .await;\n\n assert_eq!(nl.nodes.len(), 2);\n\n\n\n // send suspect message with less incarnation number.\n\n nl.handle_suspect(Suspect {\n\n node: String::from(\"127.1.1.1:8000\"),\n\n incarnation: 1,\n\n from: String::from(\"127.1.1.0:8000\"),\n\n })\n\n .await;\n\n\n", "file_path": "src/nilai_handler.rs", "rank": 33, "score": 35468.09096960633 }, { "content": " msg: Message::PingTimeOut(seq_no),\n\n };\n\n self.spawn_timeout_msg(msg, self.probe_timeout).await;\n\n self.increment_seq_no();\n\n }\n\n\n\n async fn handle_timeout(&mut self, seq_no: u32) {\n\n let node_id = self.ack_checker.get(&seq_no);\n\n match node_id {\n\n Some(id) => {\n\n let indirect_ping = IndirectPing {\n\n seq_no: seq_no,\n\n to: id.to_string(),\n\n from: 
self.addr.to_string(),\n\n };\n\n // TODO: gossip may send indirect ping\n\n // to target node.so use k_node here.\n\n self.gossip(\n\n Message::IndirectPingMsg(indirect_ping),\n\n self.indirect_checks,\n", "file_path": "src/nilai_handler.rs", "rank": 34, "score": 35467.85278090855 }, { "content": " // send notification to delegate if any.\n\n if let Some(delegate) = &self.dead_delegate {\n\n delegate(Node {\n\n addr: dead_node.addr.to_string(),\n\n name: dead_node.name.to_string(),\n\n incarnation: dead_node.incarnation,\n\n state: State::Dead,\n\n });\n\n }\n\n self.gossip(Message::Dead(msg), self.gossip_nodes).await;\n\n }\n\n }\n\n None => {\n\n // some random node. So, ignoring it.\n\n return;\n\n }\n\n }\n\n }\n\n\n\n async fn handle_alive(&mut self, msg: Alive) {\n", "file_path": "src/nilai_handler.rs", "rank": 35, "score": 35467.68247991448 }, { "content": " async fn handle_suspect_timeout(&mut self, msg: SuspectTimeout) {\n\n let suspect_node = self.nodes.get_mut(&msg.node_id);\n\n match suspect_node {\n\n Some(suspect_node) => {\n\n if suspect_node.incarnation > msg.incarnation {\n\n // okay we got refute message. No need to progress further.\n\n return;\n\n }\n\n // okay we're confirming as dead.\n\n suspect_node.state = State::Dead;\n\n // We find this node as dead so gossip it.\n\n let dead_msg = Dead {\n\n from: self.name.to_string(),\n\n node: suspect_node.addr.to_string(),\n\n incarnation: msg.incarnation,\n\n };\n\n // send notification to delegate if any.\n\n if let Some(delegate) = &self.dead_delegate {\n\n delegate(Node {\n\n addr: suspect_node.addr.to_string(),\n", "file_path": "src/nilai_handler.rs", "rank": 36, "score": 35467.43466302024 }, { "content": "\n\n async fn handle_dead(&mut self, msg: Dead) {\n\n let dead_node = self.nodes.get_mut(&msg.node);\n\n match dead_node {\n\n Some(dead_node) => {\n\n if dead_node.incarnation > msg.incarnation {\n\n // old incarnation number. 
So, ignore it.\n\n return;\n\n }\n\n\n\n if msg.node == self.addr {\n\n // If this is about us then, just refute.\n\n self.refute(msg.incarnation).await;\n\n return;\n\n }\n\n // if the message is new then update and gossip.\n\n if dead_node.state != State::Dead && dead_node.incarnation != msg.incarnation {\n\n info!(\"marking {} as a dead node\", dead_node.addr);\n\n dead_node.state = State::Dead;\n\n dead_node.incarnation = msg.incarnation;\n", "file_path": "src/nilai_handler.rs", "rank": 37, "score": 35467.132408078825 }, { "content": " };\n\n let suspect_incarnation = suspect_node.incarnation;\n\n let suspect_node_addr = suspect_node.addr.to_string();\n\n // here we'll send suspect message to the node it self\n\n // so that it can refute.\n\n self.send_msg(UdpMessage {\n\n peer: Some(suspect_msg.node.parse().unwrap()),\n\n msg: Message::SuspectMsg(suspect_msg.clone()),\n\n })\n\n .await;\n\n // Now gossip the message across the cluster.\n\n self.gossip(Message::SuspectMsg(suspect_msg), self.gossip_nodes)\n\n .await;\n\n // If we didn't get any alive message with higher incarnation number\n\n // we'll consider this node as dead.\n\n let suspect_timeout = Message::SuspectMsgTimeout(SuspectTimeout {\n\n node_id: suspect_node_addr,\n\n incarnation: suspect_incarnation,\n\n });\n\n self.spawn_timeout_msg(\n", "file_path": "src/nilai_handler.rs", "rank": 38, "score": 35467.10513741275 }, { "content": " let s_n_id = s_n_id.unwrap().to_string();\n\n info!(\"got indirect ping timeout {}\", s_n_id);\n\n self.suspect_node(&s_n_id).await;\n\n }\n\n\n\n async fn suspect_node(&mut self, suspect_node_id: &String) {\n\n // choose k random node and gossip that suspect. 
and also do\n\n // dead timer.\n\n let suspect_node = self.nodes.get_mut(suspect_node_id);\n\n if !suspect_node.is_some() {\n\n // node may be left.\n\n // we need some method for leaving the cluster.\n\n return;\n\n }\n\n let suspect_node = suspect_node.unwrap();\n\n suspect_node.state = State::Suspect;\n\n let suspect_msg = Suspect {\n\n from: self.addr.to_string(),\n\n node: suspect_node.addr.to_string(),\n\n incarnation: suspect_node.incarnation,\n", "file_path": "src/nilai_handler.rs", "rank": 39, "score": 35466.65603248708 }, { "content": " assert_eq!(node_2.state, State::Suspect);\n\n\n\n // sending with higher incarnation number so that it'll mark\n\n // the state as alive\n\n nl.gossip_nodes = 1;\n\n nl.handle_alive(Alive {\n\n name: String::from(\"node 2\"),\n\n addr: String::from(\"127.1.1.1:8000\"),\n\n incarnation: 3,\n\n })\n\n .await;\n\n let node_2 = nl.nodes.get_mut(&String::from(\"127.1.1.1:8000\")).unwrap();\n\n assert_eq!(node_2.state, State::Alive);\n\n });\n\n }\n\n\n\n #[test]\n\n fn test_handle_dead() {\n\n let (mut send, recv) = mpsc::channel(100);\n\n let (send_udp, mut recv_udp) = mpsc::channel(100);\n", "file_path": "src/nilai_handler.rs", "rank": 40, "score": 35466.04170787734 }, { "content": " assert_eq!(\n\n nl.nodes.get(&indirect_ping_node).unwrap().state,\n\n State::Suspect\n\n );\n\n\n\n nl.handle_suspect_timeout(SuspectTimeout {\n\n incarnation: 2,\n\n node_id: indirect_ping_node.clone(),\n\n })\n\n .await;\n\n // not refuted, so it'll mark the node as dead.\n\n assert_eq!(\n\n nl.nodes.get(&indirect_ping_node).unwrap().state,\n\n State::Dead\n\n );\n\n });\n\n }\n\n #[test]\n\n fn test_suspect_msg() {\n\n let (mut send, recv) = mpsc::channel(100);\n", "file_path": "src/nilai_handler.rs", "rank": 41, "score": 35465.695482253075 }, { "content": " pub(crate) seq_no: u32,\n\n pub(crate) node_ids: Vec<String>,\n\n pub(crate) ack_checker: HashMap<u32, String>,\n\n pub(crate) probe_id: usize,\n\n pub(crate) timeout_sender: 
mpsc::Sender<UdpMessage>,\n\n pub(crate) name: String,\n\n pub(crate) indirect_ack_checker: HashMap<u32, IndirectPing>,\n\n pub(crate) local_incarnation: u32,\n\n pub(crate) addr: String,\n\n pub(crate) indirect_checks: usize,\n\n pub(crate) gossip_nodes: usize,\n\n pub(crate) probe_timeout: Duration,\n\n pub(crate) probe_interval: Duration,\n\n pub(crate) suspicious_multiplier: u64,\n\n pub(crate) alive_delegate: Option<delegate::Handler>,\n\n pub(crate) dead_delegate: Option<delegate::Handler>,\n\n pub(crate) closer: oneshot::Receiver<i32>,\n\n}\n\n\n\nimpl NilaiHandler {\n", "file_path": "src/nilai_handler.rs", "rank": 42, "score": 35465.6011568538 }, { "content": " let node_2 = nl.nodes.get_mut(&String::from(\"127.1.1.1:8000\")).unwrap();\n\n assert_eq!(node_2.state, State::Alive);\n\n\n\n // dead with high incarnation number so that\n\n // it will considered as dead.\n\n nl.handle_dead(Dead {\n\n from: String::from(\"127.0.0.0:8000\"),\n\n node: String::from(\"127.1.1.1:8000\"),\n\n incarnation: 3,\n\n })\n\n .await;\n\n let node_2 = nl.nodes.get_mut(&String::from(\"127.1.1.1:8000\")).unwrap();\n\n assert_eq!(node_2.state, State::Dead);\n\n // node is restarted so it is sending alive\n\n // with 0 incarnation now Nilai should send dead message.\n\n nl.handle_alive(Alive {\n\n name: String::from(\"node 2\"),\n\n addr: String::from(\"127.1.1.1:8000\"),\n\n incarnation: 0,\n\n })\n", "file_path": "src/nilai_handler.rs", "rank": 43, "score": 35465.55291505635 }, { "content": " }\n\n for msg in msgs {\n\n self.send_msg(msg).await;\n\n }\n\n }\n\n\n\n async fn handle_indirect_ping(&mut self, msg: IndirectPing) {\n\n self.send_ping(self.seq_no, &msg.to).await;\n\n self.indirect_ack_checker.insert(self.seq_no, msg);\n\n self.increment_seq_no();\n\n // do we need to timeout here to delete the ack checker?\n\n }\n\n\n\n async fn handle_indirect_ping_timeout(&mut self, seq_no: u32) {\n\n let s_n_id = self.ack_checker.get(&seq_no);\n\n if !s_n_id.is_some() {\n\n // we 
received ack so simply return.\n\n return;\n\n }\n\n\n", "file_path": "src/nilai_handler.rs", "rank": 44, "score": 35465.25544030644 }, { "content": " let mut nl = get_mock_nilai(recv, send_udp, send.clone());\n\n nl.gossip_nodes = 2;\n\n let mut rt = Runtime::new().unwrap();\n\n rt.block_on(async {\n\n nl.handle_alive(Alive {\n\n name: String::from(\"node 2\"),\n\n addr: String::from(\"127.1.1.1:8000\"),\n\n incarnation: 2,\n\n })\n\n .await;\n\n assert_eq!(nl.nodes.len(), 2);\n\n\n\n // dead with lower incarnation number so that\n\n // it won't considered as dead.\n\n nl.handle_dead(Dead {\n\n from: String::from(\"127.0.0.0:8000\"),\n\n node: String::from(\"127.1.1.1:8000\"),\n\n incarnation: 1,\n\n })\n\n .await;\n", "file_path": "src/nilai_handler.rs", "rank": 45, "score": 35465.15439576254 }, { "content": " incarnation: msg.incarnation,\n\n state: State::Alive,\n\n });\n\n }\n\n }\n\n // suspect may turn into alive on this update.\n\n local_state.state = State::Alive;\n\n // let's update the local state and gossip it.\n\n local_state.incarnation = msg.incarnation;\n\n self.gossip(Message::Alive(msg), self.gossip_nodes).await;\n\n }\n\n None => {\n\n // This is some new node. 
So, just insert the\n\n // node into your list and gossip.\n\n info!(\"some new node is joining the cluster yay!!\");\n\n self.nodes.insert(\n\n msg.addr.to_string(),\n\n Node {\n\n addr: msg.addr.clone(),\n\n name: msg.name.clone(),\n", "file_path": "src/nilai_handler.rs", "rank": 46, "score": 35465.10956242212 }, { "content": " info!(\"got ping timeout\");\n\n self.handle_timeout(seq_no).await;\n\n }\n\n Message::Ack(ack_res) => {\n\n info!(\"got ack res for the seq_no {}\", ack_res.seq_no);\n\n self.handle_ack(ack_res.seq_no).await;\n\n }\n\n Message::IndirectPingTimeout(seq_no) => {\n\n info!(\"got indirect ping timeout {}\", seq_no);\n\n self.handle_indirect_ping_timeout(seq_no).await;\n\n }\n\n Message::IndirectPingMsg(msg) => {\n\n info!(\"got indirect ping message\");\n\n self.handle_indirect_ping(msg).await;\n\n }\n\n Message::SuspectMsg(msg) => {\n\n info!(\n\n \"got suspect message for the node {} from {}\",\n\n msg.node, msg.from\n\n );\n", "file_path": "src/nilai_handler.rs", "rank": 47, "score": 35465.046668082745 }, { "content": " State::Alive => {\n\n // we'll send suspect and do the the dead timeout.\n\n }\n\n State::Suspect => {\n\n // already suspected no need to suspect more. when suspect timeout\n\n // happens we'll gossip dead message. In memeberlist they have timer\n\n // check for faster gossip. Hope, time will let us to do those features.\n\n // It is good to have not for now. 
But, we'll do it.\n\n return;\n\n }\n\n State::Dead => {\n\n // already dead so no need to suspect.\n\n return;\n\n }\n\n }\n\n if msg.incarnation < suspect_node.incarnation {\n\n // old suspect message so just ignore it.\n\n return;\n\n }\n\n if msg.node == self.addr {\n", "file_path": "src/nilai_handler.rs", "rank": 48, "score": 35464.72697774392 }, { "content": " nl.nodes.insert(\n\n String::from(\"127.0.0.0:8000\"),\n\n Node {\n\n addr: String::from(\"127.0.0.0:8000\"),\n\n name: String::from(\"yo\"),\n\n state: State::Alive,\n\n incarnation: 0,\n\n },\n\n );\n\n nl.node_ids.push(String::from(\"127.0.0.0:8000\"));\n\n return nl;\n\n }\n\n #[test]\n\n fn test_handle_alive() {\n\n let (mut send, recv) = mpsc::channel(100);\n\n let (send_udp, recv_udp) = mpsc::channel(100);\n\n let mut nl = get_mock_nilai(recv, send_udp, send.clone());\n\n let mut rt = Runtime::new().unwrap();\n\n rt.block_on(async {\n\n nl.handle_alive(Alive {\n", "file_path": "src/nilai_handler.rs", "rank": 49, "score": 35464.152331160694 }, { "content": " .await;\n\n });\n\n }\n\n #[test]\n\n fn test_handle_timeout() {\n\n let (mut send, recv) = mpsc::channel(100);\n\n let (send_udp, mut recv_udp) = mpsc::channel(100);\n\n let mut nl = get_mock_nilai(recv, send_udp, send.clone());\n\n nl.indirect_checks = 2;\n\n nl.gossip_nodes = 3;\n\n let rt = Runtime::new().unwrap();\n\n rt.block_on(async {\n\n nl.handle_alive(Alive {\n\n name: String::from(\"node 2\"),\n\n addr: String::from(\"127.1.1.1:8000\"),\n\n incarnation: 2,\n\n })\n\n .await;\n\n assert_eq!(nl.nodes.len(), 2);\n\n nl.handle_probe().await;\n", "file_path": "src/nilai_handler.rs", "rank": 50, "score": 35463.64364931257 }, { "content": " // now probe should send ping message\n\n let _ = recv_udp.try_next().unwrap().unwrap();\n\n nl.handle_ack(nl.seq_no - 1).await;\n\n // now there should not be ack checker\n\n assert_eq!(nl.ack_checker.len(), 0);\n\n assert_eq!(\n\n nl.nodes.get(&String::from(\"127.1.1.1:8000\")).unwrap().state,\n\n 
State::Alive\n\n );\n\n // add one more node.\n\n nl.handle_alive(Alive {\n\n name: String::from(\"node 3\"),\n\n addr: String::from(\"127.1.1.3:8000\"),\n\n incarnation: 2,\n\n })\n\n .await;\n\n nl.handle_probe().await;\n\n // now probe should send ping message\n\n let _ = recv_udp.try_next().unwrap().unwrap();\n\n nl.handle_timeout(nl.seq_no - 1).await;\n", "file_path": "src/nilai_handler.rs", "rank": 51, "score": 35463.59911428706 }, { "content": " // now we send a refute message.\n\n nl.handle_alive(Alive {\n\n name: String::from(\"node 2\"),\n\n addr: String::from(\"127.1.1.1:8000\"),\n\n incarnation: 3,\n\n })\n\n .await;\n\n // after refute it should be alive.\n\n assert_eq!(\n\n nl.nodes.get(&String::from(\"127.1.1.1:8000\")).unwrap().state,\n\n State::Alive\n\n );\n\n\n\n // send suspect message with higher incarnation number.\n\n nl.handle_suspect(Suspect {\n\n node: String::from(\"127.1.1.1:8000\"),\n\n incarnation: 4,\n\n from: String::from(\"127.1.1.0:8000\"),\n\n })\n\n .await;\n", "file_path": "src/nilai_handler.rs", "rank": 52, "score": 35463.2182141452 }, { "content": " incarnation: 0,\n\n })\n\n .await;\n\n\n\n // It has to be updated in the local node details.\n\n assert_eq!(nl.node_ids.len(), 2);\n\n assert_eq!(nl.node_ids[1], String::from(\"127.1.1.1:8000\"));\n\n\n\n let mut node_2 = nl.nodes.get_mut(&String::from(\"127.1.1.1:8000\")).unwrap();\n\n\n\n assert_eq!(node_2.name, String::from(\"node 2\"));\n\n assert_eq!(node_2.addr, String::from(\"127.1.1.1:8000\"));\n\n assert_eq!(node_2.incarnation, 0);\n\n assert_eq!(node_2.state, State::Alive);\n\n\n\n // let's mark this node as dead and send state sync.\n\n node_2.state = State::Dead;\n\n nl.handle_state_sync(Alive {\n\n addr: String::from(\"127.1.1.1:8000\"),\n\n name: String::from(\"node 2\"),\n", "file_path": "src/nilai_handler.rs", "rank": 53, "score": 35462.50962788314 }, { "content": " // It's me who is been suspected.\n\n // so refute it.\n\n self.refute(msg.incarnation).await;\n\n 
return;\n\n }\n\n // suspect node.\n\n self.suspect_node(&msg.node).await;\n\n }\n\n\n\n async fn refute(&mut self, mut incarnation: u32) {\n\n if self.local_incarnation <= incarnation {\n\n // advancing incarnation number.\n\n incarnation = incarnation + 1;\n\n self.local_incarnation = incarnation;\n\n }\n\n let alive_msg = Alive {\n\n name: self.name.to_string(),\n\n addr: self.addr.parse().unwrap(),\n\n incarnation: self.local_incarnation,\n\n };\n", "file_path": "src/nilai_handler.rs", "rank": 54, "score": 35462.4744825782 }, { "content": " state: State::Alive,\n\n incarnation: msg.incarnation,\n\n },\n\n );\n\n self.node_ids.push(msg.addr.clone());\n\n // send notification if any delegate.\n\n if let Some(delegate) = &self.alive_delegate {\n\n delegate(Node {\n\n addr: msg.addr.clone(),\n\n name: msg.name.clone(),\n\n incarnation: msg.incarnation,\n\n state: State::Alive,\n\n });\n\n }\n\n self.gossip(Message::Alive(msg), self.gossip_nodes).await;\n\n return;\n\n }\n\n }\n\n }\n\n\n", "file_path": "src/nilai_handler.rs", "rank": 55, "score": 35462.01277623184 }, { "content": " name: String::from(\"node 2\"),\n\n addr: String::from(\"127.1.1.1:8000\"),\n\n incarnation: 2,\n\n })\n\n .await;\n\n // self and alive node. 
So, it is two.\n\n assert_eq!(nl.nodes.len(), 2);\n\n\n\n // checking suspect refute for the suspect.\n\n let node_2 = nl.nodes.get_mut(&String::from(\"127.1.1.1:8000\")).unwrap();\n\n node_2.state = State::Suspect;\n\n // sending alive with less incarnation number so that\n\n // it won't update the state\n\n nl.handle_alive(Alive {\n\n name: String::from(\"node 2\"),\n\n addr: String::from(\"127.1.1.1:8000\"),\n\n incarnation: 1,\n\n })\n\n .await;\n\n let node_2 = nl.nodes.get_mut(&String::from(\"127.1.1.1:8000\")).unwrap();\n", "file_path": "src/nilai_handler.rs", "rank": 56, "score": 35461.89986471968 }, { "content": " // It should not mark the node as suspect, because suspect has less incarnation.\n\n assert_eq!(\n\n nl.nodes.get(&String::from(\"127.1.1.1:8000\")).unwrap().state,\n\n State::Alive\n\n );\n\n\n\n // send suspect message with same incarnation number.\n\n nl.handle_suspect(Suspect {\n\n node: String::from(\"127.1.1.1:8000\"),\n\n incarnation: 2,\n\n from: String::from(\"127.1.1.0:8000\"),\n\n })\n\n .await;\n\n\n\n // It should mark the node as suspect, because suspect has same incarnation.\n\n assert_eq!(\n\n nl.nodes.get(&String::from(\"127.1.1.1:8000\")).unwrap().state,\n\n State::Suspect\n\n );\n\n\n", "file_path": "src/nilai_handler.rs", "rank": 57, "score": 35461.00476454865 }, { "content": " addr: String::from(\"127.1.1.1:8000\"),\n\n name: String::from(\"node 2\"),\n\n incarnation: 0,\n\n })\n\n .await;\n\n // The above should update the nl state.\n\n assert_eq!(nl.nodes.len(), 2);\n\n assert_eq!(nl.node_ids.len(), 2);\n\n let alive_node = nl.nodes.get_mut(&String::from(\"127.1.1.1:8000\")).unwrap();\n\n assert_eq!(alive_node.state, State::Alive);\n\n assert_eq!(alive_node.incarnation, 0);\n\n assert_eq!(alive_node.name, String::from(\"node 2\"));\n\n\n\n // mark this node as dead.\n\n alive_node.state = State::Dead;\n\n\n\n // alive with higher incarnation.\n\n nl.handle_state_sync_res(Alive {\n\n addr: 
String::from(\"127.1.1.1:8000\"),\n\n name: String::from(\"node 2\"),\n", "file_path": "src/nilai_handler.rs", "rank": 58, "score": 35460.45695614476 }, { "content": " return;\n\n }\n\n let node_id = self.node_ids.get(self.probe_id).unwrap();\n\n // sanity check\n\n match node_id.cmp(&self.addr) {\n\n Ordering::Equal => {\n\n // This is us so no need to probe.\n\n self.increment_probe_id();\n\n continue;\n\n }\n\n _ => {\n\n // this is someone else so move forward.\n\n }\n\n }\n\n let node = self.nodes.get(node_id);\n\n if node.is_some() {\n\n let node = node.unwrap();\n\n match node.state {\n\n State::Dead => {\n\n // no need to send message.\n", "file_path": "src/nilai_handler.rs", "rank": 59, "score": 35460.00818909982 }, { "content": " },\n\n _ => {\n\n break;\n\n }\n\n }\n\n }\n\n assert_eq!(indirect_ping_exist, true);\n\n assert_eq!(nl.ack_checker.len(), 1);\n\n // so there no indirect ack and we'll do indirect ping timeout.\n\n nl.handle_indirect_ping_timeout(nl.seq_no - 1).await;\n\n assert_eq!(\n\n nl.nodes.get(&indirect_ping_node).unwrap().state,\n\n State::Suspect\n\n );\n\n nl.handle_suspect_timeout(SuspectTimeout {\n\n incarnation: 0,\n\n node_id: indirect_ping_node.clone(),\n\n })\n\n .await;\n\n // lesser incarnation number means it'll ignore.\n", "file_path": "src/nilai_handler.rs", "rank": 60, "score": 35459.73411512203 }, { "content": " msg_sender: send_udp,\n\n nodes: HashMap::default(),\n\n seq_no: 0,\n\n node_ids: Vec::default(),\n\n ack_checker: HashMap::default(),\n\n probe_id: 0,\n\n timeout_sender: send,\n\n name: String::from(\"yo\"),\n\n indirect_ack_checker: HashMap::default(),\n\n local_incarnation: 0,\n\n addr: String::from(\"127.0.0.0:8000\"),\n\n alive_delegate: None,\n\n dead_delegate: None,\n\n indirect_checks: 0,\n\n gossip_nodes: 0,\n\n probe_timeout: Duration::from_millis(200),\n\n probe_interval: Duration::from_millis(200),\n\n suspicious_multiplier: 2,\n\n closer: closer,\n\n };\n", "file_path": "src/nilai_handler.rs", "rank": 
61, "score": 35458.573658440386 }, { "content": " })\n\n .await;\n\n nl.handle_alive(Alive {\n\n name: String::from(\"node 3\"),\n\n addr: String::from(\"127.3.3.3:8000\"),\n\n incarnation: 2,\n\n })\n\n .await;\n\n assert_eq!(nl.nodes.len(), 3);\n\n\n\n // send indirect ping from node 3 to node 2.\n\n nl.handle_indirect_ping(IndirectPing {\n\n seq_no: 5,\n\n to: String::from(\"127.1.1.1:8000\"),\n\n from: String::from(\"127.3.3.3:8000\"),\n\n })\n\n .await;\n\n\n\n assert_eq!(nl.indirect_ack_checker.len(), 1);\n\n\n", "file_path": "src/nilai_handler.rs", "rank": 62, "score": 35457.4246273234 }, { "content": " incarnation: 1,\n\n })\n\n .await;\n\n\n\n // let's check the state.\n\n let alive_node = nl.nodes.get(&String::from(\"127.1.1.1:8000\")).unwrap();\n\n assert_eq!(alive_node.state, State::Alive);\n\n });\n\n }\n\n}\n", "file_path": "src/nilai_handler.rs", "rank": 63, "score": 35455.682851279096 }, { "content": "/*\n\n * Copyright 2019 balajijinnah and Contributors\n\n *\n\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n\n * you may not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n * http://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing, software\n\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n * See the License for the specific language governing permissions and\n\n * limitations under the License.\n\n */\n\n\n\n\n\nuse futures::channel::oneshot;\n\n// NilaiCloser close all the channels which let to close the nilai handler.\n\npub struct NilaiCloser {\n", "file_path": "src/closer.rs", "rank": 64, "score": 23966.975886641256 }, { "content": " pub(crate) handler_signal: oneshot::Sender<i32>,\n\n pub(crate) transport_receiver_signal: oneshot::Sender<i32>,\n\n pub(crate) transport_sender_signal: 
oneshot::Sender<i32>,\n\n}\n\n\n\nimpl NilaiCloser {\n\n /// stop will close the nilai handler. But it won't wait until every resource will\n\n /// cleaned up. But it'll send signal to the respective resource to stop itself.\n\n pub fn stop(self) {\n\n // stop receiving udp packets.\n\n self.transport_receiver_signal.send(1);\n\n\n\n // stop handler.\n\n self.handler_signal.send(1);\n\n\n\n // stop sending packets.\n\n self.transport_sender_signal.send(1);\n\n\n\n // It'll be good to wait here until everything stops.\n\n // use some golang waitgroup alternative.\n\n }\n\n}\n", "file_path": "src/closer.rs", "rank": 65, "score": 23921.600797937208 }, { "content": "### Nilai-rs\n\n\n\nNilai is a simple, lightweight embedded failure detection protocol based on SWIM. Nilai detects the failure state by pinging every node in a cluster by round-robin fashion.\n\n\n\nIt spreads the node state by infection style. Nilai is lightweight because it's built on top of tokio runtime.\n\n\n\nNilai derived from thamizh word (நிலை), which means state.\n\n\n\n\n\n### Example \n\n\n\n```rust\n\nuse failure::Error;\n\nuse nilai::builder;\n\nuse nilai::types;\n\nuse std::thread;\n\nuse std::time::Duration;\n\n\n\nfn do_main() -> Result<(), Error> {\n\n let nilai_builder = builder::NilaiBuilder::new(\"127.0.0.1:5001\".parse()?);\n\n let closer = nilai_builder\n\n .alive_delegate(Box::new(|_: types::Node| println!(\"new node joined\")))\n\n .execute()?;\n\n // nilai is running so block the current thread.\n\n thread::sleep(Duration::from_secs(5));\n\n closer.stop();\n\n Ok(())\n\n}\n\n\n\nfn main() {\n\n match do_main() {\n\n Err(err) => {\n\n println!(\"not able to run nilai handler {:?}\", err);\n\n }\n\n _ => {}\n\n }\n\n}\n\n\n\n\n", "file_path": "README.md", "rank": 76, "score": 22611.978424249024 }, { "content": "/*\n\n * Copyright 2019 balajijinnah and Contributors\n\n *\n\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n\n * you may not use this file except in 
compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n * http://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing, software\n\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n * See the License for the specific language governing permissions and\n\n * limitations under the License.\n\n */\n\n\n\nuse failure::Error;\n\nuse nilai::builder;\n\nuse nilai::types;\n\nuse std::thread;\n\nuse std::time::Duration;\n\n\n", "file_path": "examples/nilai.rs", "rank": 77, "score": 18837.72124755293 }, { "content": "/*\n\n * Copyright 2019 balajijinnah and Contributors\n\n *\n\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n\n * you may not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n * http://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing, software\n\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n * See the License for the specific language governing permissions and\n\n * limitations under the License.\n\n */\n\n\n\npub mod builder;\n\npub mod closer;\n\npub mod delegate;\n\nmod nilai_handler;\n", "file_path": "src/lib.rs", "rank": 78, "score": 71.79552959663374 }, { "content": "/*\n\n * Copyright 2019 balajijinnah and Contributors\n\n *\n\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n\n * you may not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n * http://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing, software\n\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n\n * WITHOUT 
WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n * See the License for the specific language governing permissions and\n\n * limitations under the License.\n\n */\n\n\n\n\n\nuse super::closer;\n\nuse super::delegate;\n\nuse super::nilai_handler;\n", "file_path": "src/builder.rs", "rank": 79, "score": 71.14481434379213 }, { "content": "/*\n\n * Copyright 2019 balajijinnah and Contributors\n\n *\n\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n\n * you may not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n * http://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing, software\n\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n * See the License for the specific language governing permissions and\n\n * limitations under the License.\n\n */\n\n\n\nuse serde::{Deserialize, Serialize};\n\nuse std::net::SocketAddr;\n\n#[derive(Debug, PartialEq, Clone)]\n\npub enum State {\n", "file_path": "src/types.rs", "rank": 80, "score": 69.90828406434493 }, { "content": "/*\n\n * Copyright 2019 balajijinnah and Contributors\n\n *\n\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n\n * you may not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n * http://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing, software\n\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n * See the License for the specific language governing permissions and\n\n * limitations under the License.\n\n */\n\n\n\n\n\nuse super::types;\n\n\n\n/// handler is used for giving notification.\n\n/// Ideally, handler is 
called when new node comes or\n\n/// node is dead. Please, don't have blocking code here\n\n/// If you need blocking operation. Please send the\n\n/// details via channel to another thread and handle\n\n/// it there.\n\npub type Handler = Box<dyn Fn(types::Node) + Send>;\n", "file_path": "src/delegate.rs", "rank": 81, "score": 65.27926140156379 }, { "content": "/*\n\n * Copyright 2019 balajijinnah and Contributors\n\n *\n\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n\n * you may not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n * http://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing, software\n\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n * See the License for the specific language governing permissions and\n\n * limitations under the License.\n\n */\n\n\n\nuse super::types::Node;\n\nuse rand::Rng;\n\nuse std::collections::HashMap;\n\n/// k_random_nodes returns random nodes from the given nodes list based on the filtering closure.\n", "file_path": "src/utils.rs", "rank": 82, "score": 60.98330904955548 }, { "content": " Probe,\n\n PingTimeOut(u32),\n\n IndirectPingTimeout(u32),\n\n SuspectMsgTimeout(SuspectTimeout),\n\n SuspectMsg(Suspect),\n\n Alive(Alive),\n\n Dead(Dead),\n\n StateSync(Alive),\n\n StateSyncRes(Alive),\n\n}\n\n\n\n/// SuspectTimeout is triggered after sending suspect message. 
To check whether we received any\n\n/// alive message\n\n#[derive(Debug, PartialEq, Deserialize, Serialize, Clone)]\n\npub(crate) struct SuspectTimeout {\n\n pub incarnation: u32,\n\n pub node_id: String,\n\n}\n\n\n\n/// Ping is used to probe a node.\n", "file_path": "src/types.rs", "rank": 83, "score": 34.19307543609365 }, { "content": " /// should take before considering a node as dead.\n\n /// suspicious timeout will be calculated by probe_timeout * suspicious multiplier.\n\n pub fn suspicious_multiplier(mut self, multiplier: u64) -> NilaiBuilder {\n\n self.suspicious_multiplier = multiplier;\n\n self\n\n }\n\n\n\n pub fn execute(self) -> Result<closer::NilaiCloser, Error> {\n\n let socket = block_on(UdpSocket::bind(self.addr))?;\n\n let (handler_sender, udp_receiver) = mpsc::channel(1000);\n\n let (udp_sender, handler_receiver) = mpsc::channel(1000);\n\n let (read_half, send_half) = socket.split();\n\n let alive_delegate = self.alive_delegate;\n\n let dead_delegate = self.dead_delegate;\n\n let (handler_closer_signal, closer) = oneshot::channel();\n\n let mut handler = nilai_handler::NilaiHandler {\n\n msg_rcv: udp_receiver,\n\n msg_sender: udp_sender.clone(),\n\n nodes: HashMap::new(),\n\n seq_no: 0,\n", "file_path": "src/builder.rs", "rank": 84, "score": 30.66991060508451 }, { "content": " Alive = 0,\n\n Suspect = 1,\n\n Dead = 2,\n\n}\n\n\n\n/// Node contains details of the node, which includes address,name, state, incarnation.\n\n#[derive(Debug, Clone)]\n\npub struct Node {\n\n pub addr: String,\n\n pub name: String,\n\n pub state: State,\n\n pub incarnation: u32,\n\n}\n\n\n\n/// Message enum is used to send and receive message over the channel.\n\n#[derive(Debug, Clone)]\n\npub(crate) enum Message {\n\n PingMsg(Ping),\n\n IndirectPingMsg(IndirectPing),\n\n Ack(AckRes),\n", "file_path": "src/types.rs", "rank": 85, "score": 26.30168783198279 }, { "content": " handler_ch: handler_sender.clone(),\n\n udp_socket_receiver: read_half,\n\n closer: closer,\n\n 
};\n\n\n\n let (tansport_sender_closer_signal, closer) = oneshot::channel();\n\n let mut transport_sender = transport::TransportSender {\n\n udp_socket_sender: send_half,\n\n handler_recv_ch: handler_receiver,\n\n closer: closer,\n\n };\n\n let peers = self.peers.clone();\n\n // now start the state machine.\n\n thread::spawn(move || {\n\n let rt = Runtime::new().unwrap();\n\n rt.block_on(async move {\n\n join3(\n\n handler.listen(peers),\n\n transport_receiver.listen(),\n\n transport_sender.listen(),\n", "file_path": "src/builder.rs", "rank": 86, "score": 25.335727489177454 }, { "content": "}\n\n\n\n/// AckRes is used to send response for the ping.\n\n#[derive(Debug, PartialEq, Deserialize, Serialize, Clone)]\n\npub(crate) struct AckRes {\n\n pub seq_no: u32,\n\n}\n\n\n\n/// Alive is used to let know the cluster that, I'm Alive.\n\n#[derive(Debug, PartialEq, Deserialize, Serialize, Clone)]\n\npub(crate) struct Alive {\n\n pub name: String,\n\n pub addr: String,\n\n pub incarnation: u32,\n\n}\n\n\n\n/// Dead is used to determine that the node is dead.\n\n#[derive(Debug, PartialEq, Deserialize, Serialize, Clone)]\n\npub(crate) struct Dead {\n\n pub from: String,\n", "file_path": "src/types.rs", "rank": 87, "score": 21.988122201417067 }, { "content": " pub node: String,\n\n pub incarnation: u32,\n\n}\n\n\n\n/// UdpMessage is used to send and receive udp message between nodes in the cluster.\n\n/// Sometimes, UdpMessage is used to send to message to itself, for example timeout.\n\n/// In timeout scenario, we'll keep peer as None and send timeout message to itself.\n\n/// To check wether we got any response before the timeout, If we didn't get any.\n\n/// Then we'll do the necessary steps for the respective timeout.\n\n#[derive(Debug)]\n\npub(crate) struct UdpMessage {\n\n pub msg: Message,\n\n pub peer: Option<SocketAddr>,\n\n}\n", "file_path": "src/types.rs", "rank": 88, "score": 21.144224748294246 }, { "content": " )\n\n .await;\n\n });\n\n });\n\n let closer = 
closer::NilaiCloser {\n\n handler_signal: handler_closer_signal,\n\n transport_receiver_signal: tansport_receiver_closer_signal,\n\n transport_sender_signal: tansport_sender_closer_signal,\n\n };\n\n Ok(closer)\n\n }\n\n}\n", "file_path": "src/builder.rs", "rank": 89, "score": 20.854523096428785 }, { "content": " alive_delegate: None,\n\n dead_delegate: None,\n\n suspicious_multiplier: 2,\n\n };\n\n }\n\n\n\n /// alive_delegate sets the alive handler. This handler will be called any new node joins\n\n /// the cluster or the node states changes from dead to alive.\n\n pub fn alive_delegate(mut self, h: delegate::Handler) -> NilaiBuilder {\n\n self.alive_delegate = Some(h);\n\n self\n\n }\n\n\n\n /// dead_delegate sets the dead handler. This handler will be called if the nilai marks\n\n /// any node as dead.\n\n pub fn dead_delegate(mut self, h: delegate::Handler) -> NilaiBuilder {\n\n self.dead_delegate = Some(h);\n\n self\n\n }\n\n\n", "file_path": "src/builder.rs", "rank": 90, "score": 20.124595370380387 }, { "content": " pub fn probe_timeout(mut self, timeout: Duration) -> NilaiBuilder {\n\n self.probe_timeout = timeout;\n\n self\n\n }\n\n\n\n /// peers is used to set all the peers in the cluster. When nilai is started, nilai will\n\n /// send request to all the peers to updates it's local state. mean while the peers also\n\n /// updates about the local node state.\n\n pub fn peers(mut self, peers: Vec<SocketAddr>) -> NilaiBuilder {\n\n self.peers = peers;\n\n self\n\n }\n\n\n\n /// name is used to set the name of the nilai node.\n\n pub fn name(mut self, name: String) -> NilaiBuilder {\n\n self.name = name;\n\n self\n\n }\n\n\n\n /// suspicious_multiplier is used to set the multiplier timeout. 
That is, how long nilai\n", "file_path": "src/builder.rs", "rank": 91, "score": 19.423812613480486 }, { "content": "#[derive(Debug, PartialEq, Deserialize, Serialize, Clone)]\n\npub(crate) struct Ping {\n\n pub seq_no: u32,\n\n pub node: String,\n\n}\n\n\n\n/// IndirectPing is used to probe the target node, with the help of other nodes in the cluster.\n\n#[derive(Debug, PartialEq, Deserialize, Serialize, Clone)]\n\npub(crate) struct IndirectPing {\n\n pub seq_no: u32,\n\n pub to: String,\n\n pub from: String,\n\n}\n\n\n\n/// Suspect is used to suspect a node.\n\n#[derive(Debug, PartialEq, Deserialize, Serialize, Clone)]\n\npub(crate) struct Suspect {\n\n pub incarnation: u32,\n\n pub from: String,\n\n pub node: String,\n", "file_path": "src/types.rs", "rank": 92, "score": 18.343545423323604 }, { "content": " probe_timeout: Duration,\n\n peers: Vec<SocketAddr>,\n\n alive_delegate: Option<delegate::Handler>,\n\n dead_delegate: Option<delegate::Handler>,\n\n suspicious_multiplier: u64,\n\n}\n\n\n\nimpl NilaiBuilder {\n\n /// new is used to construct the nilai builder with default configuration and\n\n /// returns NilaiBuilder, which will be used to tune the NilaiBuilder and to\n\n /// run nilai.\n\n pub fn new(addr: SocketAddr) -> NilaiBuilder {\n\n return NilaiBuilder {\n\n addr: addr,\n\n indirect_checks: 3,\n\n gossip_nodes: 3,\n\n name: String::from(\"nilai node\"),\n\n probe_interval: Duration::from_secs(1),\n\n probe_timeout: Duration::from_millis(500),\n\n peers: Vec::new(),\n", "file_path": "src/builder.rs", "rank": 93, "score": 17.95921598801667 }, { "content": " node_ids: Vec::new(),\n\n ack_checker: HashMap::new(),\n\n probe_id: 0,\n\n timeout_sender: handler_sender.clone(),\n\n name: self.name,\n\n indirect_ack_checker: HashMap::new(),\n\n local_incarnation: 0,\n\n addr: self.addr.to_string(),\n\n indirect_checks: self.indirect_checks,\n\n gossip_nodes: self.gossip_nodes,\n\n probe_timeout: self.probe_timeout,\n\n probe_interval: self.probe_interval,\n\n 
suspicious_multiplier: self.suspicious_multiplier,\n\n alive_delegate: alive_delegate,\n\n dead_delegate: dead_delegate,\n\n closer: closer,\n\n };\n\n\n\n let (tansport_receiver_closer_signal, closer) = oneshot::channel();\n\n let mut transport_receiver = transport::TransportReceiver {\n", "file_path": "src/builder.rs", "rank": 94, "score": 16.967184176051095 }, { "content": " /// indirect_checks sets the number of check indirect pings has to be sent before, considering\n\n /// the node as dead.\n\n pub fn indirect_checks(mut self, checks: usize) -> NilaiBuilder {\n\n self.indirect_checks = checks;\n\n self\n\n }\n\n\n\n /// gossip_nodes determine how many nodes the message has to be gossiped.\n\n pub fn gossip_nodes(mut self, gossip_nodes: usize) -> NilaiBuilder {\n\n self.gossip_nodes = gossip_nodes;\n\n self\n\n }\n\n\n\n /// probe_interval is used to set, how often the nodes has to pinged for health check.\n\n pub fn probe_interval(mut self, interval: Duration) -> NilaiBuilder {\n\n self.probe_interval = interval;\n\n self\n\n }\n\n\n\n /// probe_timeout is used to determine how long nilai has to wait for ack.\n", "file_path": "src/builder.rs", "rank": 95, "score": 16.6592398820444 }, { "content": "use super::transport;\n\nuse failure::Error;\n\nuse future::join3;\n\nuse futures::channel::mpsc;\n\nuse futures::channel::oneshot;\n\nuse futures::executor::block_on;\n\nuse futures::prelude::*;\n\nuse std::collections::HashMap;\n\nuse std::net::SocketAddr;\n\nuse std::thread;\n\nuse std::time::Duration;\n\nuse tokio::net::UdpSocket;\n\nuse tokio::runtime::Runtime;\n\n/// NilaiBuilder is used to configure and execute the nilai\n\npub struct NilaiBuilder {\n\n indirect_checks: usize,\n\n gossip_nodes: usize,\n\n addr: SocketAddr,\n\n name: String,\n\n probe_interval: Duration,\n", "file_path": "src/builder.rs", "rank": 96, "score": 13.496107249082995 }, { "content": "pub(crate) fn k_random_nodes<'a, F>(\n\n nodes: &'a HashMap<String, Node>,\n\n nodes_ids: 
&Vec<String>,\n\n n: usize,\n\n filter: F,\n\n) -> Vec<&'a Node>\n\nwhere\n\n F: Fn(&Node) -> bool,\n\n{\n\n let mut k_nodes = Vec::new();\n\n let mut rng = rand::thread_rng();\n\n let mut i = 0;\n\n while i < nodes_ids.len() && k_nodes.len() < n {\n\n let idx = rng.gen_range(0, nodes_ids.len());\n\n let node_id = nodes_ids.get(idx).unwrap();\n\n let node = nodes.get(node_id).unwrap();\n\n // we're unwraping here, I'm confident enough that it won't panic. If it's panic\n\n // it is good to know, that there is a bug.\n\n if filter(&node) {\n\n i = i + 1;\n\n continue;\n\n }\n\n i = i + 1;\n\n k_nodes.push(node);\n\n }\n\n return k_nodes;\n\n}\n", "file_path": "src/utils.rs", "rank": 97, "score": 7.723801333312521 }, { "content": "mod transport;\n\npub mod types;\n\nmod utils;\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n #[test]\n\n fn it_works() {\n\n assert_eq!(2 + 2, 4);\n\n }\n\n}\n", "file_path": "src/lib.rs", "rank": 98, "score": 5.72301901533638 } ]
Rust
wasm/src/message/unpack.rs
spivachuk/didcomm-rust
9a24b3b60f07a11822666dda46e5616a138af056
use std::rc::Rc; use didcomm::{ error::{ErrorKind, ResultExt}, UnpackOptions, }; use js_sys::{Array, Promise}; use wasm_bindgen::prelude::*; use wasm_bindgen_futures::future_to_promise; use crate::{ error::JsResult, utils::set_panic_hook, DIDResolver, JsDIDResolver, JsSecretsResolver, Message, SecretsResolver, }; #[wasm_bindgen(skip_typescript)] impl Message { #[wasm_bindgen(skip_typescript)] pub fn unpack( msg: String, did_resolver: DIDResolver, secrets_resolver: SecretsResolver, options: JsValue, ) -> Promise { set_panic_hook(); let did_resolver = JsDIDResolver(did_resolver); let secrets_resolver = JsSecretsResolver(secrets_resolver); future_to_promise(async move { let options: UnpackOptions = options .into_serde() .kind(ErrorKind::Malformed, "Options param is malformed") .as_js()?; let (msg, metadata) = didcomm::Message::unpack(&msg, &did_resolver, &secrets_resolver, &options) .await .as_js()?; let metadata = JsValue::from_serde(&metadata) .kind(ErrorKind::InvalidState, "Unable serialize UnpackMetadata") .as_js()?; let res = { let res = Array::new_with_length(2); res.set(0, Message(Rc::new(msg)).into()); res.set(1, metadata); res }; Ok(res.into()) }) } } #[wasm_bindgen(typescript_custom_section)] const MESSAGE_UNPACK_TS: &'static str = r#" export namespace Message { /** * Unpacks the packed message by doing decryption and verifying the signatures. * This method supports all DID Comm message types (encrypted, signed, plaintext). * * If unpack options expect a particular property (for example that a message is encrypted) * and the packed message doesn't meet the criteria (it's not encrypted), then a MessageUntrusted * error will be returned. 
* * @param `packed_msg` the message as JSON string to be unpacked * @param `did_resolver` instance of `DIDResolver` to resolve DIDs * @param `secrets_resolver` instance of SecretsResolver` to resolve sender DID keys secrets * @param `options` allow fine configuration of unpacking process and imposing additional restrictions * to message to be trusted. * * @returns Tuple `[message, metadata]`. * - `message` plain message instance * - `metadata` additional metadata about this `unpack` execution like used keys identifiers, * trust context, algorithms and etc. * * @throws DIDCommDIDNotResolved * @throws DIDCommDIDUrlNotFound * @throws DIDCommMalformed * @throws DIDCommIoError * @throws DIDCommInvalidState * @throws DIDCommNoCompatibleCrypto * @throws DIDCommUnsupported * @throws DIDCommIllegalArgument */ function unpack( msg: string, did_resolver: DIDResolver, secrets_resolver: SecretsResolver, options: UnpackOptions, ): Promise<[Message, UnpackMetadata]>; } "#; #[wasm_bindgen(typescript_custom_section)] const PACK_UNPACK_OPTIONS_TS: &'static str = r#" /** * Allows fine customization of unpacking process */ type UnpackOptions = { /** * Whether the plaintext must be decryptable by all keys resolved by the secrets resolver. * False by default. */ expect_decrypt_by_all_keys?: boolean, /** * If `true` and the packed message is a `Forward` * wrapping a plaintext packed for the given recipient, then both Forward and packed plaintext are unpacked automatically, * and the unpacked plaintext will be returned instead of unpacked Forward. * False by default. */ unwrap_re_wrapping_forward?: boolean, } "#; #[wasm_bindgen(typescript_custom_section)] const UNPACK_METADATA_TS: &'static str = r#" /** * Additional metadata about this `unpack` method execution like trust predicates * and used keys identifiers. */ type UnpackMetadata = { /** * Whether the plaintext has been encrypted. */ encrypted: boolean, /** * Whether the plaintext has been authenticated. 
*/ authenticated: boolean, /** * Whether the plaintext has been signed. */ non_repudiation: boolean, /** * Whether the sender ID was hidden or protected. */ anonymous_sender: boolean, /** * Whether the plaintext was re-wrapped in a forward message by a mediator. */ re_wrapped_in_forward: boolean, /** * Key ID of the sender used for authentication encryption * if the plaintext has been authenticated and encrypted. */ encrypted_from_kid?: string, /** * Target key IDS for encryption if the plaintext has been encrypted. */ encrypted_to_kids?: Array<string>, /** * Key ID used for signature if the plaintext has been signed. */ sign_from: string, /** * Key ID used for from_prior header signature if from_prior header is present */ from_prior_issuer_kid?: string, /** * Algorithm used for authenticated encryption. * Default "A256cbcHs512Ecdh1puA256kw" */ enc_alg_auth?: "A256cbcHs512Ecdh1puA256kw", /** * Algorithm used for anonymous encryption. * Default "Xc20pEcdhEsA256kw" */ enc_alg_anon?: "A256cbcHs512EcdhEsA256kw" | "Xc20pEcdhEsA256kw" | "A256gcmEcdhEsA256kw", /** * Algorithm used for message signing. */ sign_alg?: "EdDSA" | "ES256" | "ES256K", /** * If the plaintext has been signed, the JWS is returned for non-repudiation purposes. */ signed_message?: string, /** * If plaintext contains from_prior header, its unpacked value is returned */ from_prior?: IFromPrior, } "#;
use std::rc::Rc; use didcomm::{ error::{ErrorKind, ResultExt}, UnpackOptions, }; use js_sys::{Array, Promise}; use wasm_bindgen::prelude::*; use wasm_bindgen_futures::future_to_promise; use crate::{ error::JsResult, utils::set_panic_hook, DIDResolver, JsDIDResolver, JsSecretsResolver, Message, SecretsResolver, }; #[wasm_bindgen(skip_typescript)] impl Message { #[wasm_bindgen(skip_typescript)] pub fn unpack( msg: String, did_resolver: DIDResolver, secrets_resolver: SecretsResolver, options: JsValue, ) -> Promise { set_panic_hook(); let did_resolver = JsDIDResolver(did_resolver); let secrets_resolver = JsSecretsResolver(secrets_resolver); future_to_promise(async move { let options: UnpackOptions = options .into_serde() .kind(ErrorKind::Malformed, "Options param is malformed") .as_js()?; let (msg, metadata) = didcomm::Messag
} #[wasm_bindgen(typescript_custom_section)] const MESSAGE_UNPACK_TS: &'static str = r#" export namespace Message { /** * Unpacks the packed message by doing decryption and verifying the signatures. * This method supports all DID Comm message types (encrypted, signed, plaintext). * * If unpack options expect a particular property (for example that a message is encrypted) * and the packed message doesn't meet the criteria (it's not encrypted), then a MessageUntrusted * error will be returned. * * @param `packed_msg` the message as JSON string to be unpacked * @param `did_resolver` instance of `DIDResolver` to resolve DIDs * @param `secrets_resolver` instance of SecretsResolver` to resolve sender DID keys secrets * @param `options` allow fine configuration of unpacking process and imposing additional restrictions * to message to be trusted. * * @returns Tuple `[message, metadata]`. * - `message` plain message instance * - `metadata` additional metadata about this `unpack` execution like used keys identifiers, * trust context, algorithms and etc. * * @throws DIDCommDIDNotResolved * @throws DIDCommDIDUrlNotFound * @throws DIDCommMalformed * @throws DIDCommIoError * @throws DIDCommInvalidState * @throws DIDCommNoCompatibleCrypto * @throws DIDCommUnsupported * @throws DIDCommIllegalArgument */ function unpack( msg: string, did_resolver: DIDResolver, secrets_resolver: SecretsResolver, options: UnpackOptions, ): Promise<[Message, UnpackMetadata]>; } "#; #[wasm_bindgen(typescript_custom_section)] const PACK_UNPACK_OPTIONS_TS: &'static str = r#" /** * Allows fine customization of unpacking process */ type UnpackOptions = { /** * Whether the plaintext must be decryptable by all keys resolved by the secrets resolver. * False by default. 
*/ expect_decrypt_by_all_keys?: boolean, /** * If `true` and the packed message is a `Forward` * wrapping a plaintext packed for the given recipient, then both Forward and packed plaintext are unpacked automatically, * and the unpacked plaintext will be returned instead of unpacked Forward. * False by default. */ unwrap_re_wrapping_forward?: boolean, } "#; #[wasm_bindgen(typescript_custom_section)] const UNPACK_METADATA_TS: &'static str = r#" /** * Additional metadata about this `unpack` method execution like trust predicates * and used keys identifiers. */ type UnpackMetadata = { /** * Whether the plaintext has been encrypted. */ encrypted: boolean, /** * Whether the plaintext has been authenticated. */ authenticated: boolean, /** * Whether the plaintext has been signed. */ non_repudiation: boolean, /** * Whether the sender ID was hidden or protected. */ anonymous_sender: boolean, /** * Whether the plaintext was re-wrapped in a forward message by a mediator. */ re_wrapped_in_forward: boolean, /** * Key ID of the sender used for authentication encryption * if the plaintext has been authenticated and encrypted. */ encrypted_from_kid?: string, /** * Target key IDS for encryption if the plaintext has been encrypted. */ encrypted_to_kids?: Array<string>, /** * Key ID used for signature if the plaintext has been signed. */ sign_from: string, /** * Key ID used for from_prior header signature if from_prior header is present */ from_prior_issuer_kid?: string, /** * Algorithm used for authenticated encryption. * Default "A256cbcHs512Ecdh1puA256kw" */ enc_alg_auth?: "A256cbcHs512Ecdh1puA256kw", /** * Algorithm used for anonymous encryption. * Default "Xc20pEcdhEsA256kw" */ enc_alg_anon?: "A256cbcHs512EcdhEsA256kw" | "Xc20pEcdhEsA256kw" | "A256gcmEcdhEsA256kw", /** * Algorithm used for message signing. */ sign_alg?: "EdDSA" | "ES256" | "ES256K", /** * If the plaintext has been signed, the JWS is returned for non-repudiation purposes. 
*/ signed_message?: string, /** * If plaintext contains from_prior header, its unpacked value is returned */ from_prior?: IFromPrior, } "#;
e::unpack(&msg, &did_resolver, &secrets_resolver, &options) .await .as_js()?; let metadata = JsValue::from_serde(&metadata) .kind(ErrorKind::InvalidState, "Unable serialize UnpackMetadata") .as_js()?; let res = { let res = Array::new_with_length(2); res.set(0, Message(Rc::new(msg)).into()); res.set(1, metadata); res }; Ok(res.into()) }) }
function_block-function_prefixed
[ { "content": "/// Tries to parse plaintext message into `ParsedForward` structure if the message is Forward.\n\n/// (https://identity.foundation/didcomm-messaging/spec/#messages)\n\n///\n\n/// # Parameters\n\n/// - `msg` plaintext message to try to parse into `ParsedForward` structure\n\n///\n\n/// # Returns\n\n/// `Some` with `ParsedForward` structure if `msg` is Forward message, otherwise `None`.\n\npub fn try_parse_forward(msg: &Message) -> Option<ParsedForward> {\n\n if msg.type_ != FORWARD_MSG_TYPE {\n\n return None;\n\n }\n\n\n\n let next = match msg.body {\n\n Value::Object(ref body) => match body.get(\"next\") {\n\n Some(&Value::String(ref next)) => Some(next),\n\n _ => None,\n\n },\n\n _ => None,\n\n };\n\n\n\n if next.is_none() {\n\n return None;\n\n }\n\n\n\n let next = next.unwrap();\n\n\n\n let json_attachment_data = match msg.attachments {\n", "file_path": "src/protocols/routing/mod.rs", "rank": 0, "score": 214332.1600901674 }, { "content": "pub fn remove_field(msg: &str, field: &str) -> String {\n\n let parsed: Value = serde_json::from_str(&msg).unwrap();\n\n let mut msg_dict: Map<String, Value> = parsed.as_object().unwrap().clone();\n\n msg_dict.remove(field);\n\n serde_json::to_string(&msg_dict).unwrap()\n\n}\n\n\n", "file_path": "src/test_vectors/common.rs", "rank": 1, "score": 179783.6861184393 }, { "content": "pub fn remove_protected_field(msg: &str, field: &str) -> String {\n\n let parsed: Value = serde_json::from_str(&msg).unwrap();\n\n let mut msg_dict: Map<String, Value> = parsed.as_object().unwrap().clone();\n\n\n\n let mut buffer = Vec::<u8>::new();\n\n base64::decode_config_buf(\n\n msg_dict.get(\"protected\").unwrap().as_str().unwrap(),\n\n base64::URL_SAFE_NO_PAD,\n\n &mut buffer,\n\n )\n\n .unwrap();\n\n let parsed_protected: Value = serde_json::from_slice(&buffer).unwrap();\n\n let mut protected_dict: Map<String, Value> = parsed_protected.as_object().unwrap().clone();\n\n protected_dict.remove(field);\n\n let protected_str = 
serde_json::to_string(&protected_dict).unwrap();\n\n let protected_str_base64 = base64::encode_config(protected_str, base64::URL_SAFE_NO_PAD);\n\n\n\n msg_dict.insert(String::from(\"protected\"), protected_str_base64.into());\n\n serde_json::to_string(&msg_dict).unwrap()\n\n}\n", "file_path": "src/test_vectors/common.rs", "rank": 2, "score": 176230.5440663549 }, { "content": "pub fn update_field(msg: &str, field: &str, value: &str) -> String {\n\n let parsed: Value = serde_json::from_str(&msg).unwrap();\n\n let mut msg_dict: Map<String, Value> = parsed.as_object().unwrap().clone();\n\n msg_dict.insert(String::from(field), value.into());\n\n serde_json::to_string(&msg_dict).unwrap()\n\n}\n\n\n", "file_path": "src/test_vectors/common.rs", "rank": 3, "score": 167497.0533784915 }, { "content": "pub fn update_protected_field(msg: &str, field: &str, value: &str) -> String {\n\n let parsed: Value = serde_json::from_str(&msg).unwrap();\n\n let mut msg_dict: Map<String, Value> = parsed.as_object().unwrap().clone();\n\n\n\n let mut buffer = Vec::<u8>::new();\n\n base64::decode_config_buf(\n\n msg_dict.get(\"protected\").unwrap().as_str().unwrap(),\n\n base64::URL_SAFE_NO_PAD,\n\n &mut buffer,\n\n )\n\n .unwrap();\n\n let parsed_protected: Value = serde_json::from_slice(&buffer).unwrap();\n\n let mut protected_dict: Map<String, Value> = parsed_protected.as_object().unwrap().clone();\n\n protected_dict.insert(String::from(field), value.into());\n\n let protected_str = serde_json::to_string(&protected_dict).unwrap();\n\n println!(\"{}\", &protected_str);\n\n let protected_str_base64 = base64::encode_config(protected_str, base64::URL_SAFE_NO_PAD);\n\n msg_dict.insert(String::from(\"protected\"), protected_str_base64.into());\n\n serde_json::to_string(&msg_dict).unwrap()\n\n}\n\n\n", "file_path": "src/test_vectors/common.rs", "rank": 4, "score": 164264.1295910727 }, { "content": "fn generate_message_id() -> String {\n\n Uuid::new_v4().to_string()\n\n}\n\n\n", "file_path": 
"src/protocols/routing/mod.rs", "rank": 5, "score": 153991.45270499616 }, { "content": "pub fn err_msg<D>(kind: ErrorKind, msg: D) -> Error\n\nwhere\n\n D: fmt::Display + fmt::Debug + Send + Sync + 'static,\n\n{\n\n Error::msg(kind, msg)\n\n}\n", "file_path": "src/error.rs", "rank": 6, "score": 144811.7886844403 }, { "content": "pub trait OnUnpackResult: Sync + Send {\n\n fn success(&self, result: Message, metadata: UnpackMetadata);\n\n fn error(&self, err: ErrorKind, err_msg: String);\n\n}\n\n\n\nimpl DIDComm {\n\n pub fn unpack<'a>(\n\n &self,\n\n msg: String,\n\n options: &'a UnpackOptions,\n\n cb: Box<dyn OnUnpackResult>,\n\n ) -> ErrorCode {\n\n let msg = msg.clone();\n\n let options = options.clone();\n\n let did_resolver = DIDResolverAdapter::new(self.did_resolver.clone());\n\n let secret_resolver = SecretsResolverAdapter::new(self.secret_resolver.clone());\n\n\n\n let future =\n\n async move { Message::unpack(&msg, &did_resolver, &secret_resolver, &options).await };\n\n EXECUTOR.spawn_ok(async move {\n", "file_path": "uniffi/src/didcomm/unpack.rs", "rank": 7, "score": 140968.57245633093 }, { "content": "pub fn set_panic_hook() {\n\n // When the `console_error_panic_hook` feature is enabled, we can call the\n\n // `set_panic_hook` function at least once during initialization, and then\n\n // we will get better error messages if our code ever panics.\n\n //\n\n // For more details see\n\n // https://github.com/rustwasm/console_error_panic_hook#readme\n\n #[cfg(feature = \"console_error_panic_hook\")]\n\n console_error_panic_hook::set_once();\n\n}\n", "file_path": "wasm/src/utils.rs", "rank": 10, "score": 133211.23925721235 }, { "content": "pub trait OnFromPriorUnpackResult: Sync + Send {\n\n fn success(&self, from_prior: FromPrior, kid: String);\n\n fn error(&self, err: ErrorKind, err_msg: String);\n\n}\n\n\n\nimpl DIDComm {\n\n pub fn pack_from_prior(\n\n &self,\n\n msg: &FromPrior,\n\n issuer_kid: Option<String>,\n\n cb: Box<dyn OnFromPriorPackResult>,\n\n 
) -> ErrorCode {\n\n let msg = msg.clone();\n\n let did_resolver = DIDResolverAdapter::new(self.did_resolver.clone());\n\n let secret_resolver = SecretsResolverAdapter::new(self.secret_resolver.clone());\n\n\n\n let future = async move {\n\n msg.pack(issuer_kid.as_deref(), &did_resolver, &secret_resolver)\n\n .await\n\n };\n", "file_path": "uniffi/src/didcomm/from_prior.rs", "rank": 11, "score": 126309.05329089412 }, { "content": " function unpack(\n\n from_prior: string,\n\n did_resolver: DIDResolver,\n\n ): Promise<[FromPrior, string]>;\n\n}\n\n\"#;\n", "file_path": "wasm/src/message/from_prior/unpack.rs", "rank": 13, "score": 116051.97134147464 }, { "content": "fn _message() -> MessageBuilder {\n\n Message::build(\n\n \"1234567890\".to_owned(),\n\n \"http://example.com/protocols/lets_do_lunch/1.0/proposal\".to_owned(),\n\n json!({\"messagespecificattribute\": \"and its value\"}),\n\n )\n\n .from(ALICE_DID.to_owned())\n\n .to(BOB_DID.to_owned())\n\n .created_time(1516269022)\n\n .expires_time(1516385931)\n\n}\n", "file_path": "src/test_vectors/message.rs", "rank": 14, "score": 114597.71894459467 }, { "content": "fn build_forward_message(\n\n forwarded_msg: &str,\n\n next: &str,\n\n headers: Option<&HashMap<String, Value>>,\n\n) -> Result<String> {\n\n let body = json!({ \"next\": next });\n\n\n\n // TODO: Think how to avoid extra deserialization of forwarded_msg here.\n\n // (This deserializtion is a double work because the whole Forward message with the attachments\n\n // will then be serialized.)\n\n let attachment = Attachment::json(\n\n serde_json::from_str(forwarded_msg)\n\n .kind(ErrorKind::Malformed, \"Unable deserialize forwarded message\")?,\n\n )\n\n .finalize();\n\n\n\n let mut msg_builder = Message::build(generate_message_id(), FORWARD_MSG_TYPE.to_owned(), body);\n\n\n\n if let Some(headers) = headers {\n\n for (name, value) in headers {\n", "file_path": "src/protocols/routing/mod.rs", "rank": 15, "score": 103238.67503602737 }, { "content": 
"RustBuffer didcomm_f20e_DIDComm_unpack(\n\n void*_Nonnull ptr,RustBuffer msg,RustBuffer options,uint64_t cb,\n\n RustCallStatus *_Nonnull out_status\n", "file_path": "wrappers/swift/didcomm/didcommFFI.h", "rank": 16, "score": 93305.53352090283 }, { "content": "RustBuffer didcomm_f20e_DIDComm_unpack_from_prior(\n\n void*_Nonnull ptr,RustBuffer from_prior_jwt,uint64_t cb,\n\n RustCallStatus *_Nonnull out_status\n", "file_path": "wrappers/swift/didcomm/didcommFFI.h", "rank": 17, "score": 91884.77403619784 }, { "content": "import { Message } from \"didcomm\";\n\nimport {\n\n ALICE_DID_DOC,\n\n BOB_DID_DOC,\n\n BOB_SECRETS,\n\n CHARLIE_DID_DOC,\n\n ExampleDIDResolver,\n\n ExampleSecretsResolver,\n\n IMESSAGE_FROM_PRIOR,\n\n IMESSAGE_MINIMAL,\n\n IMESSAGE_SIMPLE,\n\n PLAINTEXT_FROM_PRIOR,\n\n PLAINTEXT_MSG_MINIMAL,\n\n PLAINTEXT_MSG_SIMPLE,\n\n} from \"../test-vectors\";\n\n\n\ntest.each([\n\n {\n\n case: \"Minimal\",\n\n msg: PLAINTEXT_MSG_MINIMAL,\n\n options: {},\n\n expMsg: IMESSAGE_MINIMAL,\n\n expMetadata: {\n\n anonymous_sender: false,\n\n authenticated: false,\n\n enc_alg_anon: null,\n\n enc_alg_auth: null,\n\n encrypted: false,\n\n encrypted_from_kid: null,\n\n encrypted_to_kids: null,\n\n from_prior: null,\n\n from_prior_issuer_kid: null,\n\n non_repudiation: false,\n\n re_wrapped_in_forward: false,\n\n sign_alg: null,\n\n sign_from: null,\n\n signed_message: null,\n\n },\n\n },\n\n {\n\n case: \"Simple\",\n\n msg: PLAINTEXT_MSG_SIMPLE,\n\n options: {},\n\n expMsg: IMESSAGE_SIMPLE,\n\n expMetadata: {\n\n anonymous_sender: false,\n\n authenticated: false,\n\n enc_alg_anon: null,\n\n enc_alg_auth: null,\n\n encrypted: false,\n\n encrypted_from_kid: null,\n\n encrypted_to_kids: null,\n\n from_prior: null,\n\n from_prior_issuer_kid: null,\n\n non_repudiation: false,\n\n re_wrapped_in_forward: false,\n\n sign_alg: null,\n\n sign_from: null,\n\n signed_message: null,\n\n },\n\n },\n\n {\n\n case: \"FromPrior\",\n\n msg: PLAINTEXT_FROM_PRIOR,\n\n options: {},\n\n 
expMsg: IMESSAGE_FROM_PRIOR,\n\n expMetadata: {\n\n anonymous_sender: false,\n\n authenticated: false,\n\n enc_alg_anon: null,\n\n enc_alg_auth: null,\n\n encrypted: false,\n\n encrypted_from_kid: null,\n\n encrypted_to_kids: null,\n\n from_prior: {\n\n aud: \"123\",\n\n exp: 1234,\n\n iat: 123456,\n\n iss: \"did:example:charlie\",\n\n jti: \"dfg\",\n\n nbf: 12345,\n\n sub: \"did:example:alice\",\n\n },\n\n from_prior_issuer_kid: \"did:example:charlie#key-1\",\n\n non_repudiation: false,\n\n re_wrapped_in_forward: false,\n\n sign_alg: null,\n\n sign_from: null,\n\n signed_message: null,\n\n },\n\n },\n\n])(\n\n \"Message.unpack works for $case\",\n\n async ({ msg, options, expMsg, expMetadata }) => {\n\n const didResolver = new ExampleDIDResolver([\n\n ALICE_DID_DOC,\n\n BOB_DID_DOC,\n\n CHARLIE_DID_DOC,\n\n ]);\n\n\n\n const secretsResolver = new ExampleSecretsResolver(BOB_SECRETS);\n\n\n\n const [unpacked, metadata] = await Message.unpack(\n\n msg,\n\n didResolver,\n\n secretsResolver,\n\n options\n\n );\n\n\n\n expect(unpacked.as_value()).toStrictEqual(expMsg);\n\n expect(metadata).toStrictEqual(expMetadata);\n\n }\n\n);\n", "file_path": "wasm/tests-js/src/message/unpack.test.ts", "rank": 18, "score": 91866.75884687538 }, { "content": "type MessagingServiceMetadata = {\n\n /**\n\n * Identifier (DID URL) of used messaging service.\n\n */\n\n id: string,\n\n\n\n /**\n\n * Service endpoint of used messaging service.\n\n */\n\n service_endpoint: string,\n\n}\n\n\"#;\n", "file_path": "wasm/src/message/pack_encrypted.rs", "rank": 19, "score": 91408.96415159178 }, { "content": " match future.await {\n\n Ok((result, metadata)) => cb.success(result, metadata),\n\n Err(err) => cb.error(err.kind(), err.to_string()),\n\n }\n\n });\n\n\n\n ErrorCode::Success\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::test_helper::{\n\n create_did_resolver, create_secrets_resolver, get_error, get_ok, PackResult, UnpackResult,\n\n };\n\n use crate::DIDComm;\n\n use 
didcomm_core::error::ErrorKind;\n\n use didcomm_core::{PackEncryptedOptions, UnpackOptions};\n\n\n\n use didcomm_core::test_vectors::{ALICE_DID, BOB_DID, MESSAGE_SIMPLE};\n", "file_path": "uniffi/src/didcomm/unpack.rs", "rank": 20, "score": 89936.10059012927 }, { "content": "use didcomm_core::{error::ErrorKind, Message, UnpackMetadata, UnpackOptions};\n\n\n\nuse crate::common::EXECUTOR;\n\nuse crate::did_resolver_adapter::DIDResolverAdapter;\n\nuse crate::DIDComm;\n\nuse crate::{secrets_resolver_adapter::SecretsResolverAdapter, ErrorCode};\n\n\n", "file_path": "uniffi/src/didcomm/unpack.rs", "rank": 21, "score": 89933.11168912884 }, { "content": " let didcomm = DIDComm::new(create_did_resolver(), create_secrets_resolver());\n\n\n\n let (cb, receiver) = PackResult::new();\n\n didcomm.pack_signed(&msg, String::from(ALICE_DID), cb);\n\n let res = get_ok(receiver).await;\n\n\n\n let (cb, receiver) = UnpackResult::new();\n\n didcomm.unpack(res, &UnpackOptions::default(), cb);\n\n let res = get_ok(receiver).await;\n\n\n\n assert_eq!(res, msg);\n\n }\n\n\n\n #[tokio::test]\n\n async fn unpack_works_encrypted() {\n\n let msg = MESSAGE_SIMPLE.clone();\n\n let didcomm = DIDComm::new(create_did_resolver(), create_secrets_resolver());\n\n\n\n let (cb, receiver) = PackResult::new();\n\n didcomm.pack_encrypted(\n", "file_path": "uniffi/src/didcomm/unpack.rs", "rank": 22, "score": 89925.37218645038 }, { "content": " &msg,\n\n String::from(BOB_DID),\n\n Some(String::from(ALICE_DID)),\n\n Some(String::from(ALICE_DID)),\n\n &PackEncryptedOptions {\n\n forward: false,\n\n ..PackEncryptedOptions::default()\n\n },\n\n cb,\n\n );\n\n let res = get_ok(receiver).await;\n\n\n\n let (cb, receiver) = UnpackResult::new();\n\n didcomm.unpack(res, &UnpackOptions::default(), cb);\n\n let res = get_ok(receiver).await;\n\n\n\n assert_eq!(res, msg);\n\n }\n\n\n\n #[tokio::test]\n", "file_path": "uniffi/src/didcomm/unpack.rs", "rank": 23, "score": 89923.85756309587 }, { "content": "\n\n 
#[tokio::test]\n\n async fn unpack_works_plaintext() {\n\n let msg = MESSAGE_SIMPLE.clone();\n\n let didcomm = DIDComm::new(create_did_resolver(), create_secrets_resolver());\n\n\n\n let (cb, receiver) = PackResult::new();\n\n didcomm.pack_plaintext(&MESSAGE_SIMPLE, cb);\n\n let res = get_ok(receiver).await;\n\n\n\n let (cb, receiver) = UnpackResult::new();\n\n didcomm.unpack(res, &UnpackOptions::default(), cb);\n\n let res = get_ok(receiver).await;\n\n\n\n assert_eq!(res, msg);\n\n }\n\n\n\n #[tokio::test]\n\n async fn unpack_works_signed() {\n\n let msg = MESSAGE_SIMPLE.clone();\n", "file_path": "uniffi/src/didcomm/unpack.rs", "rank": 24, "score": 89923.42394432296 }, { "content": " async fn unpack_works_malformed() {\n\n let (cb, receiver) = UnpackResult::new();\n\n DIDComm::new(create_did_resolver(), create_secrets_resolver()).unpack(\n\n String::from(\"invalid message\"),\n\n &UnpackOptions::default(),\n\n cb,\n\n );\n\n let res = get_error(receiver).await;\n\n\n\n assert_eq!(res.kind(), ErrorKind::Malformed);\n\n }\n\n}\n", "file_path": "uniffi/src/didcomm/unpack.rs", "rank": 25, "score": 89922.14057927314 }, { "content": "use crate::did::DIDResolver;\n\nuse crate::error::{ErrorKind, Result};\n\nuse crate::{FromPrior, Message, UnpackMetadata};\n\n\n\npub(crate) async fn _try_unpack_plaintext<'dr, 'sr>(\n\n msg: &str,\n\n did_resolver: &'dr (dyn DIDResolver + 'dr),\n\n metadata: &mut UnpackMetadata,\n\n) -> Result<Option<Message>> {\n\n let msg = match Message::from_str(msg) {\n\n Ok(m) => m,\n\n Err(e) if e.kind() == ErrorKind::Malformed => return Ok(None),\n\n Err(e) => Err(e)?,\n\n }\n\n .validate()?;\n\n\n\n if let Some(from_prior) = &msg.from_prior {\n\n let (unpacked_from_prior, from_prior_issuer_kid) =\n\n FromPrior::unpack(from_prior, did_resolver).await?;\n\n\n\n metadata.from_prior = Some(unpacked_from_prior);\n\n metadata.from_prior_issuer_kid = Some(from_prior_issuer_kid);\n\n };\n\n\n\n Ok(Some(msg))\n\n}\n", "file_path": 
"src/message/unpack/plaintext.rs", "rank": 26, "score": 89502.07900097048 }, { "content": "use askar_crypto::alg::{ed25519::Ed25519KeyPair, k256::K256KeyPair, p256::P256KeyPair};\n\n\n\nuse crate::jws::JWS;\n\nuse crate::{\n\n algorithms::SignAlg,\n\n did::DIDResolver,\n\n error::{err_msg, ErrorKind, Result, ResultContext, ResultExt},\n\n jws,\n\n utils::{crypto::AsKnownKeyPair, did::did_or_url},\n\n UnpackMetadata, UnpackOptions,\n\n};\n\n\n\npub(crate) async fn _try_unapck_sign<'dr>(\n\n msg: &str,\n\n did_resolver: &'dr (dyn DIDResolver + 'dr),\n\n _opts: &UnpackOptions,\n\n metadata: &mut UnpackMetadata,\n\n) -> Result<Option<String>> {\n\n let jws_json = msg;\n\n\n", "file_path": "src/message/unpack/sign.rs", "rank": 29, "score": 89497.16557085158 }, { "content": " UnpackMetadata, UnpackOptions,\n\n};\n\n\n\npub(crate) async fn _try_unpack_authcrypt<'dr, 'sr>(\n\n msg: &str,\n\n did_resolver: &'dr (dyn DIDResolver + 'dr),\n\n secrets_resolver: &'sr (dyn SecretsResolver + 'sr),\n\n opts: &UnpackOptions,\n\n metadata: &mut UnpackMetadata,\n\n) -> Result<Option<String>> {\n\n let jwe = match JWE::from_str(msg) {\n\n Ok(m) => m,\n\n Err(e) if e.kind() == ErrorKind::Malformed => return Ok(None),\n\n Err(e) => Err(e)?,\n\n };\n\n\n\n let mut buf = vec![];\n\n let parsed_jwe = jwe.parse(&mut buf)?;\n\n\n\n if parsed_jwe.protected.alg != jwe::Algorithm::Ecdh1puA256kw {\n", "file_path": "src/message/unpack/authcrypt.rs", "rank": 31, "score": 89494.25571131102 }, { "content": "};\n\n\n\npub(crate) async fn _try_unpack_anoncrypt<'sr>(\n\n msg: &str,\n\n secrets_resolver: &'sr (dyn SecretsResolver + 'sr),\n\n opts: &UnpackOptions,\n\n metadata: &mut UnpackMetadata,\n\n) -> Result<Option<String>> {\n\n let jwe = match JWE::from_str(msg) {\n\n Ok(m) => m,\n\n Err(e) if e.kind() == ErrorKind::Malformed => return Ok(None),\n\n Err(e) => Err(e)?,\n\n };\n\n\n\n let mut buf = vec![];\n\n let parsed_jwe = jwe.parse(&mut buf)?;\n\n\n\n if parsed_jwe.protected.alg != 
jwe::Algorithm::EcdhEsA256kw {\n\n return Ok(None);\n\n }\n", "file_path": "src/message/unpack/anoncrypt.rs", "rank": 32, "score": 89493.51211718193 }, { "content": " /// - `metadata` additional metadata about this `unpack` execution like used keys identifiers,\n\n /// trust context, algorithms and etc.\n\n ///\n\n /// # Errors\n\n /// - `DIDNotResolved` Sender or recipient DID not found.\n\n /// - `DIDUrlNotFound` DID doesn't contain mentioned DID Urls (for ex., key id)\n\n /// - `MessageMalformed` message doesn't correspond to DID Comm or has invalid encryption or signatures.\n\n /// - `Unsupported` Used crypto or method is unsupported.\n\n /// - `SecretNotFound` No recipient secrets found.\n\n /// - `InvalidState` Indicates library error.\n\n /// - `IOError` IO error during DID or secrets resolving.\n\n /// TODO: verify and update errors list\n\n pub async fn unpack<'dr, 'sr>(\n\n msg: &str,\n\n did_resolver: &'dr (dyn DIDResolver + 'dr),\n\n secrets_resolver: &'sr (dyn SecretsResolver + 'sr),\n\n options: &UnpackOptions,\n\n ) -> Result<(Self, UnpackMetadata)> {\n\n let mut metadata = UnpackMetadata {\n\n encrypted: false,\n", "file_path": "src/message/unpack/mod.rs", "rank": 33, "score": 89492.6126719047 }, { "content": "mod sign;\n\n\n\nimpl Message {\n\n /// Unpacks the packed message by doing decryption and verifying the signatures.\n\n /// This method supports all DID Comm message types (encrypted, signed, plaintext).\n\n ///\n\n /// If unpack options expect a particular property (for example that a message is encrypted)\n\n /// and the packed message doesn't meet the criteria (it's not encrypted), then a MessageUntrusted\n\n /// error will be returned.\n\n ///\n\n /// # Params\n\n /// - `packed_msg` the message as JSON string to be unpacked\n\n /// - `did_resolver` instance of `DIDResolver` to resolve DIDs\n\n /// - `secrets_resolver` instance of SecretsResolver` to resolve sender DID keys secrets\n\n /// - `options` allow fine configuration of unpacking 
process and imposing additional restrictions\n\n /// to message to be trusted.\n\n ///\n\n /// # Returns\n\n /// Tuple `(message, metadata)`.\n\n /// - `message` plain message instance\n", "file_path": "src/message/unpack/mod.rs", "rank": 34, "score": 89492.2435837377 }, { "content": " let forwarded_msg_at_bob = serde_json::to_string(&forward_at_bob.forwarded_msg)\n\n .expect(\"Unable serialize forwarded message\");\n\n\n\n let (unpacked_twice_msg, unpack_twice_metadata) = Message::unpack(\n\n &forwarded_msg_at_bob,\n\n &did_resolver,\n\n &bob_secrets_resolver,\n\n &UnpackOptions {\n\n unwrap_re_wrapping_forward: false,\n\n ..UnpackOptions::default()\n\n },\n\n )\n\n .await\n\n .expect(\"Unable unpack\");\n\n\n\n assert_eq!(&unpacked_twice_msg, &*MESSAGE_SIMPLE);\n\n\n\n assert!(unpack_twice_metadata.encrypted);\n\n assert_eq!(\n\n unpack_twice_metadata.authenticated,\n", "file_path": "src/message/unpack/mod.rs", "rank": 35, "score": 89489.34872130088 }, { "content": "/// and used keys identifiers.\n\n#[derive(Debug, PartialEq, Eq, Clone, Serialize)]\n\npub struct UnpackMetadata {\n\n /// Whether the plaintext has been encrypted\n\n pub encrypted: bool,\n\n\n\n /// Whether the plaintext has been authenticated\n\n pub authenticated: bool,\n\n\n\n /// Whether the plaintext has been signed\n\n pub non_repudiation: bool,\n\n\n\n /// Whether the sender ID was hidden or protected\n\n pub anonymous_sender: bool,\n\n\n\n /// Whether the plaintext was re-wrapped in a forward message by a mediator\n\n pub re_wrapped_in_forward: bool,\n\n\n\n /// Key ID of the sender used for authentication encryption if the plaintext has been authenticated and encrypted\n\n pub encrypted_from_kid: Option<String>,\n", "file_path": "src/message/unpack/mod.rs", "rank": 36, "score": 89487.55585243551 }, { "content": "\n\n break;\n\n }\n\n\n\n let msg = anoncrypted.as_deref().unwrap_or(msg);\n\n\n\n let authcrypted =\n\n _try_unpack_authcrypt(msg, did_resolver, secrets_resolver, options, &mut 
metadata)\n\n .await?;\n\n let msg = authcrypted.as_deref().unwrap_or(msg);\n\n\n\n let signed = _try_unapck_sign(msg, did_resolver, options, &mut metadata).await?;\n\n let msg = signed.as_deref().unwrap_or(msg);\n\n\n\n let msg = _try_unpack_plaintext(msg, did_resolver, &mut metadata)\n\n .await?\n\n .ok_or_else(|| {\n\n err_msg(\n\n ErrorKind::Malformed,\n\n \"Message is not a valid JWE, JWS or JWM\",\n", "file_path": "src/message/unpack/mod.rs", "rank": 37, "score": 89487.10595571679 }, { "content": " pub expect_decrypt_by_all_keys: bool,\n\n\n\n /// If `true` and the packed message is a `Forward`\n\n /// wrapping a plaintext packed for the given recipient, then both Forward and packed plaintext are unpacked automatically,\n\n /// and the unpacked plaintext will be returned instead of unpacked Forward.\n\n /// False by default.\n\n #[serde(default)]\n\n pub unwrap_re_wrapping_forward: bool,\n\n}\n\n\n\nimpl Default for UnpackOptions {\n\n fn default() -> Self {\n\n UnpackOptions {\n\n expect_decrypt_by_all_keys: false,\n\n unwrap_re_wrapping_forward: true,\n\n }\n\n }\n\n}\n\n\n\n/// Additional metadata about this `unpack` method execution like trust predicates\n", "file_path": "src/message/unpack/mod.rs", "rank": 38, "score": 89485.32321434063 }, { "content": " INVALID_PLAINTEXT_MSG_WRONG_TYP, MEDIATOR1_DID_DOC, MEDIATOR1_SECRETS,\n\n MESSAGE_ATTACHMENT_BASE64, MESSAGE_ATTACHMENT_JSON, MESSAGE_ATTACHMENT_LINKS,\n\n MESSAGE_ATTACHMENT_MULTI_1, MESSAGE_ATTACHMENT_MULTI_2, MESSAGE_FROM_PRIOR_FULL,\n\n MESSAGE_MINIMAL, MESSAGE_SIMPLE, PLAINTEXT_FROM_PRIOR,\n\n PLAINTEXT_FROM_PRIOR_INVALID_SIGNATURE, PLAINTEXT_INVALID_FROM_PRIOR,\n\n PLAINTEXT_MSG_ATTACHMENT_BASE64, PLAINTEXT_MSG_ATTACHMENT_JSON,\n\n PLAINTEXT_MSG_ATTACHMENT_LINKS, PLAINTEXT_MSG_ATTACHMENT_MULTI_1,\n\n PLAINTEXT_MSG_ATTACHMENT_MULTI_2, PLAINTEXT_MSG_MINIMAL, PLAINTEXT_MSG_SIMPLE,\n\n SIGNED_MSG_ALICE_KEY_1, SIGNED_MSG_ALICE_KEY_2, SIGNED_MSG_ALICE_KEY_3,\n\n },\n\n PackEncryptedOptions,\n\n 
};\n\n\n\n use super::*;\n\n\n\n #[tokio::test]\n\n async fn unpack_works_plaintext() {\n\n let plaintext_metadata = UnpackMetadata {\n\n anonymous_sender: false,\n\n authenticated: false,\n", "file_path": "src/message/unpack/mod.rs", "rank": 39, "score": 89485.20846636848 }, { "content": " )\n\n .await;\n\n }\n\n\n\n #[tokio::test]\n\n async fn unpack_works_malformed_plaintext_msg() {\n\n _verify_unpack_malformed(\n\n &INVALID_PLAINTEXT_MSG_EMPTY,\n\n \"Malformed: Message is not a valid JWE, JWS or JWM\",\n\n )\n\n .await;\n\n\n\n _verify_unpack_malformed(\n\n &INVALID_PLAINTEXT_MSG_STRING,\n\n \"Malformed: Message is not a valid JWE, JWS or JWM\",\n\n )\n\n .await;\n\n\n\n _verify_unpack_malformed(\n\n &INVALID_PLAINTEXT_MSG_NO_ID,\n", "file_path": "src/message/unpack/mod.rs", "rank": 40, "score": 89484.92660167014 }, { "content": " _verify_unpack_malformed(\n\n &INVALID_PLAINTEXT_MSG_ATTACHMENTS_EMPTY_DATA,\n\n \"Malformed: Message is not a valid JWE, JWS or JWM\",\n\n )\n\n .await;\n\n\n\n _verify_unpack_malformed(\n\n &INVALID_PLAINTEXT_MSG_ATTACHMENTS_LINKS_NO_HASH,\n\n \"Malformed: Message is not a valid JWE, JWS or JWM\",\n\n )\n\n .await;\n\n\n\n _verify_unpack_malformed(\n\n &INVALID_PLAINTEXT_MSG_ATTACHMENTS_AS_STRING,\n\n \"Malformed: Message is not a valid JWE, JWS or JWM\",\n\n )\n\n .await;\n\n\n\n _verify_unpack_malformed(\n\n &INVALID_PLAINTEXT_MSG_ATTACHMENTS_AS_INT_ARRAY,\n", "file_path": "src/message/unpack/mod.rs", "rank": 41, "score": 89484.77799540917 }, { "content": "use crate::{\n\n did::DIDResolver,\n\n error::{err_msg, ErrorKind, Result, ResultContext, ResultExt},\n\n jws,\n\n utils::{crypto::AsKnownKeyPair, did::did_or_url},\n\n FromPrior,\n\n};\n\nuse askar_crypto::alg::{ed25519::Ed25519KeyPair, k256::K256KeyPair, p256::P256KeyPair};\n\n\n\nimpl FromPrior {\n\n /// Unpacks a plaintext value from a signed `from_prior` JWT.\n\n /// https://identity.foundation/didcomm-messaging/spec/#did-rotation\n\n ///\n\n /// # Parameters\n\n /// - 
`from_prior_jwt` signed `from_prior` JWT.\n\n /// - `did_resolver` instance of `DIDResolver` to resolve DIDs.\n\n ///\n\n /// # Returns\n\n /// Tuple (plaintext `from_prior` value, identifier of the issuer key used to sign `from_prior`)\n\n ///\n", "file_path": "src/message/from_prior/unpack.rs", "rank": 42, "score": 89484.6915188076 }, { "content": " BOB_DID_DOC.clone(),\n\n CHARLIE_DID_DOC.clone(),\n\n ]);\n\n\n\n let secrets_resolver = ExampleSecretsResolver::new(BOB_SECRETS.clone());\n\n\n\n let (msg, mut metadata) = Message::unpack(\n\n msg,\n\n &did_resolver,\n\n &secrets_resolver,\n\n &UnpackOptions::default(),\n\n )\n\n .await\n\n .expect(\"unpack is ok.\");\n\n\n\n assert_eq!(&msg, exp_msg);\n\n\n\n metadata.signed_message = exp_metadata.signed_message.clone();\n\n assert_eq!(&metadata, exp_metadata);\n\n }\n", "file_path": "src/message/unpack/mod.rs", "rank": 43, "score": 89484.60069418486 }, { "content": " .await;\n\n\n\n _verify_unpack_malformed(\n\n &INVALID_PLAINTEXT_MSG_WRONG_TYP,\n\n \"Malformed: `typ` must be \\\"application/didcomm-plain+json\\\"\",\n\n )\n\n .await;\n\n\n\n _verify_unpack_malformed(\n\n &INVALID_PLAINTEXT_MSG_EMPTY_ATTACHMENTS,\n\n \"Malformed: Message is not a valid JWE, JWS or JWM\",\n\n )\n\n .await;\n\n\n\n _verify_unpack_malformed(\n\n &INVALID_PLAINTEXT_MSG_ATTACHMENTS_NO_DATA,\n\n \"Malformed: Message is not a valid JWE, JWS or JWM\",\n\n )\n\n .await;\n\n\n", "file_path": "src/message/unpack/mod.rs", "rank": 44, "score": 89484.2590177682 }, { "content": "\n\n /// Target key IDS for encryption if the plaintext has been encrypted\n\n pub encrypted_to_kids: Option<Vec<String>>,\n\n\n\n /// Key ID used for signature if the plaintext has been signed\n\n pub sign_from: Option<String>,\n\n\n\n /// Key ID used for from_prior header signature if from_prior header is present\n\n pub from_prior_issuer_kid: Option<String>,\n\n\n\n /// Algorithm used for authenticated encryption\n\n pub enc_alg_auth: Option<AuthCryptAlg>,\n\n\n\n /// 
Algorithm used for anonymous encryption\n\n pub enc_alg_anon: Option<AnonCryptAlg>,\n\n\n\n /// Algorithm used for message signing\n\n pub sign_alg: Option<SignAlg>,\n\n\n\n /// If the plaintext has been signed, the JWS is returned for non-repudiation purposes\n", "file_path": "src/message/unpack/mod.rs", "rank": 45, "score": 89484.25982473114 }, { "content": " // (This serializtion is a double work because forwarded_msg will then\n\n // be deserialized in _try_unpack_anoncrypt.)\n\n let forwarded_msg = serde_json::to_string(&forward_msg.forwarded_msg).kind(\n\n ErrorKind::InvalidState,\n\n \"Unable serialize forwarded message\",\n\n )?;\n\n\n\n return Ok(Some(forwarded_msg));\n\n }\n\n }\n\n\n\n Ok(None)\n\n }\n\n}\n\n\n\n/// Allows fine customization of unpacking process\n\n#[derive(Debug, PartialEq, Eq, Deserialize, Clone)]\n\npub struct UnpackOptions {\n\n /// Whether the plaintext must be decryptable by all keys resolved by the secrets resolver. False by default.\n\n #[serde(default)]\n", "file_path": "src/message/unpack/mod.rs", "rank": 46, "score": 89484.24096005734 }, { "content": " )\n\n .await\n\n .expect(\"Unable encrypt\");\n\n\n\n assert_eq!(\n\n pack_metadata.messaging_service.as_ref(),\n\n Some(&MessagingServiceMetadata {\n\n id: BOB_SERVICE.id.clone(),\n\n service_endpoint: BOB_DID_COMM_MESSAGING_SERVICE.service_endpoint.clone(),\n\n })\n\n );\n\n\n\n let (unpacked_msg_mediator1, unpack_metadata_mediator1) = Message::unpack(\n\n &msg,\n\n &did_resolver,\n\n &mediator1_secrets_resolver,\n\n &UnpackOptions::default(),\n\n )\n\n .await\n\n .expect(\"Unable unpack\");\n", "file_path": "src/message/unpack/mod.rs", "rank": 47, "score": 89484.20963461282 }, { "content": " &UnpackOptions {\n\n unwrap_re_wrapping_forward: false,\n\n ..UnpackOptions::default()\n\n },\n\n )\n\n .await\n\n .expect(\"Unable unpack\");\n\n\n\n let forward_at_bob =\n\n try_parse_forward(&unpacked_once_msg).expect(\"Message is not Forward\");\n\n\n\n assert_eq!(forward_at_bob.msg, 
&unpacked_once_msg);\n\n assert_eq!(&forward_at_bob.next, to);\n\n\n\n assert!(unpack_once_metadata.encrypted);\n\n assert!(!unpack_once_metadata.authenticated);\n\n assert!(!unpack_once_metadata.non_repudiation);\n\n assert!(unpack_once_metadata.anonymous_sender);\n\n assert!(!unpack_once_metadata.re_wrapped_in_forward);\n\n\n", "file_path": "src/message/unpack/mod.rs", "rank": 48, "score": 89484.15292053716 }, { "content": "\n\n let forward =\n\n try_parse_forward(&unpacked_msg_mediator1).expect(\"Message is not Forward\");\n\n\n\n assert_eq!(forward.msg, &unpacked_msg_mediator1);\n\n assert_eq!(&forward.next, to);\n\n\n\n assert!(unpack_metadata_mediator1.encrypted);\n\n assert!(!unpack_metadata_mediator1.authenticated);\n\n assert!(!unpack_metadata_mediator1.non_repudiation);\n\n assert!(unpack_metadata_mediator1.anonymous_sender);\n\n assert!(!unpack_metadata_mediator1.re_wrapped_in_forward);\n\n\n\n let forwarded_msg = serde_json::to_string(&forward.forwarded_msg)\n\n .expect(\"Unable serialize forwarded message\");\n\n\n\n let re_wrapping_forward_msg = wrap_in_forward(\n\n &forwarded_msg,\n\n None,\n\n to,\n", "file_path": "src/message/unpack/mod.rs", "rank": 49, "score": 89484.0168496708 }, { "content": " &vec![to.to_owned()],\n\n &AnonCryptAlg::default(),\n\n &did_resolver,\n\n )\n\n .await\n\n .expect(\"Unable wrap in forward\");\n\n\n\n let (unpacked_msg, unpack_metadata) = Message::unpack(\n\n &re_wrapping_forward_msg,\n\n &did_resolver,\n\n &bob_secrets_resolver,\n\n &UnpackOptions::default(),\n\n )\n\n .await\n\n .expect(\"Unable unpack\");\n\n\n\n assert_eq!(&unpacked_msg, &*MESSAGE_SIMPLE);\n\n assert!(unpack_metadata.re_wrapped_in_forward);\n\n }\n\n }\n", "file_path": "src/message/unpack/mod.rs", "rank": 50, "score": 89483.86612959711 }, { "content": " from,\n\n sign_by,\n\n &did_resolver,\n\n &alice_secrets_resolver,\n\n &PackEncryptedOptions::default(),\n\n )\n\n .await\n\n .expect(\"Unable encrypt\");\n\n\n\n assert_eq!(\n\n 
pack_metadata.messaging_service.as_ref(),\n\n Some(&MessagingServiceMetadata {\n\n id: BOB_SERVICE.id.clone(),\n\n service_endpoint: BOB_DID_COMM_MESSAGING_SERVICE.service_endpoint.clone(),\n\n })\n\n );\n\n\n\n let (unpacked_msg_mediator1, unpack_metadata_mediator1) = Message::unpack(\n\n &msg,\n\n &did_resolver,\n", "file_path": "src/message/unpack/mod.rs", "rank": 51, "score": 89483.81831155 }, { "content": " msg,\n\n &did_resolver,\n\n &secrets_resolver,\n\n &UnpackOptions::default(),\n\n )\n\n .await\n\n .expect(\"unpack is ok.\");\n\n\n\n assert_eq!(&msg, exp_msg);\n\n assert_eq!(&metadata, exp_metadata);\n\n }\n\n\n\n // Same as `_verify_unpack`, but skips indeterministic values from metadata checking\n\n async fn _verify_unpack_undeterministic(\n\n msg: &str,\n\n exp_msg: &Message,\n\n exp_metadata: &UnpackMetadata,\n\n ) {\n\n let did_resolver = ExampleDIDResolver::new(vec![\n\n ALICE_DID_DOC.clone(),\n", "file_path": "src/message/unpack/mod.rs", "rank": 52, "score": 89483.71857152363 }, { "content": " #[tokio::test]\n\n async fn unpack_plaintext_works_invalid_from_prior_signature() {\n\n _verify_unpack_returns_error(\n\n PLAINTEXT_FROM_PRIOR_INVALID_SIGNATURE,\n\n ErrorKind::Malformed,\n\n \"Malformed: Unable to verify from_prior signature: Unable decode signature: Invalid last symbol 66, offset 85.\",\n\n )\n\n .await;\n\n }\n\n\n\n async fn _verify_unpack(msg: &str, exp_msg: &Message, exp_metadata: &UnpackMetadata) {\n\n let did_resolver = ExampleDIDResolver::new(vec![\n\n ALICE_DID_DOC.clone(),\n\n BOB_DID_DOC.clone(),\n\n CHARLIE_DID_DOC.clone(),\n\n ]);\n\n\n\n let secrets_resolver = ExampleSecretsResolver::new(BOB_SECRETS.clone());\n\n\n\n let (msg, metadata) = Message::unpack(\n", "file_path": "src/message/unpack/mod.rs", "rank": 53, "score": 89483.62668836823 }, { "content": " &mediator1_secrets_resolver,\n\n &UnpackOptions {\n\n unwrap_re_wrapping_forward: false,\n\n ..UnpackOptions::default()\n\n },\n\n )\n\n .await\n\n .expect(\"Unable 
unpack\");\n\n\n\n let forward_at_mediator1 =\n\n try_parse_forward(&unpacked_msg_mediator1).expect(\"Message is not Forward\");\n\n\n\n assert_eq!(forward_at_mediator1.msg, &unpacked_msg_mediator1);\n\n assert_eq!(&forward_at_mediator1.next, to);\n\n\n\n assert!(unpack_metadata_mediator1.encrypted);\n\n assert!(!unpack_metadata_mediator1.authenticated);\n\n assert!(!unpack_metadata_mediator1.non_repudiation);\n\n assert!(unpack_metadata_mediator1.anonymous_sender);\n\n assert!(!unpack_metadata_mediator1.re_wrapped_in_forward);\n", "file_path": "src/message/unpack/mod.rs", "rank": 54, "score": 89483.59734921032 }, { "content": " )\n\n })?;\n\n\n\n Ok((msg, metadata))\n\n }\n\n\n\n async fn _try_unwrap_forwarded_message<'dr, 'sr>(\n\n msg: &str,\n\n did_resolver: &'dr (dyn DIDResolver + 'dr),\n\n secrets_resolver: &'sr (dyn SecretsResolver + 'sr),\n\n ) -> Result<Option<String>> {\n\n let plaintext = match Message::from_str(msg) {\n\n Ok(m) => m,\n\n Err(e) if e.kind() == ErrorKind::Malformed => return Ok(None),\n\n Err(e) => Err(e)?,\n\n };\n\n\n\n if let Some(forward_msg) = try_parse_forward(&plaintext) {\n\n if has_key_agreement_secret(&forward_msg.next, did_resolver, secrets_resolver).await? 
{\n\n // TODO: Think how to avoid extra serialization of forwarded_msg here.\n", "file_path": "src/message/unpack/mod.rs", "rank": 55, "score": 89483.53589359447 }, { "content": " pub signed_message: Option<String>,\n\n\n\n /// If plaintext contains from_prior header, its unpacked value is returned\n\n pub from_prior: Option<FromPrior>,\n\n}\n\n\n\nasync fn has_key_agreement_secret<'dr, 'sr>(\n\n did_or_kid: &str,\n\n did_resolver: &'dr (dyn DIDResolver + 'dr),\n\n secrets_resolver: &'sr (dyn SecretsResolver + 'sr),\n\n) -> Result<bool> {\n\n let kids = match did_or_url(did_or_kid) {\n\n (_, Some(kid)) => {\n\n vec![kid.to_owned()]\n\n }\n\n (did, None) => {\n\n let did_doc = did_resolver\n\n .resolve(did)\n\n .await?\n\n .ok_or_else(|| err_msg(ErrorKind::DIDNotResolved, \"Next DID doc not found\"))?;\n", "file_path": "src/message/unpack/mod.rs", "rank": 56, "score": 89482.7927949174 }, { "content": "use askar_crypto::{\n\n alg::{\n\n aes::{A256CbcHs512, A256Gcm, A256Kw, AesKey},\n\n chacha20::{Chacha20Key, XC20P},\n\n p256::P256KeyPair,\n\n x25519::X25519KeyPair,\n\n },\n\n kdf::ecdh_es::EcdhEs,\n\n};\n\n\n\nuse crate::{\n\n algorithms::AnonCryptAlg,\n\n error::{err_msg, ErrorKind, Result, ResultExt},\n\n jwe::{self, envelope::JWE},\n\n secrets::SecretsResolver,\n\n utils::{\n\n crypto::{AsKnownKeyPair, KnownKeyPair},\n\n did::did_or_url,\n\n },\n\n UnpackMetadata, UnpackOptions,\n", "file_path": "src/message/unpack/anoncrypt.rs", "rank": 57, "score": 89482.59967404138 }, { "content": "\n\n let forwarded_msg_at_mediator1 =\n\n serde_json::to_string(&forward_at_mediator1.forwarded_msg)\n\n .expect(\"Unable serialize forwarded message\");\n\n\n\n let re_wrapping_forward_msg = wrap_in_forward(\n\n &forwarded_msg_at_mediator1,\n\n None,\n\n to,\n\n &vec![to.to_owned()],\n\n &AnonCryptAlg::default(),\n\n &did_resolver,\n\n )\n\n .await\n\n .expect(\"Unable wrap in forward\");\n\n\n\n let (unpacked_once_msg, unpack_once_metadata) = Message::unpack(\n\n 
&re_wrapping_forward_msg,\n\n &did_resolver,\n\n &bob_secrets_resolver,\n", "file_path": "src/message/unpack/mod.rs", "rank": 58, "score": 89482.23723220339 }, { "content": " anoncrypted =\n\n _try_unpack_anoncrypt(msg, secrets_resolver, options, &mut metadata).await?;\n\n\n\n if options.unwrap_re_wrapping_forward && anoncrypted.is_some() {\n\n let forwarded_msg_opt = Self::_try_unwrap_forwarded_message(\n\n anoncrypted.as_deref().unwrap(),\n\n did_resolver,\n\n secrets_resolver,\n\n )\n\n .await?;\n\n\n\n if forwarded_msg_opt.is_some() {\n\n forwarded_msg = forwarded_msg_opt.unwrap();\n\n msg = &forwarded_msg;\n\n\n\n metadata.re_wrapped_in_forward = true;\n\n\n\n continue;\n\n }\n\n }\n", "file_path": "src/message/unpack/mod.rs", "rank": 59, "score": 89481.92914969854 }, { "content": "use serde::{Deserialize, Serialize};\n\n\n\nuse anoncrypt::_try_unpack_anoncrypt;\n\nuse authcrypt::_try_unpack_authcrypt;\n\nuse sign::_try_unapck_sign;\n\n\n\nuse crate::{\n\n algorithms::{AnonCryptAlg, AuthCryptAlg, SignAlg},\n\n did::DIDResolver,\n\n error::{err_msg, ErrorKind, Result, ResultExt},\n\n message::unpack::plaintext::_try_unpack_plaintext,\n\n protocols::routing::try_parse_forward,\n\n secrets::SecretsResolver,\n\n utils::did::did_or_url,\n\n FromPrior, Message,\n\n};\n\n\n\nmod anoncrypt;\n\nmod authcrypt;\n\nmod plaintext;\n", "file_path": "src/message/unpack/mod.rs", "rank": 60, "score": 89481.80515180838 }, { "content": " /// # Errors\n\n /// - `Malformed` Signed `from_prior` JWT is malformed.\n\n /// - `DIDNotResolved` Issuer DID not found.\n\n /// - `DIDUrlNotFound` Issuer authentication verification method is not found.\n\n /// - `Unsupported` Used crypto or method is unsupported.\n\n pub async fn unpack<'dr>(\n\n from_prior_jwt: &str,\n\n did_resolver: &'dr (dyn DIDResolver + 'dr),\n\n ) -> Result<(FromPrior, String)> {\n\n let mut buf = vec![];\n\n let parsed = jws::parse_compact(from_prior_jwt, &mut buf)?;\n\n\n\n let typ = parsed.parsed_header.typ;\n\n let 
alg = parsed.parsed_header.alg.clone();\n\n let kid = parsed.parsed_header.kid;\n\n\n\n if typ != \"JWT\" {\n\n Err(err_msg(\n\n ErrorKind::Malformed,\n\n \"from_prior is malformed: typ is not JWT\",\n", "file_path": "src/message/from_prior/unpack.rs", "rank": 61, "score": 89481.34796723821 }, { "content": " \"Malformed: Message is not a valid JWE, JWS or JWM\",\n\n )\n\n .await;\n\n\n\n _verify_unpack_malformed(\n\n &INVALID_PLAINTEXT_MSG_NO_TYP,\n\n \"Malformed: Message is not a valid JWE, JWS or JWM\",\n\n )\n\n .await;\n\n\n\n _verify_unpack_malformed(\n\n &INVALID_PLAINTEXT_MSG_NO_TYPE,\n\n \"Malformed: Message is not a valid JWE, JWS or JWM\",\n\n )\n\n .await;\n\n\n\n _verify_unpack_malformed(\n\n &INVALID_PLAINTEXT_MSG_NO_BODY,\n\n \"Malformed: Message is not a valid JWE, JWS or JWM\",\n\n )\n", "file_path": "src/message/unpack/mod.rs", "rank": 62, "score": 89480.86390902361 }, { "content": " \"Malformed: Message is not a valid JWE, JWS or JWM\",\n\n )\n\n .await;\n\n\n\n _verify_unpack_malformed(\n\n &INVALID_PLAINTEXT_MSG_ATTACHMENTS_WRONG_DATA,\n\n \"Malformed: Message is not a valid JWE, JWS or JWM\",\n\n )\n\n .await;\n\n\n\n _verify_unpack_malformed(\n\n &INVALID_PLAINTEXT_MSG_ATTACHMENTS_WRONG_ID,\n\n \"Malformed: Message is not a valid JWE, JWS or JWM\",\n\n )\n\n .await;\n\n\n\n _verify_unpack_malformed(\n\n &INVALID_PLAINTEXT_MSG_ATTACHMENTS_NULL_DATA,\n\n \"Malformed: Message is not a valid JWE, JWS or JWM\",\n\n )\n", "file_path": "src/message/unpack/mod.rs", "rank": 63, "score": 89480.61741124853 }, { "content": "\n\n async fn _verify_unpack_malformed(msg: &str, exp_error_str: &str) {\n\n _verify_unpack_returns_error(msg, ErrorKind::Malformed, exp_error_str).await\n\n }\n\n\n\n async fn _verify_unpack_returns_error(msg: &str, exp_err_kind: ErrorKind, exp_err_msg: &str) {\n\n let did_resolver = ExampleDIDResolver::new(vec![\n\n ALICE_DID_DOC.clone(),\n\n BOB_DID_DOC.clone(),\n\n CHARLIE_DID_DOC.clone(),\n\n ]);\n\n\n\n let secrets_resolver = 
ExampleSecretsResolver::new(BOB_SECRETS.clone());\n\n\n\n let err = Message::unpack(\n\n msg,\n\n &did_resolver,\n\n &secrets_resolver,\n\n &UnpackOptions::default(),\n\n )\n\n .await\n\n .expect_err(\"res is ok\");\n\n\n\n assert_eq!(err.kind(), exp_err_kind);\n\n assert_eq!(format!(\"{}\", err), exp_err_msg);\n\n }\n\n}\n", "file_path": "src/message/unpack/mod.rs", "rank": 64, "score": 89480.40306680251 }, { "content": " &MESSAGE_ATTACHMENT_BASE64,\n\n &plaintext_metadata,\n\n )\n\n .await;\n\n\n\n _verify_unpack(\n\n PLAINTEXT_MSG_ATTACHMENT_JSON,\n\n &MESSAGE_ATTACHMENT_JSON,\n\n &plaintext_metadata,\n\n )\n\n .await;\n\n\n\n _verify_unpack(\n\n PLAINTEXT_MSG_ATTACHMENT_LINKS,\n\n &MESSAGE_ATTACHMENT_LINKS,\n\n &plaintext_metadata,\n\n )\n\n .await;\n\n\n\n _verify_unpack(\n", "file_path": "src/message/unpack/mod.rs", "rank": 65, "score": 89480.24237931006 }, { "content": " &did_resolver,\n\n &secrets_resolver,\n\n &PackEncryptedOptions {\n\n forward: false,\n\n enc_alg_anon: enc_alg.clone(),\n\n ..PackEncryptedOptions::default()\n\n },\n\n )\n\n .await\n\n .expect(\"Unable pack_encrypted\");\n\n\n\n _verify_unpack_undeterministic(\n\n &packed,\n\n msg,\n\n &UnpackMetadata {\n\n sign_from: Some(sign_by_kid.into()),\n\n sign_alg: Some(sign_alg),\n\n signed_message: None,\n\n anonymous_sender: true,\n\n authenticated: true,\n", "file_path": "src/message/unpack/mod.rs", "rank": 66, "score": 89480.23946392817 }, { "content": " update_field(SIGNED_MSG_ALICE_KEY_1, \"payload\", \"invalid\").as_str(),\n\n \"Malformed: Wrong signature\",\n\n )\n\n .await;\n\n\n\n _verify_unpack_malformed(\n\n remove_field(SIGNED_MSG_ALICE_KEY_1, \"payload\").as_str(),\n\n \"Malformed: Message is not a valid JWE, JWS or JWM\",\n\n )\n\n .await;\n\n\n\n _verify_unpack_malformed(\n\n update_field(SIGNED_MSG_ALICE_KEY_1, \"signatures\", \"invalid\").as_str(),\n\n \"Malformed: Message is not a valid JWE, JWS or JWM\",\n\n )\n\n .await;\n\n\n\n _verify_unpack_malformed(\n\n 
remove_field(SIGNED_MSG_ALICE_KEY_1, \"signatures\").as_str(),\n\n \"Malformed: Message is not a valid JWE, JWS or JWM\",\n", "file_path": "src/message/unpack/mod.rs", "rank": 67, "score": 89480.0926341177 }, { "content": " sign_alg: Some(SignAlg::EdDSA),\n\n signed_message: Some(SIGNED_MSG_ALICE_KEY_1.into()),\n\n ..sign_metadata.clone()\n\n },\n\n )\n\n .await;\n\n\n\n _verify_unpack(\n\n SIGNED_MSG_ALICE_KEY_2,\n\n &MESSAGE_SIMPLE,\n\n &UnpackMetadata {\n\n sign_from: Some(\"did:example:alice#key-2\".into()),\n\n sign_alg: Some(SignAlg::ES256),\n\n signed_message: Some(SIGNED_MSG_ALICE_KEY_2.into()),\n\n ..sign_metadata.clone()\n\n },\n\n )\n\n .await;\n\n\n\n _verify_unpack(\n", "file_path": "src/message/unpack/mod.rs", "rank": 68, "score": 89479.87932892457 }, { "content": " &PackEncryptedOptions {\n\n forward: false,\n\n protect_sender: true,\n\n enc_alg_anon: enc_alg_anon.clone(),\n\n ..PackEncryptedOptions::default()\n\n },\n\n )\n\n .await\n\n .expect(\"Unable pack_encrypted\");\n\n\n\n _verify_unpack_undeterministic(\n\n &packed,\n\n msg,\n\n &UnpackMetadata {\n\n sign_from: Some(sign_by_kid.into()),\n\n sign_alg: Some(sign_alg),\n\n signed_message: Some(\"nondeterministic\".into()),\n\n anonymous_sender: true,\n\n authenticated: true,\n\n non_repudiation: true,\n", "file_path": "src/message/unpack/mod.rs", "rank": 69, "score": 89479.87657077459 }, { "content": " .await;\n\n\n\n _verify_unpack_malformed(\n\n remove_field(ENCRYPTED_MSG_AUTH_X25519, \"iv\").as_str(),\n\n \"Malformed: Message is not a valid JWE, JWS or JWM\",\n\n )\n\n .await;\n\n\n\n _verify_unpack_malformed(\n\n update_field(ENCRYPTED_MSG_AUTH_X25519, \"ciphertext\", \"invalid\").as_str(),\n\n \"Malformed: Unable decode ciphertext: Invalid last symbol 100, offset 6.\",\n\n )\n\n .await;\n\n\n\n _verify_unpack_malformed(\n\n remove_field(ENCRYPTED_MSG_AUTH_X25519, \"ciphertext\").as_str(),\n\n \"Malformed: Message is not a valid JWE, JWS or JWM\",\n\n )\n\n .await;\n\n\n", "file_path": 
"src/message/unpack/mod.rs", "rank": 70, "score": 89479.55307868977 }, { "content": " PLAINTEXT_MSG_ATTACHMENT_MULTI_1,\n\n &MESSAGE_ATTACHMENT_MULTI_1,\n\n &plaintext_metadata,\n\n )\n\n .await;\n\n\n\n _verify_unpack(\n\n PLAINTEXT_MSG_ATTACHMENT_MULTI_2,\n\n &MESSAGE_ATTACHMENT_MULTI_2,\n\n &plaintext_metadata,\n\n )\n\n .await;\n\n }\n\n\n\n #[tokio::test]\n\n async fn unpack_works_plaintext_2way() {\n\n _unpack_works_plaintext_2way(&MESSAGE_SIMPLE).await;\n\n _unpack_works_plaintext_2way(&MESSAGE_MINIMAL).await;\n\n _unpack_works_plaintext_2way(&MESSAGE_ATTACHMENT_BASE64).await;\n\n _unpack_works_plaintext_2way(&MESSAGE_ATTACHMENT_JSON).await;\n", "file_path": "src/message/unpack/mod.rs", "rank": 71, "score": 89479.55006862394 }, { "content": " remove_field(ENCRYPTED_MSG_ANON_XC20P_1, \"iv\").as_str(),\n\n \"Malformed: Message is not a valid JWE, JWS or JWM\",\n\n )\n\n .await;\n\n\n\n _verify_unpack_malformed(\n\n update_field(ENCRYPTED_MSG_ANON_XC20P_1, \"ciphertext\", \"invalid\").as_str(),\n\n \"Malformed: Unable decode ciphertext: Invalid last symbol 100, offset 6.\",\n\n )\n\n .await;\n\n\n\n _verify_unpack_malformed(\n\n remove_field(ENCRYPTED_MSG_ANON_XC20P_1, \"ciphertext\").as_str(),\n\n \"Malformed: Message is not a valid JWE, JWS or JWM\",\n\n )\n\n .await;\n\n\n\n _verify_unpack_malformed(\n\n update_field(ENCRYPTED_MSG_ANON_XC20P_1, \"tag\", \"invalid\").as_str(),\n\n \"Malformed: Unable decode tag: Invalid last symbol 100, offset 6.\",\n", "file_path": "src/message/unpack/mod.rs", "rank": 72, "score": 89479.27984127826 }, { "content": " async fn unpack_works_malformed_anoncrypt_msg() {\n\n _verify_unpack_malformed(\n\n update_field(ENCRYPTED_MSG_ANON_XC20P_1, \"protected\", \"invalid\").as_str(),\n\n \"Malformed: Unable decode protected header: Invalid last symbol 100, offset 6.\",\n\n )\n\n .await;\n\n\n\n _verify_unpack_malformed(\n\n remove_field(ENCRYPTED_MSG_ANON_XC20P_1, \"protected\").as_str(),\n\n \"Malformed: Message is not a valid JWE, 
JWS or JWM\",\n\n )\n\n .await;\n\n\n\n _verify_unpack_malformed(\n\n update_field(ENCRYPTED_MSG_ANON_XC20P_1, \"iv\", \"invalid\").as_str(),\n\n \"Malformed: Unable decode iv: Invalid last symbol 100, offset 6.\",\n\n )\n\n .await;\n\n\n\n _verify_unpack_malformed(\n", "file_path": "src/message/unpack/mod.rs", "rank": 73, "score": 89479.07859892656 }, { "content": " ..PackEncryptedOptions::default()\n\n },\n\n )\n\n .await\n\n .expect(\"encrypt is ok.\");\n\n\n\n _verify_unpack_undeterministic(\n\n &packed,\n\n msg,\n\n &UnpackMetadata {\n\n sign_from: Some(sign_by_kid.into()),\n\n sign_alg: Some(sign_alg),\n\n signed_message: Some(\"nondeterministic\".into()),\n\n anonymous_sender: false,\n\n authenticated: true,\n\n non_repudiation: true,\n\n encrypted: true,\n\n enc_alg_auth: Some(enc_alg),\n\n enc_alg_anon: None,\n\n encrypted_from_kid: Some(from_kid.into()),\n", "file_path": "src/message/unpack/mod.rs", "rank": 74, "score": 89479.00438205471 }, { "content": " }\n\n\n\n #[tokio::test]\n\n async fn unpack_works_malformed_authcrypt_msg() {\n\n _verify_unpack_malformed(\n\n update_field(ENCRYPTED_MSG_AUTH_X25519, \"protected\", \"invalid\").as_str(),\n\n \"Malformed: Unable decode protected header: Invalid last symbol 100, offset 6.\",\n\n )\n\n .await;\n\n\n\n _verify_unpack_malformed(\n\n remove_field(ENCRYPTED_MSG_AUTH_X25519, \"protected\").as_str(),\n\n \"Malformed: Message is not a valid JWE, JWS or JWM\",\n\n )\n\n .await;\n\n\n\n _verify_unpack_malformed(\n\n update_field(ENCRYPTED_MSG_AUTH_X25519, \"iv\", \"invalid\").as_str(),\n\n \"Malformed: Unable decode iv: Invalid last symbol 100, offset 6.\",\n\n )\n", "file_path": "src/message/unpack/mod.rs", "rank": 75, "score": 89478.86655099009 }, { "content": " SIGNED_MSG_ALICE_KEY_3,\n\n &MESSAGE_SIMPLE,\n\n &UnpackMetadata {\n\n sign_from: Some(\"did:example:alice#key-3\".into()),\n\n sign_alg: Some(SignAlg::ES256K),\n\n signed_message: Some(SIGNED_MSG_ALICE_KEY_3.into()),\n\n ..sign_metadata.clone()\n\n 
},\n\n )\n\n .await;\n\n }\n\n\n\n #[tokio::test]\n\n async fn unpack_works_signed_2way() {\n\n _unpack_works_signed_2way(\n\n &MESSAGE_SIMPLE,\n\n ALICE_DID,\n\n &ALICE_AUTH_METHOD_25519.id,\n\n SignAlg::EdDSA,\n\n )\n", "file_path": "src/message/unpack/mod.rs", "rank": 76, "score": 89478.82351734067 }, { "content": " non_repudiation: false,\n\n encrypted: false,\n\n enc_alg_auth: None,\n\n enc_alg_anon: None,\n\n sign_alg: None,\n\n encrypted_from_kid: None,\n\n encrypted_to_kids: None,\n\n sign_from: None,\n\n signed_message: None,\n\n from_prior_issuer_kid: None,\n\n from_prior: None,\n\n re_wrapped_in_forward: false,\n\n };\n\n\n\n _verify_unpack(PLAINTEXT_MSG_SIMPLE, &MESSAGE_SIMPLE, &plaintext_metadata).await;\n\n\n\n _verify_unpack(PLAINTEXT_MSG_MINIMAL, &MESSAGE_MINIMAL, &plaintext_metadata).await;\n\n\n\n _verify_unpack(\n\n PLAINTEXT_MSG_ATTACHMENT_BASE64,\n", "file_path": "src/message/unpack/mod.rs", "rank": 77, "score": 89478.57859364389 }, { "content": " _verify_unpack_malformed(\n\n update_field(ENCRYPTED_MSG_AUTH_X25519, \"tag\", \"invalid\").as_str(),\n\n \"Malformed: Unable decode tag: Invalid last symbol 100, offset 6.\",\n\n )\n\n .await;\n\n\n\n _verify_unpack_malformed(\n\n remove_field(ENCRYPTED_MSG_AUTH_X25519, \"tag\").as_str(),\n\n \"Malformed: Message is not a valid JWE, JWS or JWM\",\n\n )\n\n .await;\n\n\n\n _verify_unpack_malformed(\n\n update_protected_field(ENCRYPTED_MSG_AUTH_X25519, \"apv\", \"invalid\").as_str(),\n\n \"Malformed: Unable decode apv: Invalid last symbol 100, offset 6.\",\n\n )\n\n .await;\n\n\n\n _verify_unpack_malformed(\n\n remove_protected_field(ENCRYPTED_MSG_AUTH_X25519, \"apv\").as_str(),\n", "file_path": "src/message/unpack/mod.rs", "rank": 78, "score": 89478.53452100094 }, { "content": " _unpack_works_plaintext_2way(&MESSAGE_ATTACHMENT_LINKS).await;\n\n _unpack_works_plaintext_2way(&MESSAGE_ATTACHMENT_MULTI_1).await;\n\n _unpack_works_plaintext_2way(&MESSAGE_ATTACHMENT_MULTI_2).await;\n\n\n\n async fn 
_unpack_works_plaintext_2way(msg: &Message) {\n\n let did_resolver = ExampleDIDResolver::new(vec![ALICE_DID_DOC.clone()]);\n\n\n\n let packed = msg\n\n .pack_plaintext(&did_resolver)\n\n .await\n\n .expect(\"Unable pack_plaintext\");\n\n\n\n _verify_unpack(\n\n &packed,\n\n msg,\n\n &UnpackMetadata {\n\n anonymous_sender: false,\n\n authenticated: false,\n\n non_repudiation: false,\n\n encrypted: false,\n", "file_path": "src/message/unpack/mod.rs", "rank": 80, "score": 89478.329483102 }, { "content": " )\n\n .await;\n\n\n\n _verify_unpack_malformed(\n\n remove_field(ENCRYPTED_MSG_ANON_XC20P_1, \"tag\").as_str(),\n\n \"Malformed: Message is not a valid JWE, JWS or JWM\",\n\n )\n\n .await;\n\n\n\n _verify_unpack_malformed(\n\n update_protected_field(ENCRYPTED_MSG_ANON_XC20P_1, \"apv\", \"invalid\").as_str(),\n\n \"Malformed: Unable decode apv: Invalid last symbol 100, offset 6.\",\n\n )\n\n .await;\n\n\n\n _verify_unpack_malformed(\n\n remove_protected_field(ENCRYPTED_MSG_ANON_XC20P_1, \"apv\").as_str(),\n\n \"Malformed: Unable parse protected header: missing field `apv` at line 1 column 166\",\n\n )\n\n .await;\n", "file_path": "src/message/unpack/mod.rs", "rank": 81, "score": 89478.23809618031 }, { "content": " };\n\n\n\n _verify_unpack(\n\n PLAINTEXT_FROM_PRIOR,\n\n &MESSAGE_FROM_PRIOR_FULL,\n\n &exp_metadata,\n\n )\n\n .await;\n\n }\n\n\n\n #[tokio::test]\n\n async fn unpack_plaintext_works_invalid_from_prior() {\n\n _verify_unpack_returns_error(\n\n PLAINTEXT_INVALID_FROM_PRIOR,\n\n ErrorKind::Malformed,\n\n \"Malformed: Unable to parse compactly serialized JWS\",\n\n )\n\n .await;\n\n }\n\n\n", "file_path": "src/message/unpack/mod.rs", "rank": 82, "score": 89477.59147110619 }, { "content": " async fn _unpack_works_unwrap_re_wrapping_forward_off(\n\n to: &str,\n\n from: Option<&str>,\n\n sign_by: Option<&str>,\n\n ) {\n\n let did_resolver = ExampleDIDResolver::new(vec![\n\n ALICE_DID_DOC.clone(),\n\n BOB_DID_DOC.clone(),\n\n MEDIATOR1_DID_DOC.clone(),\n\n 
]);\n\n\n\n let alice_secrets_resolver = ExampleSecretsResolver::new(ALICE_SECRETS.clone());\n\n\n\n let bob_secrets_resolver = ExampleSecretsResolver::new(BOB_SECRETS.clone());\n\n\n\n let mediator1_secrets_resolver = ExampleSecretsResolver::new(MEDIATOR1_SECRETS.clone());\n\n\n\n let (msg, pack_metadata) = MESSAGE_SIMPLE\n\n .pack_encrypted(\n\n to,\n", "file_path": "src/message/unpack/mod.rs", "rank": 83, "score": 89477.4966799215 }, { "content": " re_wrapped_in_forward: false,\n\n };\n\n\n\n _verify_unpack(\n\n ENCRYPTED_MSG_ANON_XC20P_1,\n\n &MESSAGE_SIMPLE,\n\n &UnpackMetadata {\n\n enc_alg_anon: Some(AnonCryptAlg::Xc20pEcdhEsA256kw),\n\n encrypted_to_kids: Some(vec![\n\n \"did:example:bob#key-x25519-1\".into(),\n\n \"did:example:bob#key-x25519-2\".into(),\n\n \"did:example:bob#key-x25519-3\".into(),\n\n ]),\n\n ..metadata.clone()\n\n },\n\n )\n\n .await;\n\n\n\n _verify_unpack(\n\n ENCRYPTED_MSG_ANON_XC20P_2,\n", "file_path": "src/message/unpack/mod.rs", "rank": 84, "score": 89477.36538321013 }, { "content": "\n\n let payload =\n\n String::from_utf8(payload).kind(ErrorKind::Malformed, \"Signed payload is invalid utf8\")?;\n\n\n\n metadata.authenticated = true;\n\n metadata.non_repudiation = true;\n\n metadata.sign_from = Some(signer_kid.into());\n\n metadata.signed_message = Some(jws_json.into());\n\n\n\n Ok(Some(payload))\n\n}\n", "file_path": "src/message/unpack/sign.rs", "rank": 85, "score": 89477.19740129456 }, { "content": " _verify_unpack(\n\n &msg,\n\n &MESSAGE_SIMPLE,\n\n &UnpackMetadata {\n\n sign_from: Some(sign_by_kid.into()),\n\n sign_alg: Some(sign_alg),\n\n signed_message: Some(msg.clone()),\n\n anonymous_sender: false,\n\n authenticated: true,\n\n non_repudiation: true,\n\n encrypted: false,\n\n enc_alg_auth: None,\n\n enc_alg_anon: None,\n\n encrypted_from_kid: None,\n\n encrypted_to_kids: None,\n\n from_prior_issuer_kid: None,\n\n from_prior: None,\n\n re_wrapped_in_forward: false,\n\n },\n\n )\n", "file_path": "src/message/unpack/mod.rs", 
"rank": 86, "score": 89476.58669225052 }, { "content": " authenticated: false,\n\n non_repudiation: false,\n\n anonymous_sender: false,\n\n re_wrapped_in_forward: false,\n\n encrypted_from_kid: None,\n\n encrypted_to_kids: None,\n\n sign_from: None,\n\n from_prior_issuer_kid: None,\n\n enc_alg_auth: None,\n\n enc_alg_anon: None,\n\n sign_alg: None,\n\n signed_message: None,\n\n from_prior: None,\n\n };\n\n\n\n let mut msg: &str = msg;\n\n let mut anoncrypted: Option<String>;\n\n let mut forwarded_msg: String;\n\n\n\n loop {\n", "file_path": "src/message/unpack/mod.rs", "rank": 88, "score": 89475.85776112441 }, { "content": " if let Some(_) = to_kids.iter().find(|k| {\n\n let (k_did, k_url) = did_or_url(k);\n\n (k_did != to_did) || (k_url.is_none())\n\n }) {\n\n Err(err_msg(\n\n ErrorKind::Malformed,\n\n \"Recipient keys are outside of one did or can't be resolved to key agreement\",\n\n ))?;\n\n }\n\n\n\n if metadata.encrypted_to_kids.is_none() {\n\n metadata.encrypted_to_kids = Some(to_kids.iter().map(|&k| k.to_owned()).collect());\n\n } else {\n\n // TODO: Verify that same keys used for authcrypt as for anoncrypt envelope\n\n }\n\n\n\n metadata.authenticated = true;\n\n metadata.encrypted = true;\n\n metadata.encrypted_from_kid = Some(from_kid.into());\n\n\n", "file_path": "src/message/unpack/authcrypt.rs", "rank": 89, "score": 89475.78063579631 }, { "content": " non_repudiation: true,\n\n sign_from: Some(\"did:example:alice#key-1\".into()),\n\n sign_alg: Some(SignAlg::EdDSA),\n\n signed_message: Some(ENCRYPTED_MSG_AUTH_P256_SIGNED.into()),\n\n ..metadata.clone()\n\n },\n\n )\n\n .await;\n\n\n\n // TODO: Check hidden sender case\n\n // TODO: Check P-384 curve support\n\n // TODO: Check P-521 curve support\n\n }\n\n\n\n #[tokio::test]\n\n async fn unpack_works_authcrypted_2way() {\n\n _unpack_works_authcrypted_2way(\n\n &MESSAGE_SIMPLE,\n\n BOB_DID,\n\n &[\n", "file_path": "src/message/unpack/mod.rs", "rank": 90, "score": 89475.5648902956 }, { "content": " 
authenticated: true,\n\n non_repudiation: true,\n\n encrypted: false,\n\n enc_alg_auth: None,\n\n enc_alg_anon: None,\n\n sign_alg: None,\n\n encrypted_from_kid: None,\n\n encrypted_to_kids: None,\n\n sign_from: None,\n\n signed_message: None,\n\n from_prior_issuer_kid: None,\n\n from_prior: None,\n\n re_wrapped_in_forward: false,\n\n };\n\n\n\n _verify_unpack(\n\n SIGNED_MSG_ALICE_KEY_1,\n\n &MESSAGE_SIMPLE,\n\n &UnpackMetadata {\n\n sign_from: Some(\"did:example:alice#key-1\".into()),\n", "file_path": "src/message/unpack/mod.rs", "rank": 91, "score": 89475.49718389208 }, { "content": " encrypted_to_kids: Some(vec![\n\n \"did:example:bob#key-x25519-1\".into(),\n\n \"did:example:bob#key-x25519-2\".into(),\n\n \"did:example:bob#key-x25519-3\".into(),\n\n ]),\n\n ..metadata.clone()\n\n },\n\n )\n\n .await;\n\n\n\n _verify_unpack(\n\n ENCRYPTED_MSG_AUTH_P256,\n\n &MESSAGE_SIMPLE,\n\n &UnpackMetadata {\n\n enc_alg_auth: Some(AuthCryptAlg::A256cbcHs512Ecdh1puA256kw),\n\n encrypted_from_kid: Some(\"did:example:alice#key-p256-1\".into()),\n\n encrypted_to_kids: Some(vec![\n\n \"did:example:bob#key-p256-1\".into(),\n\n \"did:example:bob#key-p256-2\".into(),\n\n ]),\n", "file_path": "src/message/unpack/mod.rs", "rank": 92, "score": 89475.39358676093 }, { "content": " \"Malformed: Unable parse protected header: missing field `apv` at line 1 column 264\",\n\n )\n\n .await;\n\n\n\n _verify_unpack_malformed(\n\n update_protected_field(ENCRYPTED_MSG_AUTH_X25519, \"apu\", \"invalid\").as_str(),\n\n \"Malformed: Unable decode apu: Invalid last symbol 100, offset 6.\",\n\n )\n\n .await;\n\n\n\n _verify_unpack_malformed(\n\n remove_protected_field(ENCRYPTED_MSG_AUTH_X25519, \"apu\").as_str(),\n\n \"Malformed: SKID present, but no apu\",\n\n )\n\n .await;\n\n }\n\n\n\n #[tokio::test]\n\n async fn unpack_works_malformed_signed_msg() {\n\n _verify_unpack_malformed(\n", "file_path": "src/message/unpack/mod.rs", "rank": 93, "score": 89474.97902894982 }, { "content": " non_repudiation: 
false,\n\n encrypted: true,\n\n enc_alg_auth: None,\n\n enc_alg_anon: None,\n\n sign_alg: None,\n\n encrypted_from_kid: None,\n\n encrypted_to_kids: None,\n\n sign_from: None,\n\n signed_message: None,\n\n from_prior_issuer_kid: None,\n\n from_prior: None,\n\n re_wrapped_in_forward: false,\n\n };\n\n\n\n _verify_unpack(\n\n ENCRYPTED_MSG_AUTH_X25519,\n\n &MESSAGE_SIMPLE,\n\n &UnpackMetadata {\n\n enc_alg_auth: Some(AuthCryptAlg::A256cbcHs512Ecdh1puA256kw),\n\n encrypted_from_kid: Some(\"did:example:alice#key-x25519-1\".into()),\n", "file_path": "src/message/unpack/mod.rs", "rank": 94, "score": 89474.84832824118 }, { "content": " did_doc.key_agreements\n\n }\n\n };\n\n\n\n let kids = kids.iter().map(|k| k as &str).collect::<Vec<_>>();\n\n\n\n let secrets_ids = secrets_resolver.find_secrets(&kids[..]).await?;\n\n\n\n return Ok(!secrets_ids.is_empty());\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use crate::{\n\n did::resolvers::ExampleDIDResolver,\n\n message::MessagingServiceMetadata,\n\n protocols::routing::wrap_in_forward,\n\n secrets::resolvers::ExampleSecretsResolver,\n\n test_vectors::{\n\n remove_field, remove_protected_field, update_field, update_protected_field,\n", "file_path": "src/message/unpack/mod.rs", "rank": 95, "score": 89474.45640072117 }, { "content": "\n\n _verify_unpack(\n\n &packed,\n\n msg,\n\n &UnpackMetadata {\n\n sign_from: None,\n\n sign_alg: None,\n\n signed_message: None,\n\n anonymous_sender: false,\n\n authenticated: true,\n\n non_repudiation: false,\n\n encrypted: true,\n\n enc_alg_auth: Some(enc_alg),\n\n enc_alg_anon: None,\n\n encrypted_from_kid: Some(from_kid.into()),\n\n encrypted_to_kids: Some(to_kids.iter().map(|&k| k.to_owned()).collect()),\n\n from_prior_issuer_kid: None,\n\n from_prior: None,\n\n re_wrapped_in_forward: false,\n\n },\n", "file_path": "src/message/unpack/mod.rs", "rank": 96, "score": 89474.43996652566 }, { "content": " msg,\n\n &UnpackMetadata {\n\n sign_from: None,\n\n sign_alg: None,\n\n signed_message: 
None,\n\n anonymous_sender: true,\n\n authenticated: true,\n\n non_repudiation: false,\n\n encrypted: true,\n\n enc_alg_auth: Some(enc_alg_auth),\n\n enc_alg_anon: Some(enc_alg_anon),\n\n encrypted_from_kid: Some(from_kid.into()),\n\n encrypted_to_kids: Some(to_kids.iter().map(|&k| k.to_owned()).collect()),\n\n from_prior_issuer_kid: None,\n\n from_prior: None,\n\n re_wrapped_in_forward: false,\n\n },\n\n )\n\n .await;\n\n }\n", "file_path": "src/message/unpack/mod.rs", "rank": 97, "score": 89473.28090878388 }, { "content": " let did_resolver = ExampleDIDResolver::new(vec![\n\n ALICE_DID_DOC.clone(),\n\n BOB_DID_DOC.clone(),\n\n MEDIATOR1_DID_DOC.clone(),\n\n ]);\n\n\n\n let alice_secrets_resolver = ExampleSecretsResolver::new(ALICE_SECRETS.clone());\n\n\n\n let bob_secrets_resolver = ExampleSecretsResolver::new(BOB_SECRETS.clone());\n\n\n\n let mediator1_secrets_resolver = ExampleSecretsResolver::new(MEDIATOR1_SECRETS.clone());\n\n\n\n let (msg, pack_metadata) = MESSAGE_SIMPLE\n\n .pack_encrypted(\n\n to,\n\n from,\n\n sign_by,\n\n &did_resolver,\n\n &alice_secrets_resolver,\n\n &PackEncryptedOptions::default(),\n", "file_path": "src/message/unpack/mod.rs", "rank": 98, "score": 89472.81724583436 }, { "content": " from.is_some() || sign_by.is_some()\n\n );\n\n assert_eq!(unpack_twice_metadata.non_repudiation, sign_by.is_some());\n\n assert_eq!(unpack_twice_metadata.anonymous_sender, from.is_none());\n\n assert!(!unpack_twice_metadata.re_wrapped_in_forward);\n\n }\n\n }\n\n\n\n #[tokio::test]\n\n async fn unpack_works_anoncrypted_2way() {\n\n _unpack_works_anoncrypted_2way(\n\n &MESSAGE_SIMPLE,\n\n BOB_DID,\n\n &[\n\n &BOB_SECRET_KEY_AGREEMENT_KEY_X25519_1.id,\n\n &BOB_SECRET_KEY_AGREEMENT_KEY_X25519_2.id,\n\n &BOB_SECRET_KEY_AGREEMENT_KEY_X25519_3.id,\n\n ],\n\n AnonCryptAlg::A256cbcHs512EcdhEsA256kw,\n\n )\n", "file_path": "src/message/unpack/mod.rs", "rank": 99, "score": 89472.66285869342 } ]
Rust
src/geometry/aabb.rs
josefrcm/rspt
ad534adf75635338134e4ec71491f55f3065fb6a
use std::f32; use super::*; #[derive(Clone, Copy, Debug)] pub struct AABB { pub lower: nalgebra::Point3<f32>, pub upper: nalgebra::Point3<f32>, } impl AABB { pub fn empty() -> Self { AABB { lower: nalgebra::Point3::new(f32::NAN, f32::NAN, f32::NAN), upper: nalgebra::Point3::new(f32::NAN, f32::NAN, f32::NAN), } } pub fn from_vertices(vertices: &[Vertex]) -> Self { let mut lower = nalgebra::Point3::new(f32::INFINITY, f32::INFINITY, f32::INFINITY); let mut upper = nalgebra::Point3::new(f32::NEG_INFINITY, f32::NEG_INFINITY, f32::NEG_INFINITY); for v in vertices { lower.x = f32::min(lower.x, v.coords.x); lower.y = f32::min(lower.y, v.coords.y); lower.z = f32::min(lower.z, v.coords.z); upper.x = f32::max(upper.x, v.coords.x); upper.y = f32::max(upper.y, v.coords.y); upper.z = f32::max(upper.z, v.coords.z); } AABB { lower: lower, upper: upper, } } pub fn from_faces(vertices: &[Vertex], faces: &[Triangle]) -> Self { let mut lower = nalgebra::Point3::new(f32::INFINITY, f32::INFINITY, f32::INFINITY); let mut upper = nalgebra::Point3::new(f32::NEG_INFINITY, f32::NEG_INFINITY, f32::NEG_INFINITY); for f in faces { let v1 = vertices[f.v1 as usize].coords; let v2 = vertices[f.v2 as usize].coords; let v3 = vertices[f.v3 as usize].coords; lower.x = f32::min(lower.x, v1.x); lower.x = f32::min(lower.x, v2.x); lower.x = f32::min(lower.x, v3.x); lower.y = f32::min(lower.y, v1.y); lower.y = f32::min(lower.y, v2.y); lower.y = f32::min(lower.y, v3.y); lower.z = f32::min(lower.z, v1.z); lower.z = f32::min(lower.z, v2.z); lower.z = f32::min(lower.z, v3.z); upper.x = f32::max(upper.x, v1.x); upper.x = f32::max(upper.x, v2.x); upper.x = f32::max(upper.x, v3.x); upper.y = f32::max(upper.y, v1.y); upper.y = f32::max(upper.y, v2.y); upper.y = f32::max(upper.y, v3.y); upper.z = f32::max(upper.z, v1.z); upper.z = f32::max(upper.z, v2.z); upper.z = f32::max(upper.z, v3.z); } AABB { lower: lower, upper: upper, } } pub fn intersect(&self, ray: Ray) -> Interval { let x1 = (self.lower.x - ray.origin.x) 
/ ray.direction.x; let x2 = (self.upper.x - ray.origin.x) / ray.direction.x; let x_int = Interval::new(x1, x2); let y1 = (self.lower.y - ray.origin.y) / ray.direction.y; let y2 = (self.upper.y - ray.origin.y) / ray.direction.y; let y_int = Interval::new(y1, y2); let z1 = (self.lower.z - ray.origin.z) / ray.direction.z; let z2 = (self.upper.z - ray.origin.z) / ray.direction.z; let z_int = Interval::new(z1, z2); let foo = f32::max(x_int.start, f32::max(y_int.start, z_int.start)); let bar = f32::min(x_int.finish, f32::min(y_int.finish, z_int.finish)); if bar >= foo { Interval::new(foo, bar) } else { Interval::new(f32::INFINITY, f32::INFINITY) } } } pub fn union(boxes: &[AABB]) -> AABB { let mut lower = nalgebra::Point3::new(f32::INFINITY, f32::INFINITY, f32::INFINITY); let mut upper = nalgebra::Point3::new(f32::NEG_INFINITY, f32::NEG_INFINITY, f32::NEG_INFINITY); for b in boxes { lower.x = f32::min(lower.x, b.lower.x); lower.y = f32::min(lower.y, b.lower.y); lower.z = f32::min(lower.z, b.lower.z); upper.x = f32::max(upper.x, b.upper.x); upper.y = f32::max(upper.y, b.upper.y); upper.z = f32::max(upper.z, b.upper.z); } AABB { lower: lower, upper: upper, } }
use std::f32; use super::*; #[derive(Clone, Copy, Debug)] pub struct AABB { pub lower: nalgebra::Point3<f32>, pub upper: nalgebra::Point3<f32>, } impl AABB { pub fn empty() -> Self { AABB { lower: nalgebra::Point3::new(f32::NAN, f32::NAN, f32::NAN), upper: nalgebra::Point3::new(f32::NAN, f32::NAN, f32::NAN), } } pub fn from_vertices(vertices: &[Vertex]) -> Self { let mut lower = nalgebra::Point3::new(f32::INFINITY, f32::INFINITY, f32::INFINITY); let mut upper = nalgebra::Point3::new(f32::NEG_INFINITY, f32::NEG_INFINITY, f32::NEG_INFINITY); for v in vertices { lower.x = f32::min(lower.x, v.coords.x); lower.y = f32::min(lower.y, v.coords.y); lower.z = f32::min(lower.z, v.coords.z); upper.x = f32::max(upper.x, v.coords.x); upper.y = f32::max(upper.y, v.coords.y); upper.z = f32::max(upper.z, v.coords.z); } AABB { lower: lower, upper: upper, } } pub fn from_faces(vertices: &[Vertex], faces: &[Triangle]) -> Self { let mut lower = nalgebra::Point3::new(f32::INFINITY, f32::INFINITY, f32::INFINITY); let mut upper = nalgebra::Point3::new(f32::NEG_INFINITY, f32::NEG_INFINITY, f32::NEG_INFINITY); for f in faces { let v1 = vertices[f.v1 as usize].coords; let v2 = vertices[f.v2 as usize].coords; let v3 = vertices[f.v3 as usize].coords; lower.x = f32::min(lower.x, v1.x); lower.x = f32::min(lower.x, v2.x); lower.x = f32::min(lower.x, v3.x); lower.y = f32::min(lower.y, v1.y); lower.y = f32::min(lower.y, v2.y); lower.y = f32::min(lower.y, v3.y); lower.z = f32::min(lower.z, v1.z); lower.z = f32::min(lower.z, v2.z); lower.z = f32::min(lower.z, v3.z); upper.x = f32::max(upper.x, v1.x); upper.x = f32::max(upper.x, v2.x); upper.x = f32::max(upper.x, v3.x); upper.y = f32::max(upper.y, v1.y); upper.y = f32::max(upper.y, v2.y); upper.y = f32::max(upper.y, v3.y); upper.z = f32::max(upper.z, v1.z); upper.z = f32::max(upper.z, v2.z); upper.z = f32::max(upper.z, v3.z); } AABB { lower: lower, upper: upper, } } pub fn intersect(&self, ray: Ray) -> Interval { let x1 = (self.lower.x - ray.origin.x) 
/ ray.direction.x; let x2 = (self.upper.x - ray.origin.x) / ray.direction.x; let x_int = Interval::new(x1, x2); let y1 = (self.lower.y - ray.origin.y) / ray.direction.y; let y2 = (self.upper.y - ray.origin.y) / ray.direction.y; let y_int = Interval::new(y1, y2); let z1 = (self.lower.z - ray.origin.z) / ray.direction.z; let z2 = (self.upper.z - ray.origin.z) / ray.direction.z; let z_int = Interval::new(z1, z2); let foo = f32::max(x_int.sta
} else { Interval::new(f32::INFINITY, f32::INFINITY) } } } pub fn union(boxes: &[AABB]) -> AABB { let mut lower = nalgebra::Point3::new(f32::INFINITY, f32::INFINITY, f32::INFINITY); let mut upper = nalgebra::Point3::new(f32::NEG_INFINITY, f32::NEG_INFINITY, f32::NEG_INFINITY); for b in boxes { lower.x = f32::min(lower.x, b.lower.x); lower.y = f32::min(lower.y, b.lower.y); lower.z = f32::min(lower.z, b.lower.z); upper.x = f32::max(upper.x, b.upper.x); upper.y = f32::max(upper.y, b.upper.y); upper.z = f32::max(upper.z, b.upper.z); } AABB { lower: lower, upper: upper, } }
rt, f32::max(y_int.start, z_int.start)); let bar = f32::min(x_int.finish, f32::min(y_int.finish, z_int.finish)); if bar >= foo { Interval::new(foo, bar)
function_block-random_span
[ { "content": "pub fn scale(image: &mut Image2D, factor: usize) -> () {\n\n let s = 1.0 / (factor as f32);\n\n image.map_inplace(|a| *a = *a * s);\n\n}\n\n\n", "file_path": "src/tracer/image2d.rs", "rank": 1, "score": 82483.86372930113 }, { "content": "pub fn accum(lhs: &mut Image2D, rhs: &Image2D) {\n\n assert!(lhs.shape() == rhs.shape());\n\n\n\n lhs.zip_mut_with(rhs, |a, &b| *a += b);\n\n}\n\n\n", "file_path": "src/tracer/image2d.rs", "rank": 2, "score": 80656.38359368697 }, { "content": "fn trace_ray(scene: &Scene, ray: geometry::Ray, max_bounces: usize) -> Color {\n\n if max_bounces == 0 {\n\n color::black()\n\n } else {\n\n let hit = scene.intersect(ray);\n\n if hit.distance.is_finite() {\n\n let material = &scene.materials[hit.material as usize];\n\n let outgoing_ray = material.spawn_secondary_ray(&hit);\n\n let incoming_color = trace_ray(scene, outgoing_ray, max_bounces - 1);\n\n material.shade(&hit, outgoing_ray.direction, incoming_color)\n\n } else {\n\n color::black()\n\n }\n\n }\n\n}\n", "file_path": "src/tracer/sampling.rs", "rank": 3, "score": 66488.3946168021 }, { "content": "pub fn black() -> Color {\n\n nalgebra::zero()\n\n}\n", "file_path": "src/tracer/color.rs", "rank": 4, "score": 65375.28886044328 }, { "content": "///\n\n/// Pretty print a duration\n\npub fn pretty_time(t: std::time::Duration) -> String {\n\n let nanos = t.subsec_nanos();\n\n let mut seconds = t.as_secs();\n\n let mut minutes = seconds / 60;\n\n seconds = seconds % 60;\n\n let hours = minutes / 60;\n\n minutes = minutes % 60;\n\n format!(\"{}:{:02}:{:02}.{}\", hours, minutes, seconds, nanos)\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 5, "score": 55183.89658251796 }, { "content": "pub fn new(width: usize, height: usize) -> Image2D {\n\n ndarray::Array2::zeros((height, width))\n\n}\n\n\n", "file_path": "src/tracer/image2d.rs", "rank": 6, "score": 52500.1377136902 }, { "content": "pub fn sample(scene: &Scene, camera: &Camera, max_bounces: usize) -> Image2D {\n\n 
Zip::from(&camera.make_rays()).par_apply_collect(|r| trace_ray(scene, *r, max_bounces))\n\n\n\n //let rays = camera.make_rays();\n\n //sample_scene(scene, &rays, max_bounces)\n\n}\n\n\n\n// --------------------------------------------------------------------------------------------------------------------------------------------------\n\n// Private functions\n\n// --------------------------------------------------------------------------------------------------------------------------------------------------\n\n\n\n/*fn sample_scene(scene: &Scene, rays: &[geometry::Ray], max_iter : usize) -> Vec<Color> {\n\n if max_iter == 0 {\n\n vec![Color::black(); rays.len()]\n\n } else {\n\n let intersections : Vec<SceneIntersection> = rays.par_iter().map(|&r| scene.intersect(r)).collect();\n\n let outgoing_rays : Vec<geometry::util::Ray> = intersections.par_iter().map(|hit| hit.material.spawn_secondary_ray(&hit)).collect();\n\n let incoming_color = sample_scene(scene, &outgoing_rays, max_iter-1);\n\n return izip!(intersections, outgoing_rays, incoming_color).map(|(hit, r, c)| hit.material.shade(hit.point, hit.ray.direction, hit.normal, r.direction, c)).collect();\n\n }\n\n}*/\n\n\n", "file_path": "src/tracer/sampling.rs", "rank": 7, "score": 47310.53531324654 }, { "content": "pub fn save_png(image: &Image2D, filename: &std::path::Path) -> image::ImageResult<()> {\n\n // Compute the scale factor\n\n let mut high = f32::NEG_INFINITY;\n\n //let high = self.pixels.max();\n\n for value in image.iter() {\n\n high = f32::max(high, value.x);\n\n high = f32::max(high, value.y);\n\n high = f32::max(high, value.z);\n\n }\n\n let scale = 1.0 / high;\n\n let gamma = 1.0 / 2.2;\n\n\n\n // Rescale the image to the range [0, 255]\n\n let width = image.ncols();\n\n let height = image.nrows();\n\n let mut buf = vec![0; 3 * width * height];\n\n for ((y, x), pixel) in image.indexed_iter() {\n\n let offset = 3 * (width * y + x);\n\n buf[offset + 0] = (255.0 * (scale * pixel.x).powf(gamma)) as 
u8;\n\n buf[offset + 1] = (255.0 * (scale * pixel.y).powf(gamma)) as u8;\n", "file_path": "src/tracer/image2d.rs", "rank": 8, "score": 44624.29007252754 }, { "content": "pub fn save_hdr(image: &Image2D, filename: &std::path::Path) -> image::ImageResult<()> {\n\n type RgbF32 = image::Rgb<f32>;\n\n\n\n // Rescale the image to the range [0, 255]\n\n let width = image.ncols();\n\n let height = image.nrows();\n\n let buf: Vec<RgbF32> = image\n\n .iter()\n\n .map(|pixel| RgbF32::from([pixel.x, pixel.y, pixel.z]))\n\n .collect();\n\n\n\n // Save the image as PNG\n\n let writer = std::fs::File::create(filename)?;\n\n let encoder = image::hdr::HDREncoder::new(writer);\n\n encoder.encode(&buf, width, height)\n\n}\n", "file_path": "src/tracer/image2d.rs", "rank": 9, "score": 44624.29007252754 }, { "content": "///\n\n/// Program options\n\nstruct ProgramOptions {\n\n width: usize,\n\n height: usize,\n\n max_bounces: usize,\n\n num_samples: usize,\n\n scene_file: std::path::PathBuf,\n\n camera_file: std::path::PathBuf,\n\n image_file: std::path::PathBuf,\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 10, "score": 42486.994915529904 }, { "content": "#[derive(Clone, Serialize, Deserialize)]\n\nstruct CameraDef {\n\n pub position: nalgebra::Point3<f32>,\n\n pub orientation: nalgebra::UnitQuaternion<f32>,\n\n pub focal: f32,\n\n}\n\n\n\n///\n\n/// Camera\n\npub struct Camera {\n\n pub position: nalgebra::Point3<f32>,\n\n pub orientation: nalgebra::UnitQuaternion<f32>,\n\n pub width: usize,\n\n pub height: usize,\n\n pub focal: f32,\n\n pub aspect: f32,\n\n}\n\n\n\n// --------------------------------------------------------------------------------------------------------------------------------------------------\n\n// Public functions\n\n// --------------------------------------------------------------------------------------------------------------------------------------------------\n", "file_path": "src/tracer/camera.rs", "rank": 11, "score": 41410.81183147449 }, { "content": 
"///\n\n///\n\nfn main() {\n\n println!(\"sizeof(TriangleBundle): {}\", std::mem::size_of::<geometry::TriangleBundle>());\n\n println!(\"sizeof(AABB): {}\", std::mem::size_of::<geometry::AABB>());\n\n println!(\"sizeof(Node): {}\", std::mem::size_of::<geometry::Node>());\n\n println!(\"sizeof(BVH): {}\", std::mem::size_of::<geometry::BVH>());\n\n\n\n // Load the input data\n\n let load_start = std::time::Instant::now();\n\n let options = parse_options();\n\n println!(\"Loading scene...\");\n\n let scene = tracer::Scene::from_json(&options.scene_file).unwrap();\n\n println!(\"Loading camera...\");\n\n let mut camera =\n\n tracer::Camera::from_json(&options.camera_file, options.width, options.height).unwrap();\n\n let load_time = load_start.elapsed();\n\n\n\n // Render the scene\n\n let render_start = std::time::Instant::now();\n\n let mut fb = tracer::image2d::new(options.width, options.height);\n\n for i in 0..options.num_samples {\n", "file_path": "src/main.rs", "rank": 12, "score": 35820.0786331414 }, { "content": "fn sample_hemisphere(\n\n point: nalgebra::Point3<f32>,\n\n normal: nalgebra::Vector3<f32>\n\n) -> geometry::Ray {\n\n let x = 2.0 * rand::random::<f32>() - 1.0;\n\n let y = 2.0 * rand::random::<f32>() - 1.0;\n\n let z = 2.0 * rand::random::<f32>() - 1.0;\n\n let s = (x * x + y * y + z * z).sqrt();\n\n\n\n let mut d = nalgebra::Vector3::new(x / s, y / s, z / s);\n\n\n\n if d.dot(&normal) < 0.0 {\n\n d.x = -d.x;\n\n d.y = -d.y;\n\n d.z = -d.z;\n\n }\n\n\n\n geometry::Ray {\n\n origin: point + 1.0e-3 * d,\n\n direction: d,\n", "file_path": "src/tracer/material.rs", "rank": 13, "score": 33648.18426516765 }, { "content": "///\n\n/// Parse the command line arguments\n\nfn parse_options() -> ProgramOptions {\n\n // Argument definition\n\n let matches = clap::App::new(\"Rusty Ray\")\n\n .version(\"0.1\")\n\n .author(\"José Franco Campos <[email protected]>\")\n\n .about(\"A toy path-tracer in Rust\")\n\n .arg(\n\n clap::Arg::with_name(\"num-samples\")\n\n 
.short(\"s\")\n\n .long(\"num-samples\")\n\n .value_name(\"SAMPLES\")\n\n .help(\"Number of samples to trace - More samples, more quality\")\n\n .takes_value(true),\n\n )\n\n .arg(\n\n clap::Arg::with_name(\"width\")\n\n .short(\"w\")\n\n .long(\"width\")\n\n .value_name(\"PIXELS\")\n\n .help(\"Image width\")\n", "file_path": "src/main.rs", "rank": 14, "score": 31732.29673785514 }, { "content": " gamma: f32::NAN,\n\n face: Triangle::invalid(),\n\n }\n\n }\n\n}\n\n\n\n// --------------------------------------------------------------------------------------------------------------------------------------------------\n\n// Public functions\n\n// --------------------------------------------------------------------------------------------------------------------------------------------------\n\n\n\n///\n\n/// Build a triangle bundle\n\nimpl TriangleBundle {\n\n pub fn new(vertices: &[Vertex], faces: &[Triangle]) -> Self {\n\n // Preconditions\n\n if faces.len() > BUNDLE_SIZE {\n\n panic!(\n\n \"Triangle bundles should be at most {} elements big!\",\n\n BUNDLE_SIZE\n\n );\n", "file_path": "src/geometry/triangle_bundle.rs", "rank": 22, "score": 23644.515263362704 }, { "content": "\n\n faces: [Triangle; BUNDLE_SIZE],\n\n}\n\n\n\n///\n\n/// Result of a bundle-ray intersection\n\npub struct BundleIntersection {\n\n pub distance: f32,\n\n pub alpha: f32,\n\n pub beta: f32,\n\n pub gamma: f32,\n\n pub face: Triangle,\n\n}\n\n\n\nimpl BundleIntersection {\n\n pub fn empty() -> Self {\n\n Self {\n\n distance: f32::INFINITY,\n\n alpha: f32::NAN,\n\n beta: f32::NAN,\n", "file_path": "src/geometry/triangle_bundle.rs", "rank": 23, "score": 23644.306911568263 }, { "content": " for (i, triangle) in faces.iter().enumerate() {\n\n // The triangle itself\n\n bundle.faces[i] = *triangle;\n\n let index1 = triangle.v1;\n\n let index2 = triangle.v2;\n\n let index3 = triangle.v3;\n\n\n\n // Vertex coordinates\n\n let vertex1 = vertices[index1 as usize].coords.xyz();\n\n let vertex2 = 
vertices[index2 as usize].coords.xyz();\n\n let vertex3 = vertices[index3 as usize].coords.xyz();\n\n\n\n // Triangle normal\n\n let edge_a = vertex2 - vertex1;\n\n let edge_b = vertex3 - vertex1;\n\n let normal = edge_a.cross(&edge_b).normalize();\n\n\n\n // Plane equation\n\n bundle.plane_eq_x[i] = normal.x;\n\n bundle.plane_eq_y[i] = normal.y;\n", "file_path": "src/geometry/triangle_bundle.rs", "rank": 24, "score": 23642.673371382658 }, { "content": "use super::*;\n\nuse nalgebra::*;\n\n\n\n// --------------------------------------------------------------------------------------------------------------------------------------------------\n\n// Public data types\n\n// --------------------------------------------------------------------------------------------------------------------------------------------------\n\n\n\npub const BUNDLE_SIZE: usize = 8;\n\npub const EPSILON: f32 = 0.0000001;\n\n#[allow(non_camel_case_types)]\n", "file_path": "src/geometry/triangle_bundle.rs", "rank": 25, "score": 23637.494668908705 }, { "content": " bundle.gamma_eq_z[i] = barycentric[(1, 2)];\n\n bundle.gamma_eq_w[i] = -Vector3::new(\n\n barycentric[(1, 0)],\n\n barycentric[(1, 1)],\n\n barycentric[(1, 2)],\n\n )\n\n .dot(&vertex1.coords);\n\n }\n\n\n\n bundle\n\n }\n\n}\n\n\n\n///\n\n/// Ray-bundle intersection\n\n/// [Baldwin-Weber]\n\n/// http://jcgt.org/published/0005/03/03/\n\nimpl TriangleBundle {\n\n ///\n\n /// Ray-Bundle intersection\n", "file_path": "src/geometry/triangle_bundle.rs", "rank": 26, "score": 23636.017140070024 }, { "content": " }\n\n }\n\n\n\n // If a triangle was hit, compute the intersection parameters: coordinates, normal, material, etc.\n\n if nearest_distance.is_infinite() {\n\n BundleIntersection::empty()\n\n } else {\n\n BundleIntersection {\n\n distance: nearest_distance,\n\n alpha: alphas[nearest_index],\n\n beta: betas[nearest_index],\n\n gamma: gammas[nearest_index],\n\n face: self.faces[nearest_index],\n\n }\n\n }\n\n }\n\n}\n", "file_path": 
"src/geometry/triangle_bundle.rs", "rank": 27, "score": 23635.631879970068 }, { "content": " }\n\n\n\n // Initialize the bundle to all zeros\n\n let mut bundle = TriangleBundle {\n\n plane_eq_x: zero(),\n\n plane_eq_y: zero(),\n\n plane_eq_z: zero(),\n\n plane_eq_w: zero(),\n\n beta_eq_x: zero(),\n\n beta_eq_y: zero(),\n\n beta_eq_z: zero(),\n\n beta_eq_w: zero(),\n\n gamma_eq_x: zero(),\n\n gamma_eq_y: zero(),\n\n gamma_eq_z: zero(),\n\n gamma_eq_w: zero(),\n\n faces: [Triangle::invalid(); BUNDLE_SIZE],\n\n };\n\n\n\n // Compute the triangle equations\n", "file_path": "src/geometry/triangle_bundle.rs", "rank": 28, "score": 23635.179250577836 }, { "content": " pub fn intersect(&self, ray: Ray) -> BundleIntersection {\n\n // Compute the intersection of the ray against all triangles in the bundle\n\n let t1: f32xN = (self.plane_eq_x * ray.origin.x)\n\n + (self.plane_eq_y * ray.origin.y)\n\n + (self.plane_eq_z * ray.origin.z)\n\n + self.plane_eq_w;\n\n let t2: f32xN = (self.plane_eq_x * ray.direction.x)\n\n + (self.plane_eq_y * ray.direction.y)\n\n + (self.plane_eq_z * ray.direction.z);\n\n let distances: f32xN = -t1.component_div(&t2);\n\n\n\n let points_x: f32xN = f32xN::repeat(ray.origin.x) + distances * ray.direction.x;\n\n let points_y: f32xN = f32xN::repeat(ray.origin.y) + distances * ray.direction.y;\n\n let points_z: f32xN = f32xN::repeat(ray.origin.z) + distances * ray.direction.z;\n\n\n\n let betas: f32xN = self.beta_eq_x.component_mul(&points_x)\n\n + self.beta_eq_y.component_mul(&points_y)\n\n + self.beta_eq_z.component_mul(&points_z)\n\n + self.beta_eq_w;\n\n\n", "file_path": "src/geometry/triangle_bundle.rs", "rank": 29, "score": 23634.85768278289 }, { "content": " let gammas: f32xN = self.gamma_eq_x.component_mul(&points_x)\n\n + self.gamma_eq_y.component_mul(&points_y)\n\n + self.gamma_eq_z.component_mul(&points_z)\n\n + self.gamma_eq_w;\n\n\n\n let alphas: f32xN = f32xN::repeat(1.0) - betas - gammas;\n\n\n\n // Find the intersection of the ray against 
the bundle\n\n let mut nearest_distance = f32::INFINITY;\n\n let mut nearest_index = 0;\n\n for i in 0..BUNDLE_SIZE {\n\n if (self.faces[i].material != u32::MAX)\n\n && (distances[i] > EPSILON)\n\n && (alphas[i] > 0.0)\n\n && (betas[i] > 0.0)\n\n && (gammas[i] > 0.0)\n\n && (distances[i] < nearest_distance)\n\n {\n\n nearest_distance = distances[i];\n\n nearest_index = i;\n", "file_path": "src/geometry/triangle_bundle.rs", "rank": 30, "score": 23634.478507225973 }, { "content": " bundle.plane_eq_z[i] = normal.z;\n\n bundle.plane_eq_w[i] = -normal.dot(&vertex1.coords);\n\n\n\n // World-to-barycentric coordinate conversion\n\n let barycentric = Matrix3::from_columns(&[edge_a, edge_b, normal])\n\n .try_inverse()\n\n .unwrap();\n\n\n\n bundle.beta_eq_x[i] = barycentric[(0, 0)];\n\n bundle.beta_eq_y[i] = barycentric[(0, 1)];\n\n bundle.beta_eq_z[i] = barycentric[(0, 2)];\n\n bundle.beta_eq_w[i] = -Vector3::new(\n\n barycentric[(0, 0)],\n\n barycentric[(0, 1)],\n\n barycentric[(0, 2)],\n\n )\n\n .dot(&vertex1.coords);\n\n\n\n bundle.gamma_eq_x[i] = barycentric[(1, 0)];\n\n bundle.gamma_eq_y[i] = barycentric[(1, 1)];\n", "file_path": "src/geometry/triangle_bundle.rs", "rank": 31, "score": 23629.847944721925 }, { "content": "fn project_vector_onto_plane(vector: nalgebra::Vector3<f32>, plane: nalgebra::Vector3<f32>) -> nalgebra::Vector3<f32> {\n\n vector - vector.dot(&plane) * plane\n\n}*/\n", "file_path": "src/tracer/material.rs", "rank": 32, "score": 19755.481573812613 }, { "content": "#[allow(non_camel_case_types)]\n\ntype u32xN = [u32; BUNDLE_SIZE];\n", "file_path": "src/geometry/triangle_bundle.rs", "rank": 33, "score": 19197.65550101733 }, { "content": "#[allow(non_camel_case_types)]\n\ntype f32xN = VectorN<f32, U8>;\n\n\n\n///\n\n/// Triangle bundle\n\n#[derive(Clone, Copy)]\n\npub struct TriangleBundle {\n\n plane_eq_x: f32xN,\n\n plane_eq_y: f32xN,\n\n plane_eq_z: f32xN,\n\n plane_eq_w: f32xN,\n\n\n\n beta_eq_x: f32xN,\n\n beta_eq_y: f32xN,\n\n beta_eq_z: 
f32xN,\n\n beta_eq_w: f32xN,\n\n\n\n gamma_eq_x: f32xN,\n\n gamma_eq_y: f32xN,\n\n gamma_eq_z: f32xN,\n\n gamma_eq_w: f32xN,\n", "file_path": "src/geometry/triangle_bundle.rs", "rank": 34, "score": 18503.52342779832 }, { "content": "\n\n///\n\n/// Triangle\n\n#[derive(Clone, Copy, Debug)]\n\npub struct Triangle {\n\n pub v1: u32,\n\n pub v2: u32,\n\n pub v3: u32,\n\n pub material: u32,\n\n}\n\n\n\n///\n\n/// Polygon mesh\n\n#[derive(Clone)]\n\npub struct Mesh {\n\n pub vertices: Vec<Vertex>,\n\n pub faces: BVH,\n\n}\n\n\n\n///\n", "file_path": "src/geometry/mesh.rs", "rank": 35, "score": 20.262124808854125 }, { "content": "// --------------------------------------------------------------------------------------------------------------------------------------------------\n\n// Public functions\n\n// --------------------------------------------------------------------------------------------------------------------------------------------------\n\n\n\nimpl Triangle {\n\n pub fn invalid() -> Self {\n\n Self {\n\n v1: u32::MAX,\n\n v2: u32::MAX,\n\n v3: u32::MAX,\n\n material: u32::MAX,\n\n }\n\n }\n\n}\n\n\n\nimpl Mesh {\n\n ///\n\n /// Create a new mesh from an array of vertices and an array of triangles\n\n pub fn new(vertices: Vec<Vertex>, faces: Vec<Triangle>) -> Self {\n\n // Build the acceleration structure\n", "file_path": "src/geometry/mesh.rs", "rank": 36, "score": 20.14522312488603 }, { "content": "/// Mesh-ray intersection\n\nimpl Mesh {\n\n ///\n\n /// Compute the mesh-ray intersection\n\n pub fn intersect(&self, ray: Ray) -> MeshIntersection {\n\n let hit = self.faces.intersect(ray);\n\n if hit.distance.is_finite() {\n\n let v1 = &self.vertices[hit.face.v1 as usize];\n\n let v2 = &self.vertices[hit.face.v2 as usize];\n\n let v3 = &self.vertices[hit.face.v3 as usize];\n\n let point = hit.alpha * v1.coords.coords\n\n + hit.beta * v2.coords.coords\n\n + hit.gamma * v3.coords.coords;\n\n let normal =\n\n (hit.alpha * v1.normal + hit.beta * v2.normal + 
hit.gamma * v3.normal).normalize();\n\n MeshIntersection {\n\n point: nalgebra::Point3::new(point.x, point.y, point.z),\n\n normal: normal,\n\n distance: hit.distance,\n\n material: hit.face.material,\n\n }\n\n } else {\n\n MeshIntersection::empty()\n\n }\n\n }\n\n}\n", "file_path": "src/geometry/mesh.rs", "rank": 37, "score": 17.130473249015708 }, { "content": " v2: index2,\n\n v3: index3,\n\n material: material,\n\n });\n\n }\n\n }\n\n }\n\n\n\n // Build the acceleration structure\n\n Ok(Self::new(vertices, faces))\n\n }\n\n\n\n ///\n\n /// Compute the bounding box\n\n pub fn bounds(&self) -> AABB {\n\n AABB::from_vertices(&self.vertices)\n\n }\n\n}\n\n\n\n///\n", "file_path": "src/geometry/mesh.rs", "rank": 38, "score": 15.880547676974317 }, { "content": " let mut bundles = Vec::new();\n\n for c in faces.chunks(BUNDLE_SIZE) {\n\n let foo = TriangleBundle::new(&vertices, &c.to_vec());\n\n let bar = AABB::from_faces(&vertices, &c.to_vec());\n\n bundles.push((foo, bar));\n\n }\n\n let tree = BVH::build_mesh(&bundles);\n\n\n\n // Done\n\n Mesh {\n\n vertices: vertices,\n\n faces: tree,\n\n }\n\n }\n\n\n\n ///\n\n /// Load a mesh from a PLY file\n\n pub fn load_ply(filename: &Path, material: u32) -> Result<Self, std::io::Error> {\n\n println!(\"Loading mesh {}\", filename.display());\n\n let f = File::open(filename)?;\n", "file_path": "src/geometry/mesh.rs", "rank": 39, "score": 13.720081873906906 }, { "content": "pub mod aabb;\n\npub use aabb::*;\n\n\n\npub mod bvh;\n\npub use bvh::*;\n\n\n\npub mod mesh;\n\npub use mesh::*;\n\n\n\npub mod ray;\n\npub use ray::*;\n\n\n\n/*pub mod scene;\n\npub use self::scene::*;*/\n\n\n\npub mod triangle_bundle;\n\npub use triangle_bundle::*;\n\n\n\npub mod util;\n\npub use util::*;\n", "file_path": "src/geometry/mod.rs", "rank": 40, "score": 13.208991844222206 }, { "content": " let file = BufReader::new(&f);\n\n let mut lines = file.lines();\n\n\n\n let mut vertices: Vec<Vertex> = Vec::new();\n\n let mut faces: Vec<Triangle> = 
Vec::new();\n\n let mut num_vertices: usize = 0;\n\n let mut num_faces: usize = 0;\n\n\n\n // Read the header\n\n loop {\n\n match lines.next() {\n\n None => break,\n\n Some(line) => {\n\n let foo = line.unwrap();\n\n let fields: Vec<&str> = foo.split(\" \").collect();\n\n if fields[0] == \"end_header\" {\n\n break;\n\n } else if (fields[0] == \"element\") && (fields[1] == \"vertex\") {\n\n num_vertices = fields[2].parse::<usize>().unwrap();\n\n } else if (fields[0] == \"element\") && (fields[1] == \"face\") {\n", "file_path": "src/geometry/mesh.rs", "rank": 41, "score": 12.763757937198124 }, { "content": "use std;\n\nuse std::f32;\n\nuse std::fs::File;\n\nuse std::io::BufRead;\n\nuse std::io::BufReader;\n\nuse std::path::Path;\n\n\n\nuse super::*;\n\n\n\n// --------------------------------------------------------------------------------------------------------------------------------------------------\n\n// Public data types\n\n// --------------------------------------------------------------------------------------------------------------------------------------------------\n\n\n\n///\n\n/// Vertex\n\n#[derive(Clone, Copy, Debug)]\n\npub struct Vertex {\n\n pub coords: nalgebra::Point3<f32>,\n\n pub normal: nalgebra::Vector3<f32>,\n\n}\n", "file_path": "src/geometry/mesh.rs", "rank": 42, "score": 12.738809928113891 }, { "content": "// --------------------------------------------------------------------------------------------------------------------------------------------------\n\n// Barycentric coordinates on a triangle\n\n// --------------------------------------------------------------------------------------------------------------------------------------------------\n\n\n\n#[derive(Clone, Copy, Debug)]\n\npub struct Barycentric {\n\n pub alpha: f32,\n\n pub beta: f32,\n\n pub gamma: f32,\n\n}\n\n\n\n// --------------------------------------------------------------------------------------------------------------------------------------------------\n\n// 
Half-open interval [a,b)\n\n// --------------------------------------------------------------------------------------------------------------------------------------------------\n\n\n\n#[derive(Clone, Copy, Debug)]\n\npub struct Interval {\n\n pub start: f32,\n\n pub finish: f32,\n\n}\n", "file_path": "src/geometry/util.rs", "rank": 43, "score": 12.46039903541585 }, { "content": " Self::build_branches(&leaves)\n\n }\n\n }\n\n}\n\n\n\n// --------------------------------------------------------------------------------------------------------------------------------------------------\n\n// Tree traversal\n\n// --------------------------------------------------------------------------------------------------------------------------------------------------\n\n\n\n///\n\n/// Ray intersection\n\nimpl BVH {\n\n pub fn intersect(&self, ray: Ray) -> BundleIntersection {\n\n // Check each child in order\n\n let mut nearest_hit = BundleIntersection::empty();\n\n for i in 0..self.bounds.len() {\n\n let intersection = self.bounds[i].intersect(ray);\n\n if intersection.start < nearest_hit.distance {\n\n match &self.children[i] {\n\n Node::Empty => {}\n", "file_path": "src/geometry/bvh.rs", "rank": 44, "score": 11.580053720855947 }, { "content": "/// Recursive bounding volume hierarchy\n\n#[derive(Clone)]\n\npub struct BVH {\n\n bounds: [AABB; NODE_SIZE],\n\n children: [Node; NODE_SIZE],\n\n}\n\n\n\n// --------------------------------------------------------------------------------------------------------------------------------------------------\n\n// Tree construction\n\n// --------------------------------------------------------------------------------------------------------------------------------------------------\n\n\n\n///\n\n/// Build the whole world geometry\n\nimpl BVH {\n\n ///\n\n /// An empty tree\n\n pub fn empty() -> Self {\n\n BVH {\n\n bounds: [AABB::empty(); NODE_SIZE],\n\n children: [Node::Empty, Node::Empty, Node::Empty, Node::Empty],\n", "file_path": 
"src/geometry/bvh.rs", "rank": 45, "score": 10.692611411893278 }, { "content": " }\n\n }\n\n\n\n ///\n\n /// Constructor\n\n pub fn build_mesh(elements: &[(TriangleBundle, AABB)]) -> Self {\n\n let leaves = Self::build_leaves(elements);\n\n Self::build_branches(&leaves)\n\n }\n\n\n\n /*///\n\n /// Constructor\n\n pub fn build_world(elements: &[Mesh]) -> Self {\n\n let leaves = Self::build_leaves(elements);\n\n Self::build_branches(&leaves)\n\n let mut bundles = Vec::new();\n\n for m in meshes {\n\n let b = m.bounds();\n\n bundles.push((m, b));\n\n }\n", "file_path": "src/geometry/bvh.rs", "rank": 46, "score": 9.957907202150135 }, { "content": "\n\nimpl Interval {\n\n pub fn new(s: f32, f: f32) -> Self {\n\n Interval {\n\n start: f32::min(s, f),\n\n finish: f32::max(s, f),\n\n }\n\n }\n\n}\n", "file_path": "src/geometry/util.rs", "rank": 47, "score": 9.876079202033043 }, { "content": "/// Result of a mesh-ray intersection\n\npub struct MeshIntersection {\n\n pub distance: f32,\n\n pub material: u32,\n\n pub point: nalgebra::Point3<f32>,\n\n pub normal: nalgebra::Vector3<f32>,\n\n}\n\n\n\nimpl MeshIntersection {\n\n pub fn empty() -> Self {\n\n Self {\n\n distance: f32::INFINITY,\n\n material: u32::MAX,\n\n point: nalgebra::geometry::Point::origin(),\n\n normal: nalgebra::zero(),\n\n }\n\n }\n\n}\n\n\n\n\n", "file_path": "src/geometry/mesh.rs", "rank": 48, "score": 9.71195070384839 }, { "content": "use super::*;\n\n\n\n// --------------------------------------------------------------------------------------------------------------------------------------------------\n\n// Public data types\n\n// --------------------------------------------------------------------------------------------------------------------------------------------------\n\n\n\nconst NODE_SIZE: usize = 4;\n\n\n\n///\n\n/// A node of the BVH\n\n/// TODO: use some kind of tagged box to reduce storage requirements\n\n#[derive(Clone)]\n\npub enum Node {\n\n Empty,\n\n Leaf(Box<TriangleBundle>),\n\n 
Branch(Box<BVH>),\n\n //Instance(Box<nalgebra::Transform3<f32>>, Box<BVH>),\n\n}\n\n\n\n///\n", "file_path": "src/geometry/bvh.rs", "rank": 49, "score": 9.072439614325049 }, { "content": " }*/\n\n\n\n ///\n\n /// First stage: turn the leaves into nodes\n\n fn build_leaves(elements: &[(TriangleBundle, AABB)]) -> Vec<Box<BVH>> {\n\n elements\n\n .chunks(NODE_SIZE)\n\n .map(|c| {\n\n let mut leaf = Self::empty();\n\n for i in 0..c.len() {\n\n leaf.children[i] = Node::Leaf(Box::new(c[i].0.clone()));\n\n leaf.bounds[i] = c[i].1;\n\n }\n\n Box::new(leaf)\n\n })\n\n .collect()\n\n }\n\n\n\n ///\n\n /// Second stage: recursively merge the nodes into bigger nodes\n", "file_path": "src/geometry/bvh.rs", "rank": 50, "score": 7.759983012328589 }, { "content": "use ron;\n\nuse std;\n\nuse std::collections::HashMap;\n\nuse std::fs::File;\n\nuse std::ops::Deref;\n\nuse std::path::Path;\n\n\n\nuse super::*;\n\nuse crate::geometry;\n\n\n\n// --------------------------------------------------------------------------------------------------------------------------------------------------\n\n// Public data types\n\n// --------------------------------------------------------------------------------------------------------------------------------------------------\n\n\n\n#[derive(Serialize, Deserialize)]\n\npub struct InstanceDef {\n\n pub mesh: String,\n\n pub material: String, /*,\n\n pub transform: linalg::Matrix4*/\n\n}\n", "file_path": "src/tracer/scene.rs", "rank": 51, "score": 7.520652527350937 }, { "content": " coords: nalgebra::Point3::new(vx, vy, vz),\n\n normal: nalgebra::Vector3::new(nx, ny, nz),\n\n });\n\n }\n\n }\n\n }\n\n\n\n // Read the faces\n\n for _i in 0..num_faces {\n\n match lines.next() {\n\n None => break,\n\n Some(line) => {\n\n let foo = line.unwrap();\n\n let fields: Vec<&str> = foo.split(\" \").collect();\n\n let index1 = fields[1].parse::<u32>().unwrap();\n\n let index2 = fields[2].parse::<u32>().unwrap();\n\n let index3 = 
fields[3].parse::<u32>().unwrap();\n\n\n\n faces.push(Triangle {\n\n v1: index1,\n", "file_path": "src/geometry/mesh.rs", "rank": 52, "score": 7.464595670872097 }, { "content": "impl Material {\n\n pub fn none() -> Self {\n\n Material::Light {\n\n emission: color::black(),\n\n }\n\n }\n\n\n\n pub fn spawn_secondary_ray(&self, intersection: &geometry::MeshIntersection) -> geometry::Ray {\n\n match self {\n\n Material::Light { .. } => geometry::Ray {\n\n origin: nalgebra::Point3::new(f32::NAN, f32::NAN, f32::NAN),\n\n direction: nalgebra::Vector3::new(f32::NAN, f32::NAN, f32::NAN),\n\n },\n\n Material::Standard { .. } => {\n\n sample_hemisphere(intersection.point, intersection.normal)\n\n }\n\n }\n\n }\n\n\n\n pub fn shade(\n", "file_path": "src/tracer/material.rs", "rank": 53, "score": 7.366087868361237 }, { "content": " // Build the acceleration structure\n\n //let tree = geometry::BVH::build_world(meshes);\n\n\n\n // Done\n\n Ok(Scene {\n\n materials: materials,\n\n geometry: meshes,\n\n })\n\n }\n\n\n\n ///\n\n /// Intersect a ray against the world\n\n pub fn intersect(&self, ray: geometry::Ray) -> geometry::MeshIntersection {\n\n let mut result = geometry::MeshIntersection::empty();\n\n\n\n if ray.direction.x.is_finite() && ray.direction.y.is_finite() && ray.direction.z.is_finite() {\n\n for m in &self.geometry {\n\n let hit = m.intersect(ray);\n\n if hit.distance < result.distance {\n\n result = hit;\n\n }\n\n }\n\n }\n\n\n\n return result;\n\n }\n\n}\n", "file_path": "src/tracer/scene.rs", "rank": 54, "score": 7.2504241506657 }, { "content": " num_faces = fields[2].parse::<usize>().unwrap();\n\n }\n\n }\n\n }\n\n }\n\n\n\n // Read the vertices\n\n for _i in 0..num_vertices {\n\n match lines.next() {\n\n None => break,\n\n Some(line) => {\n\n let foo = line.unwrap();\n\n let fields: Vec<&str> = foo.split(\" \").collect();\n\n let vx = fields[0].parse::<f32>().unwrap();\n\n let vy = fields[1].parse::<f32>().unwrap();\n\n let vz = 
fields[2].parse::<f32>().unwrap();\n\n let nx = fields[3].parse::<f32>().unwrap();\n\n let ny = fields[4].parse::<f32>().unwrap();\n\n let nz = fields[5].parse::<f32>().unwrap();\n\n vertices.push(Vertex {\n", "file_path": "src/geometry/mesh.rs", "rank": 55, "score": 7.223291789884598 }, { "content": "use super::*;\n\n\n\n// --------------------------------------------------------------------------------------------------------------------------------------------------\n\n// Public data types\n\n// --------------------------------------------------------------------------------------------------------------------------------------------------\n\n\n\npub type Image2D = ndarray::Array2<Color>;\n\n\n\n// --------------------------------------------------------------------------------------------------------------------------------------------------\n\n// Public functions\n\n// --------------------------------------------------------------------------------------------------------------------------------------------------\n\n\n", "file_path": "src/tracer/image2d.rs", "rank": 56, "score": 7.112356070915945 }, { "content": " fn build_branches(elements: &[Box<BVH>]) -> Self {\n\n // First case: an empty tree\n\n if elements.len() == 0 {\n\n Self::empty()\n\n }\n\n // Second case: a tree with just one leaf\n\n else if elements.len() == 1 {\n\n (*elements[0]).clone()\n\n }\n\n // Third case: merge the leaves into groups of NODE_SIZE elements\n\n else {\n\n let mut leaves = Vec::new();\n\n for c in elements.chunks(NODE_SIZE) {\n\n let mut node = Self::empty();\n\n for i in 0..c.len() {\n\n node.children[i] = Node::Branch(c[i].clone());\n\n node.bounds[i] = union(&c[i].bounds);\n\n }\n\n leaves.push(Box::new(node));\n\n }\n", "file_path": "src/geometry/bvh.rs", "rank": 57, "score": 6.805529443898253 }, { "content": "use nalgebra;\n\nuse std::f32;\n\n\n\nuse crate::geometry;\n\nuse super::*;\n\n\n\n// 
--------------------------------------------------------------------------------------------------------------------------------------------------\n\n// Public data types\n\n// --------------------------------------------------------------------------------------------------------------------------------------------------\n\n\n\n#[derive(Clone, Serialize, Deserialize)]\n\npub enum Material {\n\n Light { emission: Color },\n\n Standard { emission: Color, diffuse: Color },\n\n}\n\n\n\n// --------------------------------------------------------------------------------------------------------------------------------------------------\n\n// Public functions\n\n// --------------------------------------------------------------------------------------------------------------------------------------------------\n\n\n", "file_path": "src/tracer/material.rs", "rank": 58, "score": 6.7844513214346405 }, { "content": "pub mod camera;\n\npub use camera::*;\n\n\n\npub mod color;\n\npub use color::*;\n\n\n\npub mod halton;\n\npub use halton::*;\n\n\n\npub mod image2d;\n\npub use image2d::*;\n\n\n\npub mod material;\n\npub use material::*;\n\n\n\npub mod sampling;\n\npub use sampling::*;\n\n\n\npub mod scene;\n\npub use scene::*;\n", "file_path": "src/tracer/mod.rs", "rank": 59, "score": 6.541529954303395 }, { "content": "use ndarray::Zip;\n\n\n\nuse super::*;\n\nuse crate::geometry;\n\n\n\n// --------------------------------------------------------------------------------------------------------------------------------------------------\n\n// Public functions\n\n// --------------------------------------------------------------------------------------------------------------------------------------------------\n\n\n", "file_path": "src/tracer/sampling.rs", "rank": 60, "score": 5.97021718761394 }, { "content": "// ====================================================================================================================\n\n// 1-dimensional Halton sequence\n\n// 
====================================================================================================================\n\n\n\n///\n\n/// Halton sequence of base N\n\npub struct HaltonSeq {\n\n base: usize,\n\n offset: usize,\n\n}\n\n\n\n///\n\n/// Methods\n\nimpl HaltonSeq {\n\n ///\n\n /// Create a new sequence with a given base\n\n pub fn new(base: usize) -> Self {\n\n HaltonSeq {\n\n base: base,\n\n offset: 0,\n", "file_path": "src/tracer/halton.rs", "rank": 61, "score": 5.716349280672223 }, { "content": "// --------------------------------------------------------------------------------------------------------------------------------------------------\n\n// Public data types\n\n// --------------------------------------------------------------------------------------------------------------------------------------------------\n\n\n\npub type Color = nalgebra::Vector3<f32>;\n\n\n\n/*#[derive(Copy, Clone, Serialize, Deserialize)]\n\npub struct Color {\n\n pub r: f32,\n\n pub g: f32,\n\n pub b: f32,\n\n}*/\n\n\n\n// --------------------------------------------------------------------------------------------------------------------------------------------------\n\n// Public functions\n\n// --------------------------------------------------------------------------------------------------------------------------------------------------\n\n\n", "file_path": "src/tracer/color.rs", "rank": 62, "score": 5.159312384282208 }, { "content": " aspect: (width as f32) / (height as f32),\n\n })\n\n }\n\n\n\n ///\n\n /// Trace rays from the camera\n\n pub fn make_rays(&self) -> ndarray::Array2<geometry::Ray> {\n\n let xbias = rand::random::<f32>() - (self.width as f32) / 2.0;\n\n let ybias = rand::random::<f32>() - (self.height as f32) / 2.0;\n\n let foobar = 1.0 / f32::min(self.width as f32, self.height as f32);\n\n\n\n ndarray::Array2::from_shape_fn([self.height, self.width], |(y, x)| {\n\n let xr = foobar * (x as f32 + xbias);\n\n let yr = -foobar * (y as f32 + ybias);\n\n let 
direction = nalgebra::Vector3::new(xr, self.focal, yr).normalize();\n\n geometry::Ray {\n\n origin: self.position,\n\n direction: direction,\n\n }\n\n })\n\n }\n\n}\n", "file_path": "src/tracer/camera.rs", "rank": 63, "score": 5.149921507238458 }, { "content": " }\n\n }\n\n\n\n ///\n\n /// Discard the next `num` elements of the sequence\n\n pub fn discard(&mut self, num: usize) {\n\n self.offset += num;\n\n }\n\n\n\n ///\n\n /// Generate the next element of the sequence\n\n pub fn next(&mut self) -> f64 {\n\n let mut sample = 0.0;\n\n let mut denominator = self.base as f64;\n\n let mut n = self.offset;\n\n while n > 0 {\n\n let multiplier: usize = n % self.base;\n\n sample += (multiplier as f64) / denominator;\n\n n = n / self.base;\n\n denominator *= self.base as f64;\n\n }\n\n self.offset += 1;\n\n\n\n sample\n\n }\n\n}\n", "file_path": "src/tracer/halton.rs", "rank": 64, "score": 5.144406417975644 }, { "content": "impl Scene {\n\n pub fn from_json(filename: &Path) -> Result<Self, std::io::Error> {\n\n // Load the scene description from the JSON file\n\n let file = File::open(filename)?;\n\n let json: SceneDef = ron::de::from_reader(file).unwrap();\n\n\n\n // Load each model\n\n let base_dir = filename.parent().unwrap();\n\n let mut materials = Vec::new();\n\n\n\n let mut meshes = Vec::new();\n\n for m in &json.meshes {\n\n let material_num = materials.len();\n\n materials.push(json.materials.get(&m.material).unwrap().clone());\n\n\n\n let mesh_path = base_dir.join(&m.mesh);\n\n let mesh = geometry::Mesh::load_ply(mesh_path.deref(), material_num as u32)?;\n\n meshes.push(mesh);\n\n }\n\n\n", "file_path": "src/tracer/scene.rs", "rank": 65, "score": 5.012676913905741 }, { "content": " Node::Leaf(bundle) => {\n\n let hit = bundle.intersect(ray);\n\n if hit.distance < nearest_hit.distance {\n\n nearest_hit = hit;\n\n }\n\n }\n\n Node::Branch(tree) => {\n\n let hit = tree.intersect(ray);\n\n if hit.distance < nearest_hit.distance {\n\n nearest_hit = hit;\n\n }\n\n 
}\n\n /*Node::Instance(transform, tree) => {\n\n let hit = tree.intersect(ray);\n\n if hit.distance < nearest_hit.distance {\n\n nearest_hit = hit;\n\n }\n\n }*/\n\n }\n\n }\n\n }\n\n\n\n // If a triangle was hit, compute the intersection parameters: coordinates, normal, material, etc.\n\n nearest_hit\n\n }\n\n}\n", "file_path": "src/geometry/bvh.rs", "rank": 66, "score": 3.734248567577467 }, { "content": "use std;\n\nuse std::f32;\n\nuse std::fs::File;\n\nuse std::path::Path;\n\n\n\nuse rand;\n\nuse ron;\n\n\n\nuse crate::geometry;\n\n\n\n// --------------------------------------------------------------------------------------------------------------------------------------------------\n\n// Public data types\n\n// --------------------------------------------------------------------------------------------------------------------------------------------------\n\n\n\n///\n\n/// Camera definition\n\n#[derive(Clone, Serialize, Deserialize)]\n", "file_path": "src/tracer/camera.rs", "rank": 67, "score": 3.48224030507489 }, { "content": "\n\n#[derive(Serialize, Deserialize)]\n\npub struct SceneDef {\n\n pub materials: HashMap<String, Material>,\n\n pub meshes: Vec<InstanceDef>,\n\n}\n\n\n\n///\n\n/// World geometry\n\npub struct Scene {\n\n pub materials: Vec<Material>,\n\n //pub geometry: geometry::BVH,\n\n pub geometry: Vec<geometry::Mesh>,\n\n}\n\n\n\n// --------------------------------------------------------------------------------------------------------------------------------------------------\n\n// Public functions\n\n// --------------------------------------------------------------------------------------------------------------------------------------------------\n\n\n\n// TODO: check keys\n", "file_path": "src/tracer/scene.rs", "rank": 68, "score": 2.967492774156902 }, { "content": "\n\nimpl Camera {\n\n ///\n\n /// Load the camera description from a JSON file\n\n pub fn from_json(\n\n filename: &Path,\n\n width: usize,\n\n height: usize,\n\n ) -> 
Result<Camera, std::io::Error> {\n\n // Load the camera description from the JSON file\n\n let file = File::open(filename)?;\n\n let json: CameraDef = ron::de::from_reader(file).unwrap();\n\n\n\n // Build the camera\n\n Ok(Camera {\n\n position: json.position,\n\n orientation: json.orientation,\n\n width: width,\n\n height: height,\n\n focal: json.focal,\n", "file_path": "src/tracer/camera.rs", "rank": 69, "score": 2.6320709791805657 }, { "content": " &self,\n\n point: &geometry::MeshIntersection,\n\n outgoing_ray: nalgebra::Vector3<f32>,\n\n incoming_color: Color,\n\n ) -> Color {\n\n match self {\n\n Material::Light { emission } => *emission,\n\n Material::Standard { emission, diffuse } => {\n\n emission + (point.normal.dot(&outgoing_ray) * diffuse.component_mul(&incoming_color))\n\n }\n\n }\n\n }\n\n}\n\n\n\n// --------------------------------------------------------------------------------------------------------------------------------------------------\n\n// Private functions\n\n// --------------------------------------------------------------------------------------------------------------------------------------------------\n\n\n", "file_path": "src/tracer/material.rs", "rank": 70, "score": 2.4245343688727012 }, { "content": "#![allow(dead_code)]\n\n\n\n#[macro_use] extern crate serde;\n\n\n\nmod geometry;\n\nmod tracer;\n\n\n\n///\n\n/// Program options\n", "file_path": "src/main.rs", "rank": 71, "score": 2.358808592781971 }, { "content": " }\n\n}\n\n\n\n/*fn sample_hemisphere(point: nalgebra::Point3<f32>, normal: nalgebra::Vector3<f32>, incident: geometry::Ray) -> geometry::Ray {\n\n // Create the local frame\n\n let z_axis : nalgebra::Vector3<f32> = normal.normalize();\n\n let y_axis : nalgebra::Vector3<f32> = project_vector_onto_plane(incident.direction.normalize(), normal.normalize()).normalize();\n\n let x_axis : nalgebra::Vector3<f32> = y_axis.cross(&z_axis);\n\n let frame = nalgebra::Matrix3::from_rows(&[x_axis.transpose(), y_axis.transpose(), 
z_axis.transpose()]).try_inverse().unwrap();\n\n\n\n let u1 = rand::random::<f32>();\n\n let u2 = rand::random::<f32>();\n\n let r = u1.sqrt();\n\n let theta = 2.0 * 3.141592654 * u2;\n\n\n\n let x = r * theta.cos();\n\n let y = r * theta.sin();\n\n let z = (f32::max(0.0, 1.0 - u1)).sqrt();\n\n let d = frame * nalgebra::Vector3::new(x, y, z);\n\n\n\n geometry::Ray {\n\n origin: point + 1.0e-3 * d,\n\n direction: d\n\n }\n\n}\n\n\n\n\n\n\n", "file_path": "src/tracer/material.rs", "rank": 72, "score": 2.0571337378172427 }, { "content": " println!(\"Rendering sample {}/{}\", i + 1, options.num_samples);\n\n let sampling = tracer::sample(&scene, &mut camera, options.max_bounces);\n\n tracer::image2d::accum(&mut fb, &sampling);\n\n }\n\n tracer::image2d::scale(&mut fb, options.num_samples);\n\n let render_time = render_start.elapsed();\n\n\n\n // Write the resulting image\n\n let save_start = std::time::Instant::now();\n\n println!(\"Writing result\");\n\n tracer::image2d::save_png(&fb, &options.image_file).unwrap();\n\n //tracer::image2d::save_hdr(&fb, &options.image_file).unwrap();\n\n let save_time = save_start.elapsed();\n\n\n\n // Print the timing results\n\n println!(\"Timing results:\");\n\n println!(\"\\tLoading -> {}\", pretty_time(load_time));\n\n println!(\"\\tRendering -> {}\", pretty_time(render_time));\n\n println!(\"\\tSaving -> {}\", pretty_time(save_time));\n\n}\n", "file_path": "src/main.rs", "rank": 73, "score": 2.0044573147398914 }, { "content": " .takes_value(true),\n\n )\n\n .arg(\n\n clap::Arg::with_name(\"height\")\n\n .short(\"h\")\n\n .long(\"height\")\n\n .value_name(\"PIXELS\")\n\n .help(\"Image height\")\n\n .takes_value(true),\n\n )\n\n .arg(\n\n clap::Arg::with_name(\"max-bounces\")\n\n .short(\"b\")\n\n .long(\"max-bounces\")\n\n .value_name(\"BOUNCES\")\n\n .help(\"Maximum number of bounces per ray\")\n\n .takes_value(true),\n\n )\n\n .arg(\n\n clap::Arg::with_name(\"input\")\n", "file_path": "src/main.rs", "rank": 74, "score": 
1.489480166720572 }, { "content": "# rspt\n\n\n\n## Introduction\n\n\n\nToy path-tracer written in Rust, written as a learning exercise. My motivation is two-fold:\n\n\n\n- I wanted to learn a new programming language. My favourite languages are C++ and Haskell, and Rust seems to take some of the best things of both.\n\n- Computer graphics has always been my favourite programming topic, and I had been wanting to write a path-tracer for the last two years.\n\n\n\n## Running the program\n\n\n\nTo run the renderer just run the _test.sh_ or _test.bat_ script. It will compile the program and render a sample scene.\n\n\n\n## Future plans\n\n\n\n- Write a proper documentation.\n\n- Create a proper material definition format.\n\n- Add support for texture mapping and skyboxes.\n\n- Add support for geometry instancing.\n\n- Use SIMD for intersection tests.\n\n- Improve the BVH building code, the current one is excedingly naive.\n\n- Improve the sampling strategy.\n", "file_path": "README.md", "rank": 75, "score": 1.0127335067790133 } ]
Rust
src/enemy.rs
mdenchev/BevyJam1
6be0aafad7a6fce6e4cf51a2dfb9c0c01b5616aa
use bevy::prelude::*; use heron::{prelude::*, rapier_plugin::PhysicsWorld}; use crate::{levels::map::MapInitData, player::PlayerStats, utils::CommonHandles, GameState}; pub struct EnemyPlugin; impl Plugin for EnemyPlugin { fn build(&self, app: &mut App) { app.add_system_set( SystemSet::on_update(crate::GameState::Playing) .with_system(enemy_follow_player) .with_system(despawn_enemy_on_collision), ); } } #[derive(Component)] pub struct EnemyStats { pub damage: f32, pub speed: f32, } pub fn enemy_follow_player( players: Query<(&Transform, &PlayerStats)>, mut enemies: Query<(&mut Velocity, &Transform, &EnemyStats)>, ) { for (mut vel, enemy_trans, enemy_stats) in enemies.iter_mut() { if let Some((closest_player_trans, _)) = players.iter().min_by_key(|(player_trans, _)| { let dist = enemy_trans .translation .distance_squared(player_trans.translation) * 1000.0; dist as i32 }) { let direction = (enemy_trans.translation - closest_player_trans.translation).normalize(); vel.linear = direction * enemy_stats.speed * -1.0; } } } pub fn spawn_enemy(commands: &mut Commands, common_handles: &CommonHandles, position: Vec2) { commands .spawn() .insert_bundle(SpriteSheetBundle { sprite: TextureAtlasSprite::new(40), texture_atlas: common_handles.player_sprites.clone(), transform: Transform::from_translation(position.extend(1.0)), ..Default::default() }) .insert(EnemyStats { damage: 50.0, speed: 30.0, }) .insert(RigidBody::Dynamic) .insert(RotationConstraints::lock()) .insert(CollisionShape::Sphere { radius: 10.0 }) .insert( CollisionLayers::none() .with_group(crate::GameLayers::Enemies) .with_masks(&[ crate::GameLayers::World, crate::GameLayers::Player, crate::GameLayers::Bullets, crate::GameLayers::Enemies, ]), ) .insert(Velocity::default()); } fn despawn_enemy_on_collision( mut commands: Commands, time: Res<Time>, mut map_init_data: ResMut<MapInitData>, mut game_state: ResMut<State<GameState>>, mut events: EventReader<CollisionEvent>, ) { map_init_data.timer += time.delta(); 
events.iter().filter(|e| e.is_started()).for_each(|ev| { let (e1, e2) = ev.rigid_body_entities(); let (l1, l2) = ev.collision_layers(); use crate::GameLayers::*; if l1.contains_group(Enemies) && l2.contains_group(Bullets) { commands.entity(e1).despawn(); commands.entity(e2).despawn(); map_init_data.kills += 1; } else if l1.contains_group(Bullets) && l2.contains_group(Enemies) { commands.entity(e1).despawn(); commands.entity(e2).despawn(); map_init_data.kills += 1; } if map_init_data.kills == 50 { let _ = game_state.overwrite_set(GameState::GameWon); } }); } fn _check_enemy_visibility( players: Query<&Transform, With<PlayerStats>>, mut enemies: Query<(Entity, &mut Visibility, &Transform, &EnemyStats)>, physics_world: PhysicsWorld, ) { for player_trans in players.iter() { let player_pos = player_trans.translation; for (_ent, mut visibility, enemy_trans, _) in enemies.iter_mut() { let enemy_pos = enemy_trans.translation; if enemy_pos.distance(player_pos) > 1000.0f32 { visibility.is_visible = false; continue; } use crate::GameLayers::*; let distance_to_wall = physics_world .ray_cast_with_filter( player_pos, enemy_pos - player_pos, false, CollisionLayers::none() .with_group(Player) .with_masks(&[World]), |_| true, ) .map(|r| r.collision_point.distance(player_pos)) .unwrap_or(f32::MAX); visibility.is_visible = distance_to_wall > enemy_pos.distance(player_pos); } } }
use bevy::prelude::*; use heron::{prelude::*, rapier_plugin::PhysicsWorld}; use crate::{levels::map::MapInitData, player::PlayerStats, utils::CommonHandles, GameState}; pub struct EnemyPlugin; impl Plugin for EnemyPlugin { fn build(&self, app: &mut App) { app.add_system_set( SystemSet::on_update(crate::GameState::Playing) .with_system(enemy_follow_player) .with_system(despawn_enemy_on_collision), ); } } #[derive(Component)] pub struct EnemyStats { pub damage: f32, pub speed: f32, } pub fn enemy_follow_player( players: Query<(&Transform, &PlayerStats)>, mut enemies: Query<(&mut Velocity, &Transform, &EnemyStats)>, ) { for (mut vel, enemy_trans, enemy_stats) in enemies.iter_mut() { if let Some((closest_player_trans, _)) = players.iter().min_by_key(|(player_trans, _)| { let dist = enemy_trans .translation .distance_squared(player_trans.translation) * 1000.0; dist as i32 }) { let direction = (enemy_trans.translation - closest_player_trans.translation).normalize(); vel.linear = direction * enemy_stats.speed * -1.0; } } }
fn despawn_enemy_on_collision( mut commands: Commands, time: Res<Time>, mut map_init_data: ResMut<MapInitData>, mut game_state: ResMut<State<GameState>>, mut events: EventReader<CollisionEvent>, ) { map_init_data.timer += time.delta(); events.iter().filter(|e| e.is_started()).for_each(|ev| { let (e1, e2) = ev.rigid_body_entities(); let (l1, l2) = ev.collision_layers(); use crate::GameLayers::*; if l1.contains_group(Enemies) && l2.contains_group(Bullets) { commands.entity(e1).despawn(); commands.entity(e2).despawn(); map_init_data.kills += 1; } else if l1.contains_group(Bullets) && l2.contains_group(Enemies) { commands.entity(e1).despawn(); commands.entity(e2).despawn(); map_init_data.kills += 1; } if map_init_data.kills == 50 { let _ = game_state.overwrite_set(GameState::GameWon); } }); } fn _check_enemy_visibility( players: Query<&Transform, With<PlayerStats>>, mut enemies: Query<(Entity, &mut Visibility, &Transform, &EnemyStats)>, physics_world: PhysicsWorld, ) { for player_trans in players.iter() { let player_pos = player_trans.translation; for (_ent, mut visibility, enemy_trans, _) in enemies.iter_mut() { let enemy_pos = enemy_trans.translation; if enemy_pos.distance(player_pos) > 1000.0f32 { visibility.is_visible = false; continue; } use crate::GameLayers::*; let distance_to_wall = physics_world .ray_cast_with_filter( player_pos, enemy_pos - player_pos, false, CollisionLayers::none() .with_group(Player) .with_masks(&[World]), |_| true, ) .map(|r| r.collision_point.distance(player_pos)) .unwrap_or(f32::MAX); visibility.is_visible = distance_to_wall > enemy_pos.distance(player_pos); } } }
pub fn spawn_enemy(commands: &mut Commands, common_handles: &CommonHandles, position: Vec2) { commands .spawn() .insert_bundle(SpriteSheetBundle { sprite: TextureAtlasSprite::new(40), texture_atlas: common_handles.player_sprites.clone(), transform: Transform::from_translation(position.extend(1.0)), ..Default::default() }) .insert(EnemyStats { damage: 50.0, speed: 30.0, }) .insert(RigidBody::Dynamic) .insert(RotationConstraints::lock()) .insert(CollisionShape::Sphere { radius: 10.0 }) .insert( CollisionLayers::none() .with_group(crate::GameLayers::Enemies) .with_masks(&[ crate::GameLayers::World, crate::GameLayers::Player, crate::GameLayers::Bullets, crate::GameLayers::Enemies, ]), ) .insert(Velocity::default()); }
function_block-full_function
[ { "content": "// Going to want this to find the spawn point eventually.\n\npub fn spawn_player(\n\n commands: &mut Commands,\n\n common_handles: &CommonHandles,\n\n pos: (f32, f32),\n\n asset_server: &AssetServer,\n\n is_clone: bool,\n\n clone_id: usize,\n\n) {\n\n if is_clone {\n\n info!(\"Spawning clone#{clone_id}\");\n\n } else {\n\n info!(\"Spawning player!\");\n\n }\n\n let starting_gun = commands\n\n .spawn_bundle(GunType::Shotgun.create_bundle(&*asset_server))\n\n .id();\n\n\n\n let mut starting_inventory = Inventory::default();\n\n starting_inventory.collect_item(Item::Gun(GunType::Shotgun));\n\n\n", "file_path": "src/player.rs", "rank": 1, "score": 109454.17439819392 }, { "content": "pub fn player_clone(\n\n mut keys: ResMut<Input<KeyCode>>,\n\n mut game_state: ResMut<State<GameState>>,\n\n mut player_recording: ResMut<PlayerRecording>,\n\n) {\n\n if keys.just_pressed(KeyCode::C) {\n\n info!(\"Cloning!\");\n\n player_recording.current_loop += 1;\n\n player_recording.current_tick = 0;\n\n player_recording.inputs.push(vec![]);\n\n let _ = game_state.overwrite_set(GameState::SetupLevel);\n\n keys.clear_just_pressed(KeyCode::C);\n\n }\n\n}\n\n\n", "file_path": "src/player/player_movement.rs", "rank": 2, "score": 107529.13612390174 }, { "content": "pub fn record_player(\n\n player_input: Res<PlayerInput>,\n\n mut player_recording: ResMut<PlayerRecording>,\n\n) {\n\n let loop_idx = player_recording.current_loop;\n\n // FIXME this can be done better elsewhere but eh\n\n if player_recording.inputs.len() <= loop_idx {\n\n player_recording.inputs.push(vec![]);\n\n }\n\n player_recording.inputs[loop_idx].push(player_input.clone());\n\n}\n\n\n", "file_path": "src/player/player_movement.rs", "rank": 3, "score": 107529.13612390174 }, { "content": "pub fn player_shooting(\n\n mut commands: Commands,\n\n audio: Res<bevy_kira_audio::Audio>,\n\n channels: Res<AudioChannels>,\n\n asset_server: Res<AssetServer>,\n\n mut input_ticks: EventReader<PlayerInputTick>,\n\n time: 
Res<Time>,\n\n players: Query<(Entity, &Transform, &Inventory), With<ControllablePlayer>>,\n\n mut guns: Query<\n\n (\n\n &Parent,\n\n &mut Transform,\n\n &mut Visibility,\n\n &mut GunTimer,\n\n &GunType,\n\n ),\n\n Without<ControllablePlayer>,\n\n >,\n\n) {\n\n for PlayerInputTick { input, entity } in input_ticks.iter() {\n", "file_path": "src/player/player_movement.rs", "rank": 4, "score": 107529.13612390174 }, { "content": "pub fn player_movement(\n\n player_input: Res<PlayerInput>,\n\n mut controllable_player: Query<\n\n (&mut Velocity, &PlayerStats),\n\n (With<ControlledPlayer>, With<RigidBody>),\n\n >,\n\n) {\n\n for (mut vel, stat) in controllable_player.iter_mut() {\n\n vel.linear = Vec3::from((player_input.move_direction, 0.0)) * stat.speed;\n\n }\n\n}\n\n\n", "file_path": "src/player/player_movement.rs", "rank": 5, "score": 107529.13612390174 }, { "content": "pub fn player_shooting_input(\n\n player_input: Res<PlayerInput>,\n\n mut input_ticks: EventWriter<PlayerInputTick>,\n\n players: Query<Entity, With<ControlledPlayer>>,\n\n) {\n\n if let Ok(entity) = players.get_single() {\n\n input_ticks.send(PlayerInputTick {\n\n entity,\n\n input: player_input.clone(),\n\n })\n\n }\n\n}\n\n\n", "file_path": "src/player/player_movement.rs", "rank": 6, "score": 104938.83870164353 }, { "content": "pub fn replay_recordings(\n\n mut player_recording: ResMut<PlayerRecording>,\n\n mut input_ticks: EventWriter<PlayerInputTick>,\n\n mut clones: Query<\n\n (Entity, &mut Velocity, &PlayerStats, &CloneId),\n\n (Without<ControlledPlayer>, With<RigidBody>),\n\n >,\n\n) {\n\n let current_loop = player_recording.current_loop;\n\n if current_loop == 0 {\n\n return;\n\n };\n\n let mut ticks_batch = vec![];\n\n let tick = player_recording.current_tick;\n\n for (id, recording) in player_recording.inputs[..current_loop].iter().enumerate() {\n\n for (entity, mut vel, stat, clone_id) in clones.iter_mut() {\n\n if clone_id.0 == id {\n\n if let Some(input) = recording.get(tick) {\n\n // 
Movement\n\n vel.linear = Vec3::from((input.move_direction, 0.0)) * stat.speed;\n", "file_path": "src/player/player_movement.rs", "rank": 7, "score": 103343.0072462179 }, { "content": "pub fn cleanup(mut commands: Commands, query: Query<Entity>) {\n\n for ent in query.iter() {\n\n commands.entity(ent).despawn();\n\n }\n\n}\n", "file_path": "src/menus/common.rs", "rank": 9, "score": 82930.54089124533 }, { "content": "pub fn level_spawns(\n\n mut commands: Commands,\n\n common_handles: Res<CommonHandles>,\n\n mut game_state: ResMut<State<GameState>>,\n\n mut map_init_data: ResMut<MapInitData>,\n\n recordings: Res<PlayerRecording>,\n\n asset_server: Res<AssetServer>,\n\n char_query: Query<Entity, (With<Velocity>, With<RigidBody>)>,\n\n) {\n\n info!(\"Setting up level ents\");\n\n // Reset kills\n\n map_init_data.kills = 0;\n\n\n\n // Clear existing enemies\n\n for ent in char_query.iter() {\n\n commands.entity(ent).despawn_recursive();\n\n }\n\n\n\n // Spawn player\n\n crate::player::spawn_player(\n", "file_path": "src/levels/mod.rs", "rank": 10, "score": 75517.77932968136 }, { "content": "pub fn game_won(\n\n mut commands: Commands,\n\n asset_server: Res<AssetServer>,\n\n map_init_data: ResMut<MapInitData>,\n\n query: Query<Entity>,\n\n) {\n\n info!(\"Game Won!\");\n\n for ent in query.iter() {\n\n commands.entity(ent).despawn_recursive();\n\n }\n\n\n\n commands.spawn_bundle(UiCameraBundle::default());\n\n let text_style = Style {\n\n align_self: AlignSelf::Center,\n\n position_type: PositionType::Relative,\n\n position: Rect::default(),\n\n ..Default::default()\n\n };\n\n\n\n let text_textstyle = TextStyle {\n", "file_path": "src/levels/mod.rs", "rank": 11, "score": 75517.77932968136 }, { "content": "pub fn load_common_handles(\n\n mut commands: Commands,\n\n asset_server: Res<AssetServer>,\n\n mut texture_atlases: ResMut<Assets<TextureAtlas>>,\n\n) {\n\n info!(\"Loading common handles!\");\n\n let player_sprites_tex = asset_server.load(\"images/images.png\");\n\n 
let player_sprites_atlas =\n\n TextureAtlas::from_grid(player_sprites_tex, Vec2::new(32.0, 32.0), 8, 8);\n\n let player_sprites = texture_atlases.add(player_sprites_atlas);\n\n commands.insert_resource(CommonHandles { player_sprites });\n\n info!(\"Common handles loaded!\");\n\n}\n", "file_path": "src/utils.rs", "rank": 12, "score": 75517.77932968136 }, { "content": "pub fn setup(\n\n mut commands: Commands,\n\n asset_server: Res<AssetServer>,\n\n audio: Res<Audio>,\n\n channels: Res<AudioChannels>,\n\n) {\n\n info!(\"[Scene:MainMenu:setup]\");\n\n commands.spawn_bundle(UiCameraBundle::default());\n\n\n\n // Play bg music\n\n audio.play_looped_in_channel(asset_server.load(\"music/OutThere_0.ogg\"), &channels.music);\n\n\n\n commands\n\n .spawn_bundle(NodeBundle {\n\n style: Style {\n\n size: Size::new(Val::Percent(100.0), Val::Percent(100.0)),\n\n justify_content: JustifyContent::Center,\n\n flex_direction: FlexDirection::ColumnReverse,\n\n ..Default::default()\n\n },\n", "file_path": "src/menus/main_menu.rs", "rank": 13, "score": 75517.77932968136 }, { "content": "pub fn update_kills_text(\n\n map_init_data: Res<MapInitData>,\n\n mut query: Query<&mut Text, With<KilledText>>,\n\n) {\n\n let mut text = query.single_mut();\n\n text.sections[0].value = format!(\"Kills: {}\", map_init_data.kills);\n\n}\n\n\n", "file_path": "src/levels/mod.rs", "rank": 14, "score": 73528.96225261991 }, { "content": "// Taken from https://github.com/StarArawn/bevy_ecs_tilemap/blob/main/examples/helpers/texture.rs\n\npub fn set_texture_filters_to_nearest(\n\n mut texture_events: EventReader<AssetEvent<Image>>,\n\n mut textures: ResMut<Assets<Image>>,\n\n) {\n\n // quick and dirty, run this for all textures anytime a texture is created.\n\n for event in texture_events.iter() {\n\n match event {\n\n AssetEvent::Created { handle } => {\n\n if let Some(mut texture) = textures.get_mut(handle) {\n\n texture.texture_descriptor.usage = TextureUsages::TEXTURE_BINDING\n\n | 
TextureUsages::COPY_SRC\n\n | TextureUsages::COPY_DST;\n\n }\n\n }\n\n _ => (),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 15, "score": 73528.96225261991 }, { "content": "pub fn handle_buttons(\n\n mut game_state: ResMut<State<GameState>>,\n\n mut interaction_query: Query<\n\n (&Interaction, &mut UiColor, &ButtonId),\n\n (Changed<Interaction>, With<Button>, Without<Disabled>),\n\n >,\n\n mut app_exit_events: EventWriter<AppExit>,\n\n) -> anyhow::Result<()> {\n\n for (interaction, mut color, button_id) in interaction_query.iter_mut() {\n\n match *interaction {\n\n Interaction::Clicked => {\n\n *color = PRESSED_COLOR;\n\n match button_id {\n\n ButtonId::SinglePlayer => {\n\n game_state.overwrite_set(GameState::BuildLevel)?;\n\n }\n\n ButtonId::Quit => {\n\n app_exit_events.send(AppExit);\n\n }\n\n _ => {}\n", "file_path": "src/menus/main_menu.rs", "rank": 16, "score": 73528.96225261991 }, { "content": "fn cam_follow_player(\n\n mut queries: QuerySet<(\n\n QueryState<&mut Transform, With<MainCamera>>,\n\n QueryState<&Transform, (With<ControlledPlayer>, With<RigidBody>)>,\n\n )>,\n\n) {\n\n let mut player_position = if let Ok(player) = queries.q1().get_single() {\n\n player.translation\n\n } else {\n\n return;\n\n };\n\n\n\n if let Ok(mut camera) = queries.q0().get_single_mut() {\n\n player_position.z = camera.translation.z;\n\n camera.translation = player_position;\n\n }\n\n}\n", "file_path": "src/player.rs", "rank": 19, "score": 70849.8868078044 }, { "content": "pub fn text_style() -> Style {\n\n Style {\n\n align_self: AlignSelf::Center,\n\n position_type: PositionType::Relative,\n\n position: Rect::default(),\n\n ..Default::default()\n\n }\n\n}\n\n\n", "file_path": "src/menus/common.rs", "rank": 20, "score": 69957.89535887877 }, { "content": "pub fn button_style() -> Style {\n\n Style {\n\n size: Size::new(Val::Px(150.0), Val::Px(35.0)),\n\n align_items: AlignItems::Center,\n\n align_self: AlignSelf::Center,\n\n justify_content: 
JustifyContent::Center,\n\n ..Default::default()\n\n }\n\n}\n\n\n", "file_path": "src/menus/common.rs", "rank": 21, "score": 69957.89535887877 }, { "content": "fn despawn_on_collision(mut commands: Commands, mut events: EventReader<CollisionEvent>) {\n\n events.iter().filter(|e| e.is_started()).for_each(|ev| {\n\n let (e1, e2) = ev.rigid_body_entities();\n\n let (l1, l2) = ev.collision_layers();\n\n use crate::GameLayers::*;\n\n if l1.contains_group(World) && l2.contains_group(Bullets) {\n\n commands.entity(e2).despawn();\n\n } else if l1.contains_group(Bullets) && l2.contains_group(World) {\n\n commands.entity(e1).despawn();\n\n }\n\n });\n\n}\n\n\n\n#[derive(Bundle, Default)]\n\npub struct GunBundle {\n\n gun_type: GunType,\n\n #[bundle]\n\n sprite: SpriteBundle,\n\n gun_timer: GunTimer,\n\n}\n", "file_path": "src/gun.rs", "rank": 22, "score": 66817.88816614826 }, { "content": "pub fn button_text_alignment() -> TextAlignment {\n\n TextAlignment {\n\n horizontal: HorizontalAlign::Center,\n\n vertical: VerticalAlign::Center,\n\n }\n\n}\n\n\n", "file_path": "src/menus/common.rs", "rank": 23, "score": 66468.7786631553 }, { "content": "fn get_player_inputs(\n\n keys: Res<Input<KeyCode>>,\n\n mouse: Res<Input<MouseButton>>,\n\n windows: Res<Windows>,\n\n mut player_input: ResMut<PlayerInput>,\n\n) {\n\n // Create our move vector from keyboard inputs\n\n let mut move_direction = Vec2::ZERO;\n\n move_direction.x -= if keys.pressed(KeyCode::A) || keys.pressed(KeyCode::Left) {\n\n 1.0\n\n } else {\n\n 0.0\n\n };\n\n move_direction.x += if keys.pressed(KeyCode::D) || keys.pressed(KeyCode::Right) {\n\n 1.0\n\n } else {\n\n 0.0\n\n };\n\n move_direction.y += if keys.pressed(KeyCode::W) || keys.pressed(KeyCode::Up) {\n\n 1.0\n", "file_path": "src/inputs.rs", "rank": 24, "score": 63000.977617319135 }, { "content": "/// Generic error handler system that can be chained into\n\n/// to allow using ? 
for error checking and logging\n\npub fn log_error(In(result): In<anyhow::Result<()>>) {\n\n if let Err(e) = result {\n\n error!(\"{:?}\", e);\n\n }\n\n}\n\n\n\n/// Where we can store commonly used handles\n\n/// instead of always using asset server\n\npub struct CommonHandles {\n\n pub player_sprites: Handle<TextureAtlas>,\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 25, "score": 59499.99247884544 }, { "content": "pub fn text_textstyle(asset_server: &AssetServer) -> TextStyle {\n\n TextStyle {\n\n font: asset_server.load(\"fonts/FiraSans-Bold.ttf\"),\n\n font_size: 30.0,\n\n color: Color::WHITE,\n\n }\n\n}\n\n\n", "file_path": "src/menus/common.rs", "rank": 26, "score": 57057.61848414669 }, { "content": "fn spawn_pickup(mut commands: Commands, asset_server: Res<AssetServer>) {\n\n commands\n\n .spawn_bundle(Item::Grenade.bundle(Transform::from_xyz(300., 300., 1.1), &asset_server));\n\n}\n\n\n", "file_path": "src/item.rs", "rank": 27, "score": 55159.403706859324 }, { "content": "fn spawn_inventory_ui(mut commands: Commands, asset_server: Res<AssetServer>) {\n\n info!(\"Spawning inventory UI\");\n\n\n\n commands\n\n .spawn_bundle(UiCameraBundle::default())\n\n .insert(UiCamera);\n\n\n\n commands\n\n .spawn_bundle(NodeBundle {\n\n style: Style {\n\n size: Size::new(Val::Percent(100.), Val::Percent(100.)),\n\n justify_content: JustifyContent::SpaceBetween,\n\n ..Style::default()\n\n },\n\n color: Color::NONE.into(),\n\n ..NodeBundle::default()\n\n })\n\n .insert(InventoryUi)\n\n .with_children(|parent| {\n\n parent\n", "file_path": "src/item.rs", "rank": 28, "score": 53914.66881195077 }, { "content": "#[derive(Component)]\n\nstruct InventoryUi;\n\n\n", "file_path": "src/item.rs", "rank": 29, "score": 47417.37671230578 }, { "content": "#[derive(Component)]\n\nstruct UiCamera;\n\n\n", "file_path": "src/item.rs", "rank": 30, "score": 47417.37671230578 }, { "content": "#[derive(Bundle)]\n\nstruct PickupBundle {\n\n #[bundle]\n\n sprite_bundle: SpriteBundle,\n\n rb: 
RigidBody,\n\n coll_shape: CollisionShape,\n\n coll_layers: CollisionLayers,\n\n item: Item,\n\n}\n\n\n\n#[derive(Component, Default)]\n\npub struct IgnoreColliders(Vec<Entity>);\n\n\n\nimpl Deref for IgnoreColliders {\n\n type Target = Vec<Entity>;\n\n\n\n fn deref(&self) -> &Self::Target {\n\n &self.0\n\n }\n\n}\n\n\n", "file_path": "src/item.rs", "rank": 31, "score": 47417.37671230578 }, { "content": "#[derive(Component)]\n\nstruct InventoryUiImage;\n\n\n", "file_path": "src/item.rs", "rank": 32, "score": 46152.278146750075 }, { "content": "fn main() {\n\n if cfg!(target_os = \"linux\") {\n\n println!(\"cargo:rustc-link-lib=vulkan\");\n\n }\n\n}\n", "file_path": "build.rs", "rank": 33, "score": 43051.763869410825 }, { "content": "fn main() {\n\n App::new()\n\n // Configure the game window\n\n .insert_resource(WindowDescriptor {\n\n width: 1600.0,\n\n height: 900.0,\n\n vsync: true,\n\n mode: WindowMode::Windowed,\n\n title: \"Bevy Cursed Tomb\".to_string(),\n\n ..Default::default()\n\n })\n\n .insert_resource(ClearColor(Color::rgb(0.11, 0.039, 0.004)))\n\n .init_resource::<AudioChannels>()\n\n // Standard Bevy functionality\n\n .add_plugins(DefaultPlugins)\n\n .add_plugin(utils::UtilsPlugin)\n\n .add_plugin(PhysicsPlugin::default())\n\n .add_plugin(inputs::GameInputPlugin)\n\n .add_plugin(LogDiagnosticsPlugin::default())\n\n .add_plugin(FrameTimeDiagnosticsPlugin::default())\n", "file_path": "src/main.rs", "rank": 34, "score": 41590.441919790006 }, { "content": "fn collide_pickups(\n\n mut commands: Commands,\n\n mut events: EventReader<CollisionEvent>,\n\n pickups: Query<&Item>,\n\n mut players: Query<\n\n (&mut Inventory, &mut IgnoreColliders),\n\n (With<ControlledPlayer>, Without<Item>),\n\n >,\n\n) {\n\n for ev in events.iter() {\n\n let (e1, e2) = ev.rigid_body_entities();\n\n let (layer_1, layer_2) = ev.collision_layers();\n\n\n\n let player;\n\n let pickup;\n\n if layer_1.contains_group(GameLayers::Pickups) {\n\n player = e2;\n\n pickup = e1;\n\n } else 
if layer_2.contains_group(GameLayers::Pickups) {\n\n player = e1;\n", "file_path": "src/item.rs", "rank": 35, "score": 40279.564138127025 }, { "content": "fn drop_pickup(\n\n mut commands: Commands,\n\n asset_server: Res<AssetServer>,\n\n input: Res<PlayerInput>,\n\n mut curr_players: Query<\n\n (&mut Inventory, &mut IgnoreColliders, &Transform),\n\n With<ControlledPlayer>,\n\n >,\n\n) {\n\n if input.throw.was_pressed() {\n\n if let Ok((mut inventory, mut ignore_colls, tf)) = curr_players.get_single_mut() {\n\n if let Some(item) = inventory.drop_item() {\n\n ignore_colls.push(commands.spawn_bundle(item.bundle(*tf, &asset_server)).id());\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/item.rs", "rank": 36, "score": 40279.564138127025 }, { "content": "fn build_level(\n\n mut commands: Commands,\n\n common_handles: Res<CommonHandles>,\n\n mut game_state: ResMut<State<GameState>>,\n\n mut map_init_data: ResMut<MapInitData>,\n\n asset_server: Res<AssetServer>,\n\n atlases: Res<Assets<TextureAtlas>>,\n\n mut map_query: MapQuery,\n\n) {\n\n info!(\"[Scene:SingleplayerLevel:setup]\");\n\n commands\n\n .spawn_bundle(OrthographicCameraBundle::new_2d())\n\n .insert(MainCamera);\n\n let texture_handle = atlases\n\n .get(common_handles.player_sprites.clone())\n\n .unwrap()\n\n .texture\n\n .clone();\n\n\n\n let map_entity = commands.spawn().id();\n", "file_path": "src/levels/mod.rs", "rank": 37, "score": 39097.03537402827 }, { "content": "fn update_inventory_ui(\n\n asset_server: Res<AssetServer>,\n\n curr_players: Query<&Inventory, (With<ControlledPlayer>, Changed<Inventory>)>,\n\n mut ui_images: Query<&mut UiImage, With<InventoryUiImage>>,\n\n) {\n\n if let (Ok(inventory), Ok(mut image)) = (curr_players.get_single(), ui_images.get_single_mut())\n\n {\n\n info!(\"Updating UI for: {inventory:?}\");\n\n\n\n *image = asset_server\n\n .load(\n\n inventory\n\n .get_item()\n\n .map_or(\"images/empty.png\", |item| item.image_path()),\n\n )\n\n .into();\n\n }\n\n}\n\n\n", 
"file_path": "src/item.rs", "rank": 38, "score": 39097.03537402827 }, { "content": "fn enable_bullet_ccd(\n\n mut rigid_bodies: ResMut<RigidBodySet>,\n\n new_handles: Query<&RigidBodyHandle, (With<BulletStats>, Added<RigidBodyHandle>)>,\n\n) {\n\n for handle in new_handles.iter() {\n\n if let Some(body) = rigid_bodies.get_mut(handle.into_rapier()) {\n\n body.enable_ccd(true);\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/gun.rs", "rank": 39, "score": 39097.03537402827 }, { "content": "fn despawn_inventory_ui(\n\n mut commands: Commands,\n\n uis: Query<Entity, Or<(With<InventoryUi>, With<UiCamera>)>>,\n\n) {\n\n info!(\"Despawning inventory UI\");\n\n\n\n uis.iter().for_each(|ui| {\n\n commands.entity(ui).despawn_recursive();\n\n });\n\n}\n", "file_path": "src/item.rs", "rank": 40, "score": 39097.03537402827 }, { "content": "fn main() {\n\n // When run locally, results may differ from actual CI runs triggered by\n\n // .github/workflows/ci.yml\n\n\n\n // See if any code needs to be formatted\n\n cmd!(\"cargo fmt --all -- --check\")\n\n .run()\n\n .expect(\"Please run 'cargo fmt --all' to format your code.\");\n\n\n\n // Run tests\n\n cmd!(\"cargo test\")\n\n .run()\n\n .expect(\"Please fix failing tests in output above.\");\n\n\n\n // See if clippy has any complaints.\n\n // - Type complexity must be ignored because we use huge templates for queries\n\n // cmd!(\"cargo clippy --workspace -- -A clippy::type_complexity\")\n\n // .run()\n\n // .expect(\"Please fix clippy errors in output above.\");\n\n\n\n cmd!(\"cargo check --workspace\")\n\n .run()\n\n .expect(\"Please fix cargo check errors in output above.\");\n\n}\n", "file_path": "tools/ci/src/main.rs", "rank": 41, "score": 39097.03537402827 }, { "content": "fn zoom_update(\n\n mut scroll: EventReader<MouseWheel>,\n\n mut query: Query<&mut OrthographicProjection, With<MainCamera>>,\n\n) {\n\n for mut projection in query.iter_mut() {\n\n for ev in scroll.iter() {\n\n projection.scale = (projection.scale - ev.y / 
20.0).max(0.01);\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/levels/mod.rs", "rank": 42, "score": 39097.03537402827 }, { "content": " let entity = *entity;\n\n let Ok((player_ent, &player_transform, inventory)) = players.get(entity) else {return};\n\n for (parent, mut gun_transform, mut visibility, mut gun_timer, gun_type) in guns.iter_mut()\n\n {\n\n if let Some(Item::Gun(_)) = inventory.get_item() {\n\n visibility.is_visible = true;\n\n\n\n if parent.0 == player_ent {\n\n gun_timer.tick(time.delta());\n\n // Shoot\n\n if input.shoot.is_down() && gun_timer.finished() {\n\n info!(\"Player {player_ent:?} shoots {gun_type:?}\");\n\n gun_type.play_sfx(&*audio, &channels.audio, &*asset_server);\n\n commands\n\n .spawn_bundle(gun_type.create_bullet_bundle(\n\n &*asset_server,\n\n player_transform.translation + gun_transform.translation,\n\n input.aim_direction,\n\n ))\n\n .insert(\n", "file_path": "src/player/player_movement.rs", "rank": 43, "score": 32831.57070800912 }, { "content": "use bevy::prelude::*;\n\nuse heron::{CollisionLayers, RigidBody, Velocity};\n\n\n\nuse crate::{\n\n gun::{GunTimer, GunType},\n\n inputs::PlayerInput,\n\n item::{Inventory, Item},\n\n resources::audio_channels::AudioChannels,\n\n GameState,\n\n};\n\n\n\nuse super::{ControlledPlayer, PlayerRecording, PlayerStats};\n\n\n\n#[derive(Component, Default)]\n\npub struct ControllablePlayer;\n\n\n\n#[derive(Component, Default)]\n\npub struct CloneId(pub usize);\n\n\n\npub struct PlayerInputTick {\n\n pub entity: Entity,\n\n pub input: PlayerInput,\n\n}\n\n\n", "file_path": "src/player/player_movement.rs", "rank": 44, "score": 32830.510316057276 }, { "content": " // Shooting\n\n ticks_batch.push(PlayerInputTick {\n\n entity,\n\n input: input.clone(),\n\n })\n\n } else if let Some(input) = recording.last() {\n\n // Movement\n\n vel.linear = Vec3::from((input.move_direction, 0.0)) * stat.speed;\n\n // Shooting\n\n ticks_batch.push(PlayerInputTick {\n\n entity,\n\n input: input.clone(),\n\n })\n\n }\n\n 
}\n\n }\n\n }\n\n input_ticks.send_batch(ticks_batch.into_iter());\n\n player_recording.current_tick += 1;\n\n}\n\n\n", "file_path": "src/player/player_movement.rs", "rank": 45, "score": 32828.150957098806 }, { "content": " CollisionLayers::none()\n\n .with_group(crate::GameLayers::Bullets)\n\n .with_masks(&[\n\n crate::GameLayers::World,\n\n crate::GameLayers::Enemies,\n\n ]),\n\n );\n\n gun_timer.set_duration(gun_type.cooldown());\n\n gun_timer.reset();\n\n }\n\n // Orient gun\n\n gun_transform.rotation = Quat::from_axis_angle(\n\n Vec3::Z,\n\n input.aim_direction.y.atan2(input.aim_direction.x),\n\n );\n\n }\n\n } else {\n\n visibility.is_visible = false;\n\n\n\n gun_timer.set_duration(gun_type.cooldown());\n\n gun_timer.reset();\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/player/player_movement.rs", "rank": 46, "score": 32825.4518284362 }, { "content": "};\n\n\n\n#[derive(Default)]\n\npub struct PlayerRecording {\n\n pub current_loop: usize,\n\n pub current_tick: usize,\n\n pub inputs: Vec<Vec<PlayerInput>>,\n\n}\n\n\n\npub struct PlayerPlugin;\n\n\n\nimpl Plugin for PlayerPlugin {\n\n fn build(&self, app: &mut App) {\n\n app.init_resource::<PlayerRecording>()\n\n .add_event::<PlayerInputTick>()\n\n .add_system_set(\n\n SystemSet::on_update(GameState::Playing)\n\n .with_system(cam_follow_player)\n\n .with_system(record_player)\n\n .with_system(player_clone)\n", "file_path": "src/player.rs", "rank": 49, "score": 28033.706148569734 }, { "content": "pub struct PlayerStats {\n\n pub speed: f32,\n\n}\n\n\n\nimpl Default for PlayerStats {\n\n fn default() -> Self {\n\n Self { speed: 200.0 }\n\n }\n\n}\n\n\n\n// Should mark the player currently under control but not ghosts\n\n#[derive(Component)]\n\npub struct ControlledPlayer;\n\n\n", "file_path": "src/player.rs", "rank": 50, "score": 28032.83497640033 }, { "content": "use bevy::prelude::*;\n\n\n\nmod player_movement;\n\n\n\nuse heron::{CollisionLayers, CollisionShape, RigidBody, RotationConstraints, 
Velocity};\n\nuse player_movement::player_movement;\n\n\n\nuse crate::{\n\n gun::GunType,\n\n inputs::PlayerInput,\n\n item::{IgnoreColliders, Inventory, Item},\n\n levels::MainCamera,\n\n player::player_movement::CloneId,\n\n utils::CommonHandles,\n\n GameState,\n\n};\n\n\n\nuse self::player_movement::{\n\n player_clone, player_shooting, player_shooting_input, record_player, replay_recordings,\n\n ControllablePlayer, PlayerInputTick,\n", "file_path": "src/player.rs", "rank": 51, "score": 28026.887670322543 }, { "content": " let player_ent = commands\n\n .spawn_bundle(ControllablePlayerBundle::default())\n\n .insert_bundle(SpriteSheetBundle {\n\n sprite: TextureAtlasSprite::new(32),\n\n texture_atlas: common_handles.player_sprites.clone(),\n\n transform: Transform::from_xyz(pos.0, pos.1, 1.0),\n\n ..Default::default()\n\n })\n\n .insert(starting_inventory)\n\n .insert(IgnoreColliders::default())\n\n .insert(RigidBody::Dynamic)\n\n .insert(RotationConstraints::lock())\n\n .insert(CollisionShape::Sphere { radius: 10.0 })\n\n .insert(Velocity::default())\n\n .insert(\n\n CollisionLayers::none()\n\n .with_group(crate::GameLayers::Player)\n\n .with_masks(&[\n\n crate::GameLayers::World,\n\n crate::GameLayers::Enemies,\n", "file_path": "src/player.rs", "rank": 52, "score": 28026.462764741515 }, { "content": " crate::GameLayers::Pickups,\n\n ]),\n\n )\n\n .add_child(starting_gun)\n\n .id();\n\n if is_clone {\n\n commands.entity(player_ent).insert(CloneId(clone_id));\n\n } else {\n\n commands.entity(player_ent).insert(ControlledPlayer);\n\n }\n\n}\n\n\n\n#[derive(Bundle, Default)]\n\npub struct ControllablePlayerBundle {\n\n controllable: ControllablePlayer,\n\n stats: PlayerStats,\n\n inventory: Inventory,\n\n}\n\n\n\n#[derive(Component)]\n", "file_path": "src/player.rs", "rank": 53, "score": 28023.94393451839 }, { "content": " .with_system(player_movement)\n\n .with_system(replay_recordings)\n\n .with_system(player_shooting_input)\n\n .with_system(player_shooting), // 
This apparently removes the GameState condition\n\n //.with_run_criteria(\n\n // FixedTimestep::steps_per_second(60.0),\n\n //)\n\n );\n\n }\n\n}\n\n\n\n// Going to want this to find the spawn point eventually.\n", "file_path": "src/player.rs", "rank": 54, "score": 28021.95572302403 }, { "content": "use bevy::{core::FixedTimestep, prelude::*};\n\n\n\npub struct GameInputPlugin;\n\n\n\nimpl Plugin for GameInputPlugin {\n\n fn build(&self, app: &mut App) {\n\n app.insert_resource(PlayerInput::default())\n\n .add_system_to_stage(\n\n CoreStage::PreUpdate,\n\n get_player_inputs.with_run_criteria(\n\n FixedTimestep::steps_per_second(60.0).with_label(\"update_inputs\"),\n\n ),\n\n );\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, Default)]\n\npub struct PlayerInput {\n\n pub move_direction: Vec2,\n\n pub aim_direction: Vec2,\n", "file_path": "src/inputs.rs", "rank": 55, "score": 17.35044541751711 }, { "content": "use std::{\n\n ops::{Deref, DerefMut},\n\n time::Duration,\n\n};\n\n\n\nuse bevy::prelude::*;\n\nuse heron::{\n\n rapier_plugin::{convert::IntoRapier, rapier2d::prelude::RigidBodySet, RigidBodyHandle},\n\n CollisionEvent, CollisionShape, RigidBody, RotationConstraints, Velocity,\n\n};\n\n\n\npub struct GunPlugin;\n\n\n\nimpl Plugin for GunPlugin {\n\n fn build(&self, app: &mut App) {\n\n app.add_system(enable_bullet_ccd)\n\n .add_system(despawn_on_collision);\n\n }\n\n}\n\n\n", "file_path": "src/gun.rs", "rank": 56, "score": 15.363860927711507 }, { "content": "use std::{\n\n mem::replace,\n\n ops::{Deref, DerefMut},\n\n};\n\n\n\nuse bevy::prelude::*;\n\nuse heron::{CollisionEvent, CollisionLayers, CollisionShape, RigidBody};\n\n\n\nuse crate::{gun::GunType, inputs::PlayerInput, player::ControlledPlayer, GameLayers, GameState};\n\n\n\npub struct ItemPlugin;\n\n\n\nimpl Plugin for ItemPlugin {\n\n fn build(&self, app: &mut App) {\n\n app.add_system_set(\n\n SystemSet::on_enter(GameState::Playing)\n\n .with_system(spawn_inventory_ui)\n\n .with_system(spawn_pickup),\n\n 
)\n\n .add_system_set(\n", "file_path": "src/item.rs", "rank": 57, "score": 15.218049785231077 }, { "content": "pub mod map;\n\n\n\nuse bevy::{input::mouse::MouseWheel, prelude::*};\n\nuse bevy_ecs_tilemap::prelude::*;\n\nuse heron::{CollisionLayers, CollisionShape, RigidBody, Velocity};\n\n\n\nuse crate::{player::PlayerRecording, utils::CommonHandles, GameState};\n\n\n\nuse self::map::MapInitData;\n\n\n\npub struct SinglePlayerScene;\n\n\n\nimpl Plugin for SinglePlayerScene {\n\n fn build(&self, app: &mut App) {\n\n app.add_plugin(TilemapPlugin)\n\n .init_resource::<MapInitData>()\n\n .add_system(crate::utils::set_texture_filters_to_nearest)\n\n .add_system_set(SystemSet::on_enter(GameState::BuildLevel).with_system(build_level))\n\n .add_system_set(SystemSet::on_enter(GameState::SetupLevel).with_system(level_spawns))\n\n .add_system_set(\n", "file_path": "src/levels/mod.rs", "rank": 58, "score": 14.570033184165139 }, { "content": "use std::time::Duration;\n\n\n\n#[derive(Debug, Default)]\n\npub struct MapInitData {\n\n pub player_spawn_position: (f32, f32),\n\n pub enemy_spawn_positions: Vec<(f32, f32)>,\n\n // Fixme move somewhere more sensible\n\n pub kills: usize,\n\n pub timer: Duration,\n\n}\n", "file_path": "src/levels/map.rs", "rank": 59, "score": 12.350127233124837 }, { "content": "use bevy::{prelude::*, render::render_resource::TextureUsages};\n\n\n\npub struct UtilsPlugin;\n\n\n\nimpl Plugin for UtilsPlugin {\n\n fn build(&self, app: &mut App) {\n\n app.add_system_to_stage(CoreStage::PreUpdate, set_texture_filters_to_nearest)\n\n .add_startup_system(load_common_handles);\n\n }\n\n}\n\n\n\n// Taken from https://github.com/StarArawn/bevy_ecs_tilemap/blob/main/examples/helpers/texture.rs\n", "file_path": "src/utils.rs", "rank": 60, "score": 11.946585472736501 }, { "content": "#![feature(try_blocks)]\n\n#![feature(let_else)]\n\n#![allow(clippy::too_many_arguments)]\n\n\n\nuse bevy::diagnostic::{FrameTimeDiagnosticsPlugin, LogDiagnosticsPlugin};\n\nuse 
bevy::prelude::*;\n\nuse bevy::window::WindowMode;\n\nuse bevy_egui::EguiPlugin;\n\n\n\nuse bevy_kira_audio::AudioPlugin;\n\nuse heron::prelude::*;\n\nuse resources::audio_channels::AudioChannels;\n\n\n\nmod enemy;\n\npub mod gun;\n\nmod inputs;\n\nmod item;\n\nmod levels;\n\nmod menus;\n\nmod player;\n\npub mod resources;\n\nmod utils;\n\n\n", "file_path": "src/main.rs", "rank": 61, "score": 10.958490627665778 }, { "content": "use bevy::prelude::*;\n\n\n\nuse crate::{utils::log_error, GameState};\n\n\n\npub mod common;\n\npub mod main_menu;\n\n\n\npub struct MainMenuScene;\n\n\n\nimpl Plugin for MainMenuScene {\n\n fn build(&self, app: &mut App) {\n\n app.add_system_set(SystemSet::on_enter(GameState::MainMenu).with_system(main_menu::setup))\n\n .add_system_set(\n\n SystemSet::on_update(GameState::MainMenu)\n\n .with_system(main_menu::handle_buttons.chain(log_error)),\n\n )\n\n .add_system_set(SystemSet::on_exit(GameState::MainMenu).with_system(common::cleanup));\n\n }\n\n}\n", "file_path": "src/menus/mod.rs", "rank": 62, "score": 10.710361318153698 }, { "content": " ) -> BulletBundle {\n\n let aim_direction = aim_direction.extend(0.0);\n\n let transform = Transform {\n\n translation: origin + aim_direction * 12.0 + Vec3::Z * 1.2,\n\n rotation: Quat::from_axis_angle(Vec3::Z, aim_direction.y.atan2(aim_direction.x)),\n\n ..Default::default()\n\n };\n\n match self {\n\n GunType::Shotgun => BulletBundle {\n\n bullet_stats: BulletStats { _damage: 3.0 },\n\n sprite: SpriteBundle {\n\n texture: asset_server.load(\"images/shotgun_bullet.png\"),\n\n transform,\n\n ..Default::default()\n\n },\n\n rb: RigidBody::Dynamic,\n\n constraints: RotationConstraints::lock(),\n\n collision_shape: CollisionShape::Cuboid {\n\n half_extends: Vec3::new(8f32, 1f32, 0f32),\n\n border_radius: None,\n", "file_path": "src/gun.rs", "rank": 63, "score": 10.69520009115934 }, { "content": "\n\nimpl Default for GunType {\n\n fn default() -> Self {\n\n Self::Shotgun\n\n }\n\n}\n\n\n\nimpl GunType 
{\n\n pub fn cooldown(&self) -> Duration {\n\n match self {\n\n GunType::Shotgun => Duration::from_millis(700),\n\n }\n\n }\n\n\n\n pub fn velocity(&self) -> f32 {\n\n match self {\n\n GunType::Shotgun => 1000.0,\n\n }\n\n }\n\n\n", "file_path": "src/gun.rs", "rank": 64, "score": 9.868108134016474 }, { "content": " .add_plugin(EguiPlugin)\n\n .add_plugin(AudioPlugin)\n\n //.add_plugin(WorldInspectorPlugin::new())\n\n .add_plugin(player::PlayerPlugin)\n\n .add_plugin(menus::MainMenuScene)\n\n .add_plugin(levels::SinglePlayerScene)\n\n .add_plugin(item::ItemPlugin)\n\n .add_plugin(gun::GunPlugin)\n\n .add_plugin(enemy::EnemyPlugin)\n\n .add_state(GameState::MainMenu)\n\n .run();\n\n}\n\n\n\n#[derive(Clone, Eq, PartialEq, Debug, Hash)]\n\npub enum GameState {\n\n MainMenu,\n\n BuildLevel,\n\n SetupLevel,\n\n Playing,\n\n GameWon,\n", "file_path": "src/main.rs", "rank": 65, "score": 9.571280734120803 }, { "content": "use bevy::{\n\n app::AppExit,\n\n math::Size,\n\n prelude::{\n\n info, AssetServer, BuildChildren, Button, ButtonBundle, Changed, Color, Commands,\n\n Component, EventWriter, NodeBundle, Query, Res, ResMut, State, TextBundle, UiCameraBundle,\n\n With, Without,\n\n },\n\n text::Text,\n\n ui::{FlexDirection, Interaction, JustifyContent, Style, UiColor, Val},\n\n};\n\nuse bevy_kira_audio::Audio;\n\n\n\nuse crate::{menus::common, resources::audio_channels::AudioChannels, GameState};\n\n\n\nuse super::common::{Disabled, HOVERED_COLOR, NORMAL_COLOR, PRESSED_COLOR};\n\n\n\n#[derive(Debug, Clone, Copy, Component)]\n\npub enum ButtonId {\n\n SinglePlayer,\n\n Settings,\n\n Credits,\n\n Quit,\n\n}\n\n\n", "file_path": "src/menus/main_menu.rs", "rank": 66, "score": 9.26281251208199 }, { "content": "#[derive(Component, Debug, Default)]\n\npub struct BulletStats {\n\n _damage: f32,\n\n}\n\n\n\n#[derive(Bundle, Default)]\n\npub struct BulletBundle {\n\n bullet_stats: BulletStats,\n\n #[bundle]\n\n sprite: SpriteBundle,\n\n rb: RigidBody,\n\n constraints: 
RotationConstraints,\n\n collision_shape: CollisionShape,\n\n velocity: Velocity,\n\n}\n\n\n\n#[derive(Debug, Component, Clone, Copy)]\n\npub enum GunType {\n\n Shotgun,\n\n}\n", "file_path": "src/gun.rs", "rank": 67, "score": 8.893410415303528 }, { "content": "use bevy_kira_audio::{Audio, AudioChannel};\n\n\n\n#[derive(Debug, Copy, Clone)]\n\npub enum AudioChannelId {\n\n Music,\n\n Audio,\n\n}\n\n\n\npub struct AudioChannels {\n\n pub music: AudioChannel,\n\n pub music_volume: f32,\n\n pub audio: AudioChannel,\n\n pub audio_volume: f32,\n\n}\n\n\n\nimpl Default for AudioChannels {\n\n fn default() -> Self {\n\n Self {\n\n music: AudioChannel::new(\"music\".to_owned()),\n\n music_volume: 1.0,\n", "file_path": "src/resources/audio_channels.rs", "rank": 68, "score": 8.785359992238373 }, { "content": "impl DerefMut for IgnoreColliders {\n\n fn deref_mut(&mut self) -> &mut Self::Target {\n\n &mut self.0\n\n }\n\n}\n\n\n\n#[derive(Component, Debug, Default)]\n\npub struct Inventory(Option<Item>);\n\n\n\nimpl Deref for Inventory {\n\n type Target = Option<Item>;\n\n\n\n fn deref(&self) -> &Self::Target {\n\n &self.0\n\n }\n\n}\n\n\n\nimpl DerefMut for Inventory {\n\n fn deref_mut(&mut self) -> &mut Self::Target {\n\n &mut self.0\n", "file_path": "src/item.rs", "rank": 69, "score": 8.242670685538396 }, { "content": " &mut commands,\n\n &common_handles,\n\n map_init_data.player_spawn_position,\n\n &asset_server,\n\n false,\n\n 10000, // doesn't matter\n\n );\n\n\n\n // Spawn clones\n\n for id in 0..recordings.inputs.len() {\n\n crate::player::spawn_player(\n\n &mut commands,\n\n &common_handles,\n\n map_init_data.player_spawn_position,\n\n &asset_server,\n\n true,\n\n id,\n\n );\n\n }\n\n\n\n // Spawn enemies\n\n for (x_px, y_px) in map_init_data.enemy_spawn_positions.iter().cloned() {\n\n crate::enemy::spawn_enemy(&mut commands, &common_handles, Vec2::new(x_px, y_px));\n\n }\n\n\n\n let _ = game_state.overwrite_set(GameState::Playing);\n\n}\n\n\n", "file_path": 
"src/levels/mod.rs", "rank": 70, "score": 7.929150236130718 }, { "content": "\n\n#[derive(Component)]\n\npub struct GunTimer(Timer);\n\n\n\nimpl Deref for GunTimer {\n\n type Target = Timer;\n\n\n\n fn deref(&self) -> &Self::Target {\n\n &self.0\n\n }\n\n}\n\n\n\nimpl DerefMut for GunTimer {\n\n fn deref_mut(&mut self) -> &mut Self::Target {\n\n &mut self.0\n\n }\n\n}\n\n\n\nimpl Default for GunTimer {\n\n fn default() -> Self {\n\n Self(Timer::new(Duration::from_millis(1), false)) // 0 causes a panic\n\n }\n\n}\n", "file_path": "src/gun.rs", "rank": 71, "score": 7.829616593752614 }, { "content": " }\n\n}\n\n\n\nimpl Inventory {\n\n #[allow(dead_code)]\n\n pub fn collect_item(&mut self, item: Item) -> bool {\n\n if self.is_none() {\n\n **self = Some(item);\n\n true\n\n } else {\n\n false\n\n }\n\n }\n\n\n\n #[allow(dead_code)]\n\n pub fn drop_item(&mut self) -> Option<Item> {\n\n replace(&mut self.0, None)\n\n }\n\n\n\n pub fn get_item(&self) -> Option<&Item> {\n\n self.as_ref()\n\n }\n\n}\n\n\n", "file_path": "src/item.rs", "rank": 72, "score": 7.759043228064878 }, { "content": " for y in 0..size_y {\n\n for x in 0..size_x {\n\n let tile_pos = TilePos(x, y);\n\n let (x_px, y_px) = (\n\n tile_pos.0 as f32 * 32.0 + 16.0,\n\n tile_pos.1 as f32 * 32.0 + 16.0,\n\n );\n\n if layer_builder.get_tile(tile_pos).unwrap().tile.texture_index != 9 {\n\n map_init_data.player_spawn_position = (x_px, y_px);\n\n if rand::random::<f32>() < 0.1 {\n\n map_init_data.enemy_spawn_positions.push((x_px, y_px));\n\n }\n\n continue;\n\n }\n\n let mut neighbors = 0;\n\n if x > 0 && x < size_x && y > 0 && y < size_y {\n\n for yp in [y - 1, y, y + 1] {\n\n for xp in [x - 1, x, x + 1] {\n\n if (xp == x && yp == y) || !(xp == x || yp == y) {\n\n continue;\n", "file_path": "src/levels/mod.rs", "rank": 73, "score": 7.3595486797885865 }, { "content": "}\n\n\n\n#[derive(Clone, Eq, PartialEq, Debug, Hash, PhysicsLayer)]\n\npub enum GameLayers {\n\n World,\n\n Bullets,\n\n Player,\n\n Enemies,\n\n 
Pickups,\n\n}\n", "file_path": "src/main.rs", "rank": 74, "score": 7.12977198049573 }, { "content": " },\n\n velocity: Velocity::from_linear(aim_direction * self.velocity()),\n\n },\n\n }\n\n }\n\n\n\n pub fn play_sfx(\n\n &self,\n\n audio: &bevy_kira_audio::Audio,\n\n channel: &bevy_kira_audio::AudioChannel,\n\n asset_server: &AssetServer,\n\n ) {\n\n match self {\n\n GunType::Shotgun => {\n\n audio.play_in_channel(asset_server.load(\"sfx/shotgun.wav\"), channel)\n\n }\n\n };\n\n }\n\n}\n\n\n", "file_path": "src/gun.rs", "rank": 75, "score": 6.72295965162909 }, { "content": " pub fn create_bundle(&self, asset_server: &AssetServer) -> GunBundle {\n\n let transform = Transform::from_xyz(10.0, 0.0, 1.1);\n\n match self {\n\n GunType::Shotgun => GunBundle {\n\n gun_type: *self,\n\n sprite: SpriteBundle {\n\n texture: asset_server.load(\"images/shotgun.png\"),\n\n transform,\n\n ..Default::default()\n\n },\n\n gun_timer: GunTimer::default(),\n\n },\n\n }\n\n }\n\n\n\n pub fn create_bullet_bundle(\n\n &self,\n\n asset_server: &AssetServer,\n\n origin: Vec3,\n\n aim_direction: Vec2,\n", "file_path": "src/gun.rs", "rank": 76, "score": 6.718613090884755 }, { "content": " }\n\n\n\n pub fn was_pressed(&self) -> bool {\n\n *self == ButtonState::Pressed\n\n }\n\n}\n\n\n\nimpl Default for ButtonState {\n\n fn default() -> Self {\n\n Self::Up\n\n }\n\n}\n\n\n\nimpl ButtonState {\n\n #[inline]\n\n fn upgrade(&mut self) {\n\n *self = match self {\n\n ButtonState::Up => *self,\n\n ButtonState::Down => ButtonState::Released,\n\n ButtonState::Pressed => ButtonState::Released,\n", "file_path": "src/inputs.rs", "rank": 77, "score": 6.476213745817256 }, { "content": " audio: AudioChannel::new(\"audio\".to_owned()),\n\n audio_volume: 1.0,\n\n }\n\n }\n\n}\n\n\n\nimpl AudioChannels {\n\n pub fn set_volume(&mut self, audio: &Audio, id: AudioChannelId, volume: f32) {\n\n match id {\n\n AudioChannelId::Music => {\n\n self.music_volume = volume;\n\n audio.set_volume_in_channel(volume, 
&self.music);\n\n }\n\n AudioChannelId::Audio => {\n\n self.audio_volume = volume;\n\n audio.set_volume_in_channel(volume, &self.audio);\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/resources/audio_channels.rs", "rank": 78, "score": 6.448713246443406 }, { "content": "\n\n if player_input.aim_direction.length_squared() != 0.0 {\n\n player_input.aim_direction = player_input.aim_direction.normalize();\n\n }\n\n\n\n // Get our action states\n\n if keys.pressed(KeyCode::Space) {\n\n player_input.dodge.downgrade()\n\n } else {\n\n player_input.dodge.upgrade()\n\n };\n\n if mouse.pressed(MouseButton::Left) {\n\n player_input.shoot.downgrade()\n\n } else {\n\n player_input.shoot.upgrade()\n\n };\n\n if mouse.pressed(MouseButton::Right) {\n\n player_input.throw.downgrade()\n\n } else {\n\n player_input.throw.upgrade()\n\n };\n\n}\n", "file_path": "src/inputs.rs", "rank": 79, "score": 6.125902239004823 }, { "content": " } else {\n\n 0.0\n\n };\n\n move_direction.y -= if keys.pressed(KeyCode::S) || keys.pressed(KeyCode::Down) {\n\n 1.0\n\n } else {\n\n 0.0\n\n };\n\n if move_direction.length_squared() != 0.0 {\n\n move_direction = move_direction.normalize();\n\n }\n\n player_input.move_direction = move_direction;\n\n\n\n // Create our aim vector\n\n let window = windows.get_primary().unwrap();\n\n\n\n if let Some(position) = window.cursor_position() {\n\n player_input.aim_direction =\n\n position - Vec2::new(window.width() / 2.0, window.height() / 2.0);\n\n }\n", "file_path": "src/inputs.rs", "rank": 80, "score": 5.774178603362916 }, { "content": " pickup = e2;\n\n } else {\n\n continue;\n\n }\n\n\n\n if let (Ok((mut inventory, mut ignore_colls)), Ok(item)) =\n\n (players.get_mut(player), pickups.get(pickup))\n\n {\n\n if ev.is_started() {\n\n if !ignore_colls.contains(&pickup) && inventory.collect_item(item.clone()) {\n\n commands.entity(pickup).despawn();\n\n }\n\n } else {\n\n ignore_colls.retain(|id| *id != pickup);\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": 
"src/item.rs", "rank": 81, "score": 5.322598749990501 }, { "content": " risks associated with Your exercise of permissions under this License.\n\n\n\n 8. Limitation of Liability. In no event and under no legal theory,\n\n whether in tort (including negligence), contract, or otherwise,\n\n unless required by applicable law (such as deliberate and grossly\n\n negligent acts) or agreed to in writing, shall any Contributor be\n\n liable to You for damages, including any direct, indirect, special,\n\n incidental, or consequential damages of any character arising as a\n\n result of this License or out of the use or inability to use the\n\n Work (including but not limited to damages for loss of goodwill,\n\n work stoppage, computer failure or malfunction, or any and all\n\n other commercial damages or losses), even if such Contributor\n\n has been advised of the possibility of such damages.\n\n\n\n 9. Accepting Warranty or Additional Liability. While redistributing\n\n the Work or Derivative Works thereof, You may choose to offer,\n\n and charge a fee for, acceptance of support, warranty, indemnity,\n\n or other liability obligations and/or rights consistent with this\n\n License. 
However, in accepting such obligations, You may act only\n\n on Your own behalf and on Your sole responsibility, not on behalf\n\n of any other Contributor, and only if You agree to indemnify,\n\n defend, and hold each Contributor harmless for any liability\n\n incurred by, or claims asserted against, such Contributor by reason\n\n of your accepting any such warranty or additional liability.\n\n\n\n END OF TERMS AND CONDITIONS\n\n\n", "file_path": "LICENSE-APACHE.md", "rank": 82, "score": 5.300965284353509 }, { "content": "use bevy::prelude::*;\n\n\n\n#[derive(Component)]\n\npub struct Disabled;\n\n\n\npub const NORMAL_COLOR: UiColor = UiColor(Color::rgb(0.15, 0.15, 0.15));\n\npub const HOVERED_COLOR: UiColor = UiColor(Color::rgb(0.25, 0.25, 0.25));\n\npub const PRESSED_COLOR: UiColor = UiColor(Color::rgb(0.35, 0.75, 0.35));\n\n\n", "file_path": "src/menus/common.rs", "rank": 83, "score": 4.990206166125192 }, { "content": " half_extends: Vec3::new(16.0, 16.0, 0.0),\n\n border_radius: None,\n\n })\n\n .insert(\n\n CollisionLayers::none()\n\n .with_group(crate::GameLayers::World)\n\n .with_masks(&[\n\n crate::GameLayers::Player,\n\n crate::GameLayers::Bullets,\n\n crate::GameLayers::Enemies,\n\n ]),\n\n )\n\n .id();\n\n commands.entity(map_entity).add_child(child);\n\n }\n\n }\n\n }\n\n }\n\n\n\n let layer_entity = map_query.build_layer(&mut commands, layer_builder, texture_handle);\n", "file_path": "src/levels/mod.rs", "rank": 84, "score": 4.930168934335562 }, { "content": " pub shoot: ButtonState,\n\n pub throw: ButtonState,\n\n pub dodge: ButtonState,\n\n}\n\n\n\n#[derive(Debug, Clone, Copy, PartialEq)]\n\npub enum ButtonState {\n\n /// Is currently not pressed, and was not just released this frame\n\n Up,\n\n /// Is currently pressed\n\n Down,\n\n /// Was pressed this frame\n\n Pressed,\n\n /// Was released this frame\n\n Released,\n\n}\n\n\n\nimpl ButtonState {\n\n pub fn is_down(&self) -> bool {\n\n *self == ButtonState::Down || *self == 
ButtonState::Pressed\n", "file_path": "src/inputs.rs", "rank": 85, "score": 4.882635256942898 }, { "content": "# Apache License\n\n\n\n Version 2.0, January 2004\n\n http://www.apache.org/licenses/\n\n\n\n TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION\n\n\n\n 1. Definitions.\n\n\n\n \"License\" shall mean the terms and conditions for use, reproduction,\n\n and distribution as defined by Sections 1 through 9 of this document.\n\n\n\n \"Licensor\" shall mean the copyright owner or entity authorized by\n\n the copyright owner that is granting the License.\n\n\n\n \"Legal Entity\" shall mean the union of the acting entity and all\n\n other entities that control, are controlled by, or are under common\n\n control with that entity. For the purposes of this definition,\n\n \"control\" means (i) the power, direct or indirect, to cause the\n\n direction or management of such entity, whether by contract or\n\n otherwise, or (ii) ownership of fifty percent (50%) or more of the\n\n outstanding shares, or (iii) beneficial ownership of such entity.\n\n\n\n \"You\" (or \"Your\") shall mean an individual or Legal Entity\n\n exercising permissions granted by this License.\n\n\n\n \"Source\" form shall mean the preferred form for making modifications,\n\n including but not limited to software source code, documentation\n\n source, and configuration files.\n\n\n\n \"Object\" form shall mean any form resulting from mechanical\n\n transformation or translation of a Source form, including but\n\n not limited to compiled object code, generated documentation,\n\n and conversions to other media types.\n\n\n\n \"Work\" shall mean the work of authorship, whether in Source or\n\n Object form, made available under the License, as indicated by a\n\n copyright notice that is included in or attached to the work\n", "file_path": "LICENSE-APACHE.md", "rank": 86, "score": 4.687531488536882 }, { "content": " layer_builder.for_each_tiles_mut(|_ent, data| {\n\n *data = Some(if 
rand::random::<f32>() > 0.55 {\n\n floor_tile.clone()\n\n } else {\n\n outside_tile.clone()\n\n });\n\n });\n\n\n\n let (size_x, size_y) = {\n\n let settings = &layer_builder.settings;\n\n (\n\n settings.map_size.0 * settings.chunk_size.0,\n\n settings.map_size.1 * settings.chunk_size.1,\n\n )\n\n };\n\n\n\n for y in 0..size_y {\n\n for x in [0, size_x - 1] {\n\n *layer_builder.get_tile_mut(TilePos(x, y)).unwrap() = outside_tile.clone();\n\n }\n", "file_path": "src/levels/mod.rs", "rank": 87, "score": 4.086533366529586 }, { "content": "\n\n map.add_layer(&mut commands, 0u16, layer_entity);\n\n\n\n commands\n\n .entity(map_entity)\n\n .insert(map)\n\n .insert(Transform::from_xyz(0.0, 0.0, 0.0))\n\n .insert(GlobalTransform::default());\n\n\n\n // Fixme bad place for this but no time to be clean\n\n // Ui stuff\n\n let text_style = Style {\n\n align_self: AlignSelf::Center,\n\n position_type: PositionType::Relative,\n\n position: Rect::default(),\n\n ..Default::default()\n\n };\n\n\n\n let text_textstyle = TextStyle {\n\n font: asset_server.load(\"fonts/FiraSans-Bold.ttf\"),\n", "file_path": "src/levels/mod.rs", "rank": 88, "score": 3.770130175958562 }, { "content": " GunType::Shotgun => \"images/shotgun.png\",\n\n },\n\n Item::Grenade => \"images/grenade.png\",\n\n Item::_Totem => \"images/totem.png\",\n\n }\n\n }\n\n\n\n fn bundle(self, tf: Transform, asset_server: &AssetServer) -> PickupBundle {\n\n PickupBundle {\n\n sprite_bundle: SpriteBundle {\n\n texture: asset_server.load(self.image_path()),\n\n transform: tf,\n\n ..SpriteBundle::default()\n\n },\n\n rb: RigidBody::Sensor,\n\n coll_shape: CollisionShape::Sphere { radius: 10. 
},\n\n coll_layers: CollisionLayers::none()\n\n .with_group(GameLayers::Pickups)\n\n .with_mask(GameLayers::Player),\n\n item: self,\n\n }\n\n }\n\n}\n\n\n\n#[derive(Bundle)]\n", "file_path": "src/item.rs", "rank": 89, "score": 3.5268664760524135 }, { "content": " }\n\n if let Ok(tile) = layer_builder\n\n .get_tile(TilePos(xp, yp))\n\n .map(|t| t.tile.texture_index)\n\n {\n\n if tile == 4 {\n\n neighbors += 1;\n\n }\n\n }\n\n }\n\n }\n\n }\n\n if neighbors == 4 {\n\n *layer_builder.get_tile_mut(tile_pos).unwrap() = floor_tile.clone();\n\n } else {\n\n let child = commands\n\n .spawn()\n\n .insert(GlobalTransform::from_xyz(x_px, y_px, 0.0))\n\n .insert(RigidBody::Static)\n\n .insert(CollisionShape::Cuboid {\n", "file_path": "src/levels/mod.rs", "rank": 90, "score": 3.4735908908982336 }, { "content": " let mut map = Map::new(0u16, map_entity);\n\n\n\n let (mut layer_builder, _) = LayerBuilder::new(\n\n &mut commands,\n\n LayerSettings::new(\n\n MapSize(2, 2),\n\n ChunkSize(64, 64),\n\n TileSize(32.0, 32.0),\n\n TextureSize(96.0, 128.0),\n\n ),\n\n 0u16,\n\n 0u16,\n\n );\n\n\n\n let mut outside_tile = TileBundle::default();\n\n outside_tile.tile.texture_index = 9;\n\n let mut floor_tile = TileBundle::default();\n\n floor_tile.tile.texture_index = 4;\n\n\n\n {\n", "file_path": "src/levels/mod.rs", "rank": 91, "score": 3.0979434175366527 }, { "content": " SystemSet::on_update(GameState::Playing)\n\n .with_system(update_inventory_ui)\n\n .with_system(drop_pickup)\n\n .with_system(collide_pickups),\n\n )\n\n .add_system_set(SystemSet::on_exit(GameState::Playing).with_system(despawn_inventory_ui));\n\n }\n\n}\n\n\n\n#[derive(Clone, Component, Debug)]\n\npub enum Item {\n\n Gun(GunType),\n\n Grenade,\n\n _Totem,\n\n}\n\n\n\nimpl Item {\n\n fn image_path(&self) -> &'static str {\n\n match self {\n\n Item::Gun(gun_type) => match gun_type {\n", "file_path": "src/item.rs", "rank": 92, "score": 3.0969485753279264 }, { "content": "# MIT License\n\n\n\nCopyright (c) 2021 Leafwing 
Studios\n\n\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\n\nof this software and associated documentation files (the \"Software\"), to deal\n\nin the Software without restriction, including without limitation the rights\n\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n\ncopies of the Software, and to permit persons to whom the Software is\n\nfurnished to do so, subject to the following conditions:\n\n\n\nThe above copyright notice and this permission notice shall be included in all\n\ncopies or substantial portions of the Software.\n\n\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n\nOUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n\nSOFTWARE.\n", "file_path": "LICENSE-MIT.md", "rank": 93, "score": 3.090000892541761 }, { "content": "use xshell::cmd;\n\n\n", "file_path": "tools/ci/src/main.rs", "rank": 94, "score": 2.9688536881665457 }, { "content": " }\n\n for x in 0..size_x {\n\n for y in [0, size_y - 1] {\n\n *layer_builder.get_tile_mut(TilePos(x, y)).unwrap() = outside_tile.clone();\n\n }\n\n }\n\n\n\n for _ in 0..40 {\n\n let mut new_vals = vec![];\n\n for y in 1..(size_y - 1) {\n\n for x in 1..(size_x - 1) {\n\n let mut neighbors = 0;\n\n for yp in [y - 1, y, y + 1] {\n\n for xp in [x - 1, x, x + 1] {\n\n if xp == x && yp == y {\n\n continue;\n\n }\n\n if let Ok(tile) = layer_builder\n\n .get_tile(TilePos(xp, yp))\n\n .map(|t| t.tile.texture_index)\n", "file_path": "src/levels/mod.rs", "rank": 95, "score": 2.8336625169114074 }, { "content": "pub mod audio_channels;\n", "file_path": "src/resources/mod.rs", "rank": 96, "score": 
2.8264251357219043 }, { "content": " SystemSet::on_update(GameState::Playing)\n\n .with_system(zoom_update)\n\n .with_system(update_kills_text),\n\n )\n\n .add_system_set(SystemSet::on_enter(GameState::GameWon).with_system(game_won));\n\n }\n\n}\n\n\n\n#[derive(Component)]\n\npub struct MainCamera;\n\n\n\n#[derive(Component)]\n\npub struct KilledText;\n\n\n", "file_path": "src/levels/mod.rs", "rank": 97, "score": 2.483175485113451 }, { "content": "# Bevy Jam 1\n\n\n\n<https://itch.io/jam/bevy-jam-1>\n\n\n\nTeam: SirCarter, Supermath101, tas, Seldom, mdenchev\n\n\n\n## Theme\n\n\n\nUnfair Advantage\n\n\n\n## Building\n\n\n\nThis repository has dynamic linking disabled by default. However, you should enable it for much faster incremental compile times.\n\nIf you're on Windows, you'll need to use the `nightly` Rust compiler.\n\nSwap by using `rustup default nightly`.\n", "file_path": "README.md", "rank": 98, "score": 2.4138028137806073 }, { "content": " font_size: 30.0,\n\n color: Color::BLACK,\n\n };\n\n\n\n let text_text_alignment = TextAlignment {\n\n horizontal: HorizontalAlign::Center,\n\n vertical: VerticalAlign::Center,\n\n };\n\n\n\n let root_ui_ent = commands\n\n .spawn_bundle(NodeBundle {\n\n style: Style {\n\n position_type: PositionType::Absolute,\n\n position: Rect {\n\n left: Val::Px(40.),\n\n top: Val::Px(50.),\n\n ..Default::default()\n\n },\n\n flex_direction: FlexDirection::ColumnReverse,\n\n ..Default::default()\n", "file_path": "src/levels/mod.rs", "rank": 99, "score": 2.377165427956137 } ]
Rust
src/bin/rl/reward.rs
buggedbit/wall-e
e96ec233616661d763a454554c0961a1f9b45912
use ndarray::prelude::*; use serde::{Deserialize, Serialize}; use wall_e::ceo::Reward; use wall_e::diff_drive_model::DiffDriveModel; use wall_e::fcn::*; use wall_e::goal::Goal; #[derive(Debug, Serialize, Deserialize)] #[serde(deny_unknown_fields)] pub struct DiffDriveReward { start_x_bounds: (f32, f32), start_y_bounds: (f32, f32), start_or_bounds: (f32, f32), radius: f32, goal_x_bounds: (f32, f32), goal_y_bounds: (f32, f32), num_episode_ticks: usize, } impl DiffDriveReward { pub fn new( start_x_bounds: (f32, f32), start_y_bounds: (f32, f32), start_or_bounds: (f32, f32), radius: f32, goal_x_bounds: (f32, f32), goal_y_bounds: (f32, f32), num_episode_ticks: usize, ) -> DiffDriveReward { DiffDriveReward { start_x_bounds: start_x_bounds, start_y_bounds: start_y_bounds, start_or_bounds: start_or_bounds, radius: radius, goal_x_bounds: goal_x_bounds, goal_y_bounds: goal_y_bounds, num_episode_ticks: num_episode_ticks, } } pub fn start_x_bounds(&self) -> (f32, f32) { self.start_x_bounds } pub fn start_y_bounds(&self) -> (f32, f32) { self.start_y_bounds } pub fn start_or_bounds(&self) -> (f32, f32) { self.start_or_bounds } pub fn radius(&self) -> f32 { self.radius } pub fn goal_x_bounds(&self) -> (f32, f32) { self.goal_x_bounds } pub fn goal_y_bounds(&self) -> (f32, f32) { self.goal_y_bounds } } impl Reward for DiffDriveReward { fn reward(&self, fcn: &FCN, params: &Array1<f32>, num_episodes: usize) -> f32 { let mut cumulative_reward = 0.0; for _ in 0..num_episodes { let goal_coordinates = Goal::in_region(self.goal_x_bounds, self.goal_y_bounds).coordinates(); let mut model = DiffDriveModel::spawn_randomly( self.start_x_bounds, self.start_y_bounds, self.start_or_bounds, self.radius, goal_coordinates, ); let mut episode_reward = 0.0; for tick in 0..self.num_episode_ticks { let (x, y, or_in_rad) = model.scaled_state(); let control = fcn.at_with(&arr1(&[x, y, or_in_rad]), params); let (v, w) = (control[[0]], control[[1]]); model.set_control(v, w); model.update(0.1).unwrap(); let 
(x, y, or_in_rad) = model.scaled_state(); let (x_hat, y_hat) = { let norm = (x * x + y * y).sqrt(); (x / norm, y / norm) }; let angular_deviation = ((x_hat - or_in_rad.cos()).powf(2.0) + (y_hat - or_in_rad.sin()).powf(2.0)) .sqrt() * (1.0 / (1.0 + tick as f32)); episode_reward -= angular_deviation; episode_reward -= w.abs(); let dist = (x * x + y * y).sqrt(); episode_reward -= dist * 30.0; } let (x, y, _or_in_rad) = model.scaled_state(); let final_dist = (x * x + y * y).sqrt(); episode_reward += 200.0 * (-final_dist).exp(); let (v, w) = model.control(); episode_reward += 200.0 * (-v.abs()).exp() * (-final_dist).exp(); episode_reward += 200.0 * (-w.abs()).exp() * (-final_dist).exp(); cumulative_reward += episode_reward; } let average_reward = cumulative_reward / num_episodes as f32; average_reward } }
use ndarray::prelude::*; use serde::{Deserialize, Serialize}; use wall_e::ceo::Reward; use wall_e::diff_drive_model::DiffDriveModel; use wall_e::fcn::*; use wall_e::goal::Goal; #[derive(Debug, Serialize, Deserialize)] #[serde(deny_unknown_fields)] pub struct DiffDriveReward { start_x_bounds: (f32, f32), start_y_bounds: (f32, f32), start_or_bounds: (f32, f32), radius: f32, goal_x_bounds: (f32, f32), goal_y_bounds: (f32, f32), num_episode_ticks: usize, } impl DiffDriveReward { pub fn new( start_x_bounds: (f32, f32), start_y_bounds: (f32, f32), start_or_bounds: (f32, f32), radius: f32, goal_x_bounds: (f32, f32), goal_y_bounds: (f32, f32), num_episode_ticks: usize, ) -> DiffDriveReward { DiffDriveReward { start_x_bounds: start_x_bounds, start_y_bounds: start_y_bounds, start_or_bounds: start_or_bounds, radius: radius, goal_x_bounds: goal_x_bounds, goal_y_bounds: goal_y_bounds, num_episode_ticks: num_episode_ticks, } } pub fn start_x_bounds(&self) -> (f32, f32) { self.start_x_bounds } pub fn start_y_bounds(&self) -> (f32, f32) { self.start_y_bounds } pub fn start_or_bounds(&self) -> (f32, f32) { self.start_or_bounds } pub fn radius(&self) -> f32 { self.radius } pub fn goal_x_bounds(&self) -> (f32, f32) { self.goal_x_bounds } pub fn goal_y_bounds(&self) -> (f32, f32) { self.goal_y_bounds } } impl Reward for DiffDriveReward { fn reward(&self, fcn: &FCN, params: &Array1<f32>, num_episodes: usize) -> f32 { let mut cumulative_reward = 0.0; for _ in 0..num_episodes { let goal_coordinates = Goal::in_region(self.goal_x_bounds, self.goal_y_bounds).coordinates(); let mut model = DiffDriveModel::spawn_randomly( self.start_x_bounds, self.start_y_bounds, self.start_or_bounds, self.radius, goal_coordinates, ); let mut episode_reward = 0.0; for tick in 0..self.num_episode_ticks { let (x, y, or_in_rad) = model.scaled_state(); let control = fcn.at_with(&arr1(&[x, y, or_in_rad]), params); let (v, w) = (control[[0]], control[[1]]); model.set_control(v, w); model.update(0.1).unwrap(); let 
(x, y, or_in_rad) = model.scaled_state(); let (x_hat, y_hat) = { let norm = (x * x + y * y).sqrt(); (x / norm, y / norm) };
episode_reward -= angular_deviation; episode_reward -= w.abs(); let dist = (x * x + y * y).sqrt(); episode_reward -= dist * 30.0; } let (x, y, _or_in_rad) = model.scaled_state(); let final_dist = (x * x + y * y).sqrt(); episode_reward += 200.0 * (-final_dist).exp(); let (v, w) = model.control(); episode_reward += 200.0 * (-v.abs()).exp() * (-final_dist).exp(); episode_reward += 200.0 * (-w.abs()).exp() * (-final_dist).exp(); cumulative_reward += episode_reward; } let average_reward = cumulative_reward / num_episodes as f32; average_reward } }
let angular_deviation = ((x_hat - or_in_rad.cos()).powf(2.0) + (y_hat - or_in_rad.sin()).powf(2.0)) .sqrt() * (1.0 / (1.0 + tick as f32));
assignment_statement
[ { "content": "fn clamp(v: f32, min_max: (f32, f32)) -> f32 {\n\n let (min, max) = min_max;\n\n if v < min {\n\n return min;\n\n }\n\n if v > max {\n\n return max;\n\n }\n\n v\n\n}\n\n\n\npub struct DiffDriveModel {\n\n x: f32,\n\n y: f32,\n\n or_in_rad: f32,\n\n radius: f32,\n\n v: f32,\n\n w: f32,\n\n trail: Trail,\n\n scale: f32,\n", "file_path": "src/diff_drive_model.rs", "rank": 0, "score": 56192.38453555835 }, { "content": "pub trait Reward {\n\n fn reward(&self, fcn: &FCN, params: &Array1<f32>, num_episodes: usize) -> f32;\n\n}\n\n\n\n#[derive(Debug, Serialize, Deserialize)]\n\n#[serde(deny_unknown_fields)]\n\npub struct CEO {\n\n pub generations: usize,\n\n pub batch_size: usize,\n\n pub num_evalation_samples: usize,\n\n pub elite_frac: f32,\n\n pub initial_std: f32,\n\n pub noise_factor: f32,\n\n}\n\n\n\nimpl Default for CEO {\n\n fn default() -> CEO {\n\n CEO {\n\n generations: 300,\n\n batch_size: 50,\n", "file_path": "src/ceo.rs", "rank": 1, "score": 43403.153783493144 }, { "content": "\n\nimpl fmt::Display for FCN {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n write!(\n\n f,\n\n \"fcn, layers={:?}, num params={}\",\n\n self.layers,\n\n self.params.len()\n\n )\n\n }\n\n}\n\n\n\nimpl FCN {\n\n pub fn new(layers: Vec<(usize, Activation)>) -> FCN {\n\n assert!(\n\n layers.len() >= 2,\n\n \"Trying to create a model with less than 2 layers.\"\n\n );\n\n let num_params = {\n\n let mut num_params = 0;\n", "file_path": "src/fcn.rs", "rank": 2, "score": 16513.144805225213 }, { "content": "use ndarray::prelude::*;\n\nuse ndarray_rand::rand_distr::Uniform;\n\nuse ndarray_rand::RandomExt;\n\nuse serde::{Deserialize, Serialize};\n\nuse std::fmt;\n\n\n\n#[derive(Debug, Serialize, Deserialize)]\n\n#[serde(deny_unknown_fields)]\n\npub enum Activation {\n\n Linear,\n\n LeakyReLu(f32),\n\n Sigmoid,\n\n}\n\n\n\n#[derive(Debug, Serialize, Deserialize)]\n\n#[serde(deny_unknown_fields)]\n\npub struct FCN {\n\n layers: Vec<(usize, Activation)>,\n\n params: 
Array1<f32>,\n\n}\n", "file_path": "src/fcn.rs", "rank": 3, "score": 16512.898589381777 }, { "content": " for i in 1..layers.len() {\n\n num_params += (layers[i - 1].0 + 1) * layers[i].0;\n\n }\n\n num_params\n\n };\n\n FCN {\n\n layers: layers,\n\n params: //Array::from_elem((num_params,), 0.01),\n\n Array::random(num_params, Uniform::new(0.0, 1.0)),\n\n }\n\n }\n\n\n\n pub fn params(&self) -> &Array1<f32> {\n\n &self.params\n\n }\n\n\n\n pub fn set_params(&mut self, new_params: Array1<f32>) {\n\n self.params = new_params;\n\n }\n\n\n", "file_path": "src/fcn.rs", "rank": 4, "score": 16510.38587487348 }, { "content": " /// Clones input but not params.\n\n pub fn at_with(&self, input: &Array1<f32>, params: &Array1<f32>) -> Array1<f32> {\n\n assert_eq!(input.len(), self.layers[0].0, \"Invalid input len for fcn\");\n\n assert_eq!(\n\n params.len(),\n\n self.params.len(),\n\n \"Invalid params len for fcn\"\n\n );\n\n let mut params_offset = 0;\n\n let mut output = input.to_owned();\n\n output = match &self.layers[0].1 {\n\n Activation::Linear => output,\n\n Activation::LeakyReLu(leak) => output.mapv(|e| if e > 0.0 { e } else { e * leak }),\n\n Activation::Sigmoid => output.mapv(|e| 1.0 / (1.0 + (-e).exp())),\n\n };\n\n for i in 1..self.layers.len() {\n\n let prev_layer_dof = self.layers[i - 1].0;\n\n let curr_layer_dof = self.layers[i].0;\n\n let curr_layer_activation = &self.layers[i].1;\n\n // let now = Instant::now();\n", "file_path": "src/fcn.rs", "rank": 5, "score": 16507.32846658502 }, { "content": " // println!(\"2:{}\", now.elapsed().as_nanos());\n\n }\n\n output\n\n }\n\n\n\n pub fn at(&self, input: &Array1<f32>) -> Array1<f32> {\n\n self.at_with(&input, &self.params)\n\n }\n\n}\n", "file_path": "src/fcn.rs", "rank": 6, "score": 16505.332207519536 }, { "content": " let matrix = params\n\n .slice(s![\n\n params_offset..(params_offset + prev_layer_dof * curr_layer_dof)\n\n ])\n\n .into_shape((curr_layer_dof, prev_layer_dof))\n\n .unwrap();\n\n params_offset += 
prev_layer_dof * curr_layer_dof;\n\n let bias = params\n\n .slice(s![params_offset..(params_offset + curr_layer_dof)])\n\n .into_shape(curr_layer_dof)\n\n .unwrap();\n\n // println!(\"1:{}\", now.elapsed().as_nanos());\n\n // let now = Instant::now();\n\n output = matrix.dot(&output) + bias;\n\n output = match curr_layer_activation {\n\n Activation::Linear => output,\n\n Activation::LeakyReLu(leak) => output.mapv(|e| if e > 0.0 { e } else { e * leak }),\n\n Activation::Sigmoid => output.mapv(|e| 1.0 / (1.0 + (-e).exp())),\n\n };\n\n params_offset += curr_layer_dof;\n", "file_path": "src/fcn.rs", "rank": 7, "score": 16502.30408163802 }, { "content": " }\n\n\n\n pub fn control(&self) -> (f32, f32) {\n\n (self.v, self.w)\n\n }\n\n\n\n // pub fn state(&self) -> (f32, f32, f32) {\n\n // (self.x, self.y, self.or_in_rad)\n\n // }\n\n\n\n pub fn scaled_state(&self) -> (f32, f32, f32) {\n\n (\n\n (self.goal.0 - self.x) / self.scale,\n\n (self.goal.1 - self.y) / self.scale,\n\n self.or_in_rad,\n\n )\n\n }\n\n\n\n pub fn increment_control(&mut self, dv: f32, dw: f32) {\n\n self.v = clamp(self.v + dv, DiffDriveModel::V_BOUNDS);\n\n self.w = clamp(self.w + dw, DiffDriveModel::W_BOUNDS);\n\n }\n\n\n\n pub fn set_control(&mut self, v: f32, w: f32) {\n\n self.v = clamp(v, DiffDriveModel::V_BOUNDS);\n\n self.w = clamp(w, DiffDriveModel::W_BOUNDS);\n\n }\n\n}\n", "file_path": "src/diff_drive_model.rs", "rank": 8, "score": 13653.93364444202 }, { "content": " goal: (f32, f32),\n\n}\n\n\n\nimpl DiffDriveModel {\n\n const V_BOUNDS: (f32, f32) = (0.0, 20.0);\n\n const W_BOUNDS: (f32, f32) = (-1.0, 1.0);\n\n const TRIAL_LENGTH: usize = 500;\n\n\n\n pub fn spawn_randomly(\n\n x_bounds: (f32, f32),\n\n y_bounds: (f32, f32),\n\n or_bounds: (f32, f32),\n\n radius: f32,\n\n goal: (f32, f32),\n\n ) -> DiffDriveModel {\n\n // Compile time asserts\n\n const_assert!(DiffDriveModel::V_BOUNDS.0 < DiffDriveModel::V_BOUNDS.1);\n\n const_assert!(DiffDriveModel::W_BOUNDS.0 < 
DiffDriveModel::W_BOUNDS.1);\n\n // Spawn at random location\n\n let mut rng = rand::thread_rng();\n", "file_path": "src/diff_drive_model.rs", "rank": 9, "score": 13653.076280712043 }, { "content": "use ggez::nalgebra::Point2;\n\nuse ggez::*;\n\nuse rand::Rng;\n\nuse trail::Trail;\n\n\n\nmod trail {\n\n use super::*;\n\n\n\n pub struct Trail {\n\n queue: Vec<Point2<f32>>,\n\n limit: usize,\n\n }\n\n\n\n impl Trail {\n\n pub fn new(limit: usize) -> Trail {\n\n Trail {\n\n queue: Vec::with_capacity(limit),\n\n limit: limit,\n\n }\n\n }\n", "file_path": "src/diff_drive_model.rs", "rank": 10, "score": 13650.620179715605 }, { "content": "\n\n pub fn add(&mut self, x: f32, y: f32) {\n\n if self.queue.len() > 0 {\n\n let new_point = Point2::new(x, y);\n\n if self.queue[self.queue.len() - 1] != new_point {\n\n self.queue.push(Point2::new(x, y));\n\n }\n\n } else {\n\n self.queue.push(Point2::new(x, y));\n\n }\n\n if self.queue.len() > self.limit {\n\n self.queue.remove(0);\n\n }\n\n }\n\n\n\n pub fn draw(&self, ctx: &mut ggez::Context) -> ggez::GameResult {\n\n if self.queue.len() > 1 {\n\n let line = graphics::Mesh::new_line(\n\n ctx,\n\n &self.queue,\n", "file_path": "src/diff_drive_model.rs", "rank": 11, "score": 13647.709260412345 }, { "content": " scale: scale,\n\n goal: goal,\n\n }\n\n }\n\n\n\n pub fn update(&mut self, dt: f32) -> ggez::GameResult {\n\n self.x += self.v * self.or_in_rad.cos() * dt;\n\n self.y += self.v * self.or_in_rad.sin() * dt;\n\n self.or_in_rad += self.w * dt;\n\n self.trail.add(self.x, self.y);\n\n Ok(())\n\n }\n\n\n\n pub fn draw(&self, ctx: &mut ggez::Context) -> ggez::GameResult {\n\n let circle = graphics::Mesh::new_circle(\n\n ctx,\n\n graphics::DrawMode::fill(),\n\n Point2::new(self.x, self.y),\n\n self.radius,\n\n 0.1,\n", "file_path": "src/diff_drive_model.rs", "rank": 12, "score": 13646.743621973714 }, { "content": " let x = x_bounds.0 + (x_bounds.1 - x_bounds.0) * rng.gen::<f32>();\n\n let y = y_bounds.0 + (y_bounds.1 - y_bounds.0) * 
rng.gen::<f32>();\n\n let or = or_bounds.0 + (or_bounds.1 - or_bounds.0) * rng.gen::<f32>();\n\n // Trail config\n\n let mut trail = Trail::new(DiffDriveModel::TRIAL_LENGTH);\n\n trail.add(x, y);\n\n // Normalized scale w.r.t goal\n\n let scale = {\n\n let scale_x = (goal.0 - x).abs();\n\n let scale_y = (goal.1 - y).abs();\n\n (scale_x * scale_x + scale_y * scale_y).sqrt()\n\n };\n\n DiffDriveModel {\n\n x: x,\n\n y: y,\n\n or_in_rad: or,\n\n radius: radius,\n\n v: 0.0,\n\n w: 0.0,\n\n trail: trail,\n", "file_path": "src/diff_drive_model.rs", "rank": 13, "score": 13646.381012483755 }, { "content": " graphics::WHITE,\n\n )?;\n\n graphics::draw(ctx, &circle, (Point2::new(0.0, 0.0),))?;\n\n\n\n let line = graphics::Mesh::new_line(\n\n ctx,\n\n &[\n\n Point2::new(self.x, self.y),\n\n Point2::new(\n\n self.x + self.radius * self.or_in_rad.cos(),\n\n self.y + self.radius * self.or_in_rad.sin(),\n\n ),\n\n ],\n\n 2.0,\n\n graphics::Color::from((1.0, 0.0, 0.0)),\n\n )?;\n\n graphics::draw(ctx, &line, (Point2::new(0.0, 0.0),))?;\n\n\n\n self.trail.draw(ctx)?;\n\n Ok(())\n", "file_path": "src/diff_drive_model.rs", "rank": 14, "score": 13643.14743999857 }, { "content": " 2.0,\n\n graphics::Color::from((0.0, 1.0, 1.0)),\n\n )?;\n\n graphics::draw(ctx, &line, (Point2::new(0.0, 0.0),))?;\n\n }\n\n Ok(())\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/diff_drive_model.rs", "rank": 15, "score": 13642.714450047255 }, { "content": " num_evalation_samples: 300,\n\n elite_frac: 0.25,\n\n initial_std: 2.0,\n\n noise_factor: 2.0,\n\n }\n\n }\n\n}\n\n\n\nimpl CEO {\n\n pub fn optimize(\n\n &self,\n\n fcn: &mut FCN,\n\n reward: &(dyn Reward + std::marker::Sync),\n\n ) -> Result<Array1<f32>, NormalError> {\n\n let n_elite = (self.batch_size as f32 * self.elite_frac).round().floor() as usize;\n\n let mut noise_std = Array::from_elem((fcn.params().len(),), self.initial_std);\n\n for generation in 0..self.generations {\n\n let (sorted_th_means, mean_reward) = {\n\n let mut reward_th_mean_tuples = 
(0..self.batch_size)\n\n .into_par_iter()\n", "file_path": "src/ceo.rs", "rank": 16, "score": 11.204215353192627 }, { "content": "use ggez::nalgebra::Point2;\n\nuse ggez::*;\n\nuse rand::Rng;\n\n\n\n#[derive(Debug)]\n\npub struct Goal {\n\n x: f32,\n\n y: f32,\n\n}\n\n\n\nimpl Goal {\n\n pub fn in_region(x_bounds: (f32, f32), y_bounds: (f32, f32)) -> Goal {\n\n let mut rng = rand::thread_rng();\n\n let x = x_bounds.0 + (x_bounds.1 - x_bounds.0) * rng.gen::<f32>();\n\n let y = y_bounds.0 + (y_bounds.1 - y_bounds.0) * rng.gen::<f32>();\n\n Goal { x: x, y: y }\n\n }\n\n\n\n pub fn coordinates(&self) -> (f32, f32) {\n\n (self.x, self.y)\n", "file_path": "src/goal.rs", "rank": 17, "score": 8.330536690847989 }, { "content": " .map(|_| {\n\n let randn_noise: Array1<f32> =\n\n Array::random(fcn.params().len(), StandardNormal);\n\n let scaled_randn_noise = randn_noise * &noise_std;\n\n let perturbed_params = scaled_randn_noise + fcn.params();\n\n (\n\n reward.reward(fcn, &perturbed_params, self.num_evalation_samples),\n\n perturbed_params,\n\n )\n\n })\n\n .collect::<Vec<(f32, Array1<f32>)>>();\n\n reward_th_mean_tuples.sort_by(|a, b| a.0.partial_cmp(&b.0).unwrap());\n\n reward_th_mean_tuples.reverse();\n\n let (rewards, sorted_th_means): (Vec<_>, Vec<_>) =\n\n reward_th_mean_tuples.into_iter().unzip();\n\n (\n\n sorted_th_means,\n\n rewards.iter().sum::<f32>() / rewards.len() as f32,\n\n )\n\n };\n", "file_path": "src/ceo.rs", "rank": 18, "score": 7.877664402232485 }, { "content": "use crate::fcn::*;\n\nuse ndarray::prelude::*;\n\nuse ndarray::stack;\n\nuse ndarray_rand::rand_distr::{NormalError, StandardNormal};\n\nuse ndarray_rand::RandomExt;\n\nuse rayon::prelude::*;\n\nuse serde::{Deserialize, Serialize};\n\n\n", "file_path": "src/ceo.rs", "rank": 19, "score": 7.284698079659927 }, { "content": "#[macro_use]\n\nextern crate static_assertions;\n\npub mod ceo;\n\npub mod diff_drive_model;\n\npub mod fcn;\n\npub mod goal;\n", "file_path": "src/lib.rs", "rank": 20, "score": 
7.134774168952739 }, { "content": " let elite_ths = sorted_th_means\n\n .iter()\n\n .take(n_elite)\n\n .map(|th| th.slice(s![..]))\n\n .collect::<Vec<ArrayView1<f32>>>();\n\n let elite_ths = stack(Axis(0), &elite_ths)\n\n .unwrap()\n\n .into_shape((n_elite, fcn.params().len()))\n\n .unwrap();\n\n fcn.set_params(elite_ths.mean_axis(Axis(0)).unwrap());\n\n noise_std = elite_ths.std_axis(Axis(0), 0.0);\n\n noise_std += self.noise_factor / (generation + 1) as f32;\n\n println!(\n\n \"generation={} mean_reward={:?} reward_with_current_th={:?}, th_std_mean={:?}\",\n\n generation + 1,\n\n mean_reward,\n\n reward.reward(fcn, &fcn.params(), self.num_evalation_samples),\n\n noise_std.mean(),\n\n );\n\n }\n\n Ok(noise_std)\n\n }\n\n}\n", "file_path": "src/ceo.rs", "rank": 21, "score": 6.867607732797217 }, { "content": "## demonstration\n\n\n\n- Sin fitting.\n\n\n\n![](./github/sin.png)\n\n\n\n- Exp fitting.\n\n\n\n![](./github/exp.png)\n\n\n\n- The following video illustrates a few neural-network controlled differential-drive agents.\n\n\n\n[![](http://img.youtube.com/vi/4jXgBfBq2UI/0.jpg)](http://www.youtube.com/watch?v=4jXgBfBq2UI)\n\n\n\n## roadmap\n\n- Controller impl.\n\n - [x] Conv, Relu needed?\n\n - Probably not. Since for the input (x, y, th, xg, yg) probably there no local relations or sequential memory required.\n\n - A simple strategy would be to orient towards goal. True theta can be obtained by a function of (x, y, xg, yg). 
w and v can be given to reduce theta reside and distance residue.\n\n - [x] Variable number of dynamic dof layers.\n\n - [x] Per layer activations.\n\n - [x] Random param initialization.\n\n - [x] Make ceo() a struct.\n\n - [x] Seperate bin crate for each network.\n\n - Speedup\n\n - [x] Parallelize.\n\n - [x] sin().\n\n - [x] exp().\n\n- Controller design.\n\n - Model.\n\n - [x] Input design & normalization.\n\n - [x] Hidden layer design.\n\n - [x] Output design.\n\n - [x] Param init design.\n\n - Reward function.\n\n - [x] Different reward functions.\n\n - [x] Randomized start poses.\n\n - [x] Randomized goals.\n\n - Optimizor.\n\n - [x] CEO.\n\n - Scenarios\n\n - [x] No obstacles.\n\n - Goal\n\n - [x] Position\n\n - [x] Should network determine stopping condition?\n\n - Simulator\n\n - [x] Interactive differential drive model.\n\n - [x] Constraints on controls.\n\n- [x] Report and demo.\n\n\n\n- Future work\n\n - [ ] Goal orientation\n\n - [ ] Wall boundaries.\n\n - [ ] Maybe move generation logic inside model? 
Removes into shapes a lot that way.\n\n - [ ] median().\n\n - [ ] Step level optimization vs Trajectory level optimization.\n\n - [ ] Known static obstacles.\n", "file_path": "README.md", "rank": 22, "score": 5.947816997072291 }, { "content": " }\n\n\n\n pub fn draw(&self, ctx: &mut ggez::Context) -> ggez::GameResult {\n\n let circle = graphics::Mesh::new_circle(\n\n ctx,\n\n graphics::DrawMode::fill(),\n\n Point2::new(self.x, self.y),\n\n 5.0,\n\n 0.1,\n\n graphics::Color::from((0.0, 1.0, 0.0)),\n\n )?;\n\n graphics::draw(ctx, &circle, (Point2::new(0.0, 0.0),))\n\n }\n\n}\n", "file_path": "src/goal.rs", "rank": 23, "score": 4.807886473619959 }, { "content": "# wall-e\n\n\n\n## description\n\n- A neural-network controller for a differential-drive agent to reach a goal.\n\n- A simple fully-connected network with ReLu activations is used as controller.\n\n- Noisy cross-entroy optimizer (ceo), a gradient-less optimization method is used for network optimization.\n\n\n\n## code\n\n- The code is written in stable `rust`.\n\n- A library named `wall-e` is implemented, which is used in the bin crates.\n\n- The bin crates are the following\n\n - sin: fits sin function using ceo optmized neural-network.\n\n - exp: fits exp function using ceo optmized neural-network.\n\n - sim: provides a simulator for controlling differential-drive agent manually.\n\n - rl: optimizes a neural-network controller for a differential-drive agent to reach a goal.\n\n- The design of network, reward function and agent can be found in `report/report.tex`.\n\n - To compile it to pdf, use `latexmk -pdf report.tex`.\n\n\n\n## documentation\n\n- The documentation of the code is itself.\n\n\n\n## usage\n\n- Install stable rust and cargo.\n\n- Use `cargo run --release --bin rl sin` to fit a sin function.\n\n- Use `cargo run --release --bin rl exp` to fit an exp function.\n\n- Use `cargo run --release --bin rl sim` to start a simulator and control a differential-drive agent manually.\n\n - `up down` 
increases or decrease linesr speed.\n\n - `left right` change angular velocity.\n\n - `s` stop.\n\n- Use `cargo run --release --bin rl` to run optimization, save the experiment and visualize it.\n\n - The saved file can be tweaked by hand to change the setting.\n\n - Ex. Spawn regions of agent and goal can be changed.\n\n- Use `cargo run --release --bin rl <path-to-json>` to visualize experiment.\n\n - `r` respawn agent and goal.\n\n - `p` toggle play/pause simulator.\n\n - `pageup pagedown` change `dt` of simulation.\n\n\n", "file_path": "README.md", "rank": 24, "score": 4.223882410305446 } ]
Rust
src/iter/cmp.rs
Bergmann89/aspar
e3d7c56297232aa66e720cc0766b1c25bc11420d
use std::cmp::{Ord, Ordering, PartialEq, PartialOrd}; use crate::{Driver, Executor, IndexedParallelIterator, ParallelIterator, WithIndexedProducer}; /* Cmp */ pub struct Cmp<XA, XB> { iterator_a: XA, iterator_b: XB, } impl<XA, XB> Cmp<XA, XB> { pub fn new(iterator_a: XA, iterator_b: XB) -> Self { Self { iterator_a, iterator_b, } } } impl<'a, XA, XB, I> Driver<'a, Ordering, Option<Ordering>> for Cmp<XA, XB> where XA: IndexedParallelIterator<'a, Item = I> + WithIndexedProducer<'a, Item = I>, XB: IndexedParallelIterator<'a, Item = I> + WithIndexedProducer<'a, Item = I>, I: Ord + Send + 'a, { fn exec_with<E>(self, executor: E) -> E::Result where E: Executor<'a, Ordering, Option<Ordering>>, { let Self { iterator_a, iterator_b, } = self; let len_a = iterator_a.len_hint(); let len_b = iterator_b.len_hint(); let ord_len = len_a.cmp(&len_b); let executor = executor.into_inner(); let inner = iterator_a .zip(iterator_b) .map(|(a, b)| Ord::cmp(&a, &b)) .find_first(|ord| ord != &Ordering::Equal) .exec_with(executor); E::map(inner, move |inner| inner.unwrap_or(ord_len)) } } /* PartialCmp */ pub struct PartialCmp<XA, XB> { iterator_a: XA, iterator_b: XB, } impl<XA, XB> PartialCmp<XA, XB> { pub fn new(iterator_a: XA, iterator_b: XB) -> Self { Self { iterator_a, iterator_b, } } } impl<'a, XA, XB, I> Driver<'a, Option<Ordering>, Option<Option<Ordering>>> for PartialCmp<XA, XB> where XA: IndexedParallelIterator<'a, Item = I> + WithIndexedProducer<'a, Item = I>, XB: IndexedParallelIterator<'a, Item = I> + WithIndexedProducer<'a, Item = I>, I: PartialOrd + Send + 'a, { fn exec_with<E>(self, executor: E) -> E::Result where E: Executor<'a, Option<Ordering>, Option<Option<Ordering>>>, { let Self { iterator_a, iterator_b, } = self; let len_a = iterator_a.len_hint(); let len_b = iterator_b.len_hint(); let ord_len = len_a.cmp(&len_b); let executor = executor.into_inner(); let inner = iterator_a .zip(iterator_b) .map(|(a, b)| PartialOrd::partial_cmp(&a, &b)) .find_first(|ord| ord != 
&Some(Ordering::Equal)) .exec_with(executor); E::map(inner, move |inner| inner.unwrap_or(Some(ord_len))) } } /* Equal */ pub struct Equal<XA, XB> { iterator_a: XA, iterator_b: XB, expected: bool, } impl<XA, XB> Equal<XA, XB> { pub fn new(iterator_a: XA, iterator_b: XB, expected: bool) -> Self { Self { iterator_a, iterator_b, expected, } } } impl<'a, XA, XB, I> Driver<'a, bool, Option<bool>> for Equal<XA, XB> where XA: IndexedParallelIterator<'a, Item = I> + WithIndexedProducer<'a, Item = I>, XB: IndexedParallelIterator<'a, Item = I> + WithIndexedProducer<'a, Item = I>, I: PartialEq + Send + 'a, { fn exec_with<E>(self, executor: E) -> E::Result where E: Executor<'a, bool, Option<bool>>, { let Self { iterator_a, iterator_b, expected, } = self; let len_a = iterator_a.len_hint(); let len_b = iterator_b.len_hint(); if (len_a == len_b) ^ expected { return executor.ready(false); } iterator_a .zip(iterator_b) .all(move |(x, y)| PartialEq::eq(&x, &y) == expected) .exec_with(executor) } } /* Compare */ pub struct Compare<XA, XB> { iterator_a: XA, iterator_b: XB, ord: Ordering, ord_opt: Option<Ordering>, } impl<XA, XB> Compare<XA, XB> { pub fn new(iterator_a: XA, iterator_b: XB, ord: Ordering, ord_opt: Option<Ordering>) -> Self { Self { iterator_a, iterator_b, ord, ord_opt, } } } impl<'a, XA, XB, I> Driver<'a, bool, Option<Ordering>, Option<Option<Ordering>>> for Compare<XA, XB> where XA: IndexedParallelIterator<'a, Item = I> + WithIndexedProducer<'a, Item = I>, XB: IndexedParallelIterator<'a, Item = I> + WithIndexedProducer<'a, Item = I>, I: PartialOrd + Send + 'a, { fn exec_with<E>(self, executor: E) -> E::Result where E: Executor<'a, bool, Option<Ordering>, Option<Option<Ordering>>>, { let Self { iterator_a, iterator_b, ord, ord_opt, } = self; let executor = executor.into_inner(); let inner = PartialCmp::new(iterator_a, iterator_b).exec_with(executor); E::map(inner, move |inner| inner == Some(ord) || inner == ord_opt) } }
use std::cmp::{Ord, Ordering, PartialEq, PartialOrd}; use crate::{Driver, Executor, IndexedParallelIterator, ParallelIterator, WithIndexedProducer}; /* Cmp */ pub struct Cmp<XA, XB> { iterator_a: XA, iterator_b: XB, } impl<XA, XB> Cmp<XA, XB> { pub fn new(iterator_a: XA, iterator_b: XB) -> Self { Self { iterator_a, iterator_b, } } } impl<'a, XA, XB, I> Driver<'a, Ordering, Option<Ordering>> for Cmp<XA, XB> where XA: IndexedParallelIterator<'a, Item = I> + WithIndexedProducer<'a, Item = I>, XB: IndexedParallelIterator<'a, Item = I> + WithIndexedProducer<'a, Item = I>, I: Ord + Send + 'a, { fn exec_with<E>(self, executor: E) -> E::Result where E: Executor<'a, Ordering, Option<Ordering>>, { let Self { iterator_a, iterator_b, } = self; let len_a = iterator_a.len_hint(); let len_b = iterator_b.len_hint(); let ord_len = len_a.cmp(&len_b); let executor = executor.into_inner(); let inner = iterator_a .zip(iterator_b) .map(|(a, b)| Ord::cmp(&a, &b)) .find_first(|ord| ord != &Ordering::Equal) .exec_with(executor); E::map(inner, move |inner| inner.unwrap_or(ord_len)) } } /* PartialCmp */ pub struct PartialCmp<XA, XB> { iterator_a: XA, iterator_b: XB, } impl<XA, XB> PartialCmp<XA, XB> { pub fn new(iterator_a: XA, iterator_b: XB) -> Self { Self { iterator_a, iterator_b, } } } impl<'a, XA, XB, I> Driver<'a, Option<Ordering>, Option<Option<Ordering>>> for PartialCmp<XA, XB> where XA: IndexedParallelIterator<'a, Item = I> + WithIndexedProducer<'a, Item = I>, XB: IndexedParallelIterator<'a, Item = I> + WithIndexedProducer<'a, Item = I>, I: PartialOrd + Send + 'a, { fn exec_with<E>(self, executor: E) -> E::Result where E: Executor<'a, Option<Ordering>, Option<Option<Ordering>>>, { let Self { iterator_a, iterator_b, } = self; let len_a = iterator_a.len_hint(); let len_b = iterator_b.len_hint(); let ord_len = len_a.cmp(&len_b); let executor = executor.into_inner(); let inner = iterator_a .zip(iterator_b) .map(|(a, b)| PartialOrd::partial_cmp(&a, &b)) .find_first(|ord| ord != 
&Some(Ordering::Equal)) .exec_with(executor); E::map(inner, move |inner| inner.unwrap_or(Some(ord_len))) } } /* Equal */ pub struct Equal<XA, XB> { iterator_a: XA, iterator_b: XB, expected: bool, } impl<XA, XB> Equal<XA, XB> { pub fn new(iterator_a: XA, iterator_b: XB, expected: bool) -> Self { Self { iterator_a, iterator_b, expected, } } } impl<'a, XA, XB, I> Driver<'a, bool, Option<bool>> for Equal<XA, XB> where XA: IndexedParallelIterator<'a, Item = I> + WithIndexedProducer<'a, Item = I>, XB: IndexedParallelIterator<'a, Item = I> + WithIndexedProducer<'a, Item = I>, I: PartialEq + Send + 'a, { fn exec_with<E>(self, executor: E) -> E::Result where E: Executor<'a, bool, Option<bool>>, { let Self { iterator_a, iterator_b, expected, } = self; let len_a = iterator_a.len_hint(); let len_b = iterator_b.len_hint(); if (len_a == len_b) ^ expected { return executor.ready(false); } iterator_a .zip(iter
pub fn new(iterator_a: XA, iterator_b: XB, ord: Ordering, ord_opt: Option<Ordering>) -> Self { Self { iterator_a, iterator_b, ord, ord_opt, } } } impl<'a, XA, XB, I> Driver<'a, bool, Option<Ordering>, Option<Option<Ordering>>> for Compare<XA, XB> where XA: IndexedParallelIterator<'a, Item = I> + WithIndexedProducer<'a, Item = I>, XB: IndexedParallelIterator<'a, Item = I> + WithIndexedProducer<'a, Item = I>, I: PartialOrd + Send + 'a, { fn exec_with<E>(self, executor: E) -> E::Result where E: Executor<'a, bool, Option<Ordering>, Option<Option<Ordering>>>, { let Self { iterator_a, iterator_b, ord, ord_opt, } = self; let executor = executor.into_inner(); let inner = PartialCmp::new(iterator_a, iterator_b).exec_with(executor); E::map(inner, move |inner| inner == Some(ord) || inner == ord_opt) } }
ator_b) .all(move |(x, y)| PartialEq::eq(&x, &y) == expected) .exec_with(executor) } } /* Compare */ pub struct Compare<XA, XB> { iterator_a: XA, iterator_b: XB, ord: Ordering, ord_opt: Option<Ordering>, } impl<XA, XB> Compare<XA, XB> {
random
[]
Rust
src/fake_device.rs
szeged/blurmock
a538c2c5eaf19071d964b09819d9ca8fdebfb1a1
use core::ops::Deref; use fake_adapter::FakeBluetoothAdapter; use fake_service::FakeBluetoothGATTService; use hex; use std::collections::HashMap; use std::error::Error; use std::sync::{Arc, Mutex}; #[derive(Clone, Debug)] pub struct FakeBluetoothDevice { id: Arc<Mutex<String>>, adapter: Arc<FakeBluetoothAdapter>, address: Arc<Mutex<String>>, appearance: Arc<Mutex<Option<u16>>>, class: Arc<Mutex<u32>>, gatt_services: Arc<Mutex<Vec<Arc<FakeBluetoothGATTService>>>>, is_paired: Arc<Mutex<bool>>, is_connectable: Arc<Mutex<bool>>, is_connected: Arc<Mutex<bool>>, is_trusted: Arc<Mutex<bool>>, is_blocked: Arc<Mutex<bool>>, is_legacy_pairing: Arc<Mutex<bool>>, uuids: Arc<Mutex<Vec<String>>>, name: Arc<Mutex<Option<String>>>, icon: Arc<Mutex<String>>, alias: Arc<Mutex<String>>, product_version: Arc<Mutex<u32>>, rssi: Arc<Mutex<Option<i16>>>, tx_power: Arc<Mutex<Option<i16>>>, modalias: Arc<Mutex<String>>, manufacturer_data: Arc<Mutex<Option<HashMap<u16, Vec<u8>>>>>, service_data: Arc<Mutex<Option<HashMap<String, Vec<u8>>>>>, } impl FakeBluetoothDevice { pub fn new(id: String, adapter: Arc<FakeBluetoothAdapter>, address: String, appearance: Option<u16>, class: u32, gatt_services: Vec<Arc<FakeBluetoothGATTService>>, is_paired: bool, is_connectable: bool, is_connected: bool, is_trusted: bool, is_blocked: bool, is_legacy_pairing: bool, uuids: Vec<String>, name: Option<String>, icon: String, alias: String, product_version: u32, rssi: Option<i16>, tx_power: Option<i16>, modalias: String, manufacturer_data: Option<HashMap<u16, Vec<u8>>>, service_data: Option<HashMap<String, Vec<u8>>>) -> Arc<FakeBluetoothDevice> { if let Ok(existing_device) = adapter.get_device(id.clone()) { return existing_device; } let device = Arc::new(FakeBluetoothDevice{ id: Arc::new(Mutex::new(id)), adapter: adapter.clone(), address: Arc::new(Mutex::new(address)), appearance: Arc::new(Mutex::new(appearance)), class: Arc::new(Mutex::new(class)), gatt_services: Arc::new(Mutex::new(gatt_services)), is_paired: 
Arc::new(Mutex::new(is_paired)), is_connectable: Arc::new(Mutex::new(is_connectable)), is_connected: Arc::new(Mutex::new(is_connected)), is_trusted: Arc::new(Mutex::new(is_trusted)), is_blocked: Arc::new(Mutex::new(is_blocked)), is_legacy_pairing: Arc::new(Mutex::new(is_legacy_pairing)), uuids: Arc::new(Mutex::new(uuids)), name: Arc::new(Mutex::new(name)), icon: Arc::new(Mutex::new(icon)), alias: Arc::new(Mutex::new(alias)), product_version: Arc::new(Mutex::new(product_version)), rssi: Arc::new(Mutex::new(rssi)), tx_power: Arc::new(Mutex::new(tx_power)), modalias: Arc::new(Mutex::new(modalias)), manufacturer_data: Arc::new(Mutex::new(manufacturer_data)), service_data: Arc::new(Mutex::new(service_data)), }); let _ = adapter.add_device(device.clone()); device } pub fn new_empty(adapter: Arc<FakeBluetoothAdapter>, device_id: String) -> Arc<FakeBluetoothDevice> { FakeBluetoothDevice::new( /*id*/ device_id, /*adapter*/ adapter, /*address*/ String::new(), /*appearance*/ None, /*class*/ 0, /*gatt_services*/ vec!(), /*is_paired*/ false, /*is_connectable*/ false, /*is_connected*/ false, /*is_trusted*/ false, /*is_blocked*/ false, /*is_legacy_pairing*/ false, /*uuids*/ vec!(), /*name*/ None, /*icon*/ String::new(), /*alias*/ String::new(), /*product_version*/ 0, /*rssi*/ None, /*tx_power*/ None, /*modalias*/ String::new(), /*manufacturer_data*/ None, /*service_data*/ None, ) } make_getter!(get_id, id); make_setter!(set_id, id); make_getter!(get_address, address, String); make_setter!(set_address, address, String); make_option_getter!(get_name, name, String); make_setter!(set_name, name, Option<String>); make_getter!(get_icon, icon, String); make_setter!(set_icon, icon, String); make_getter!(get_class, class, u32); make_setter!(set_class, class, u32); make_option_getter!(get_appearance, appearance, u16); make_setter!(set_appearance, appearance, Option<u16>); make_getter!(get_uuids, uuids, Vec<String>); make_setter!(set_uuids, uuids, Vec<String>); make_getter!(is_paired); 
make_setter!(set_paired, is_paired, bool); make_getter!(is_connectable); make_setter!(set_connectable, is_connectable, bool); make_getter!(is_connected); make_setter!(set_connected, is_connected, bool); make_getter!(is_trusted); make_setter!(set_trusted, is_trusted, bool); make_getter!(is_blocked); make_setter!(set_blocked, is_blocked, bool); make_getter!(get_alias, alias, String); make_setter!(set_alias, alias, String); make_getter!(is_legacy_pairing); make_setter!(set_legacy_pairing, is_legacy_pairing, bool); make_setter!(set_modalias, modalias, String); make_option_getter!(get_rssi, rssi, i16); make_setter!(set_rssi, rssi, Option<i16>); make_option_getter!(get_tx_power, tx_power, i16); make_setter!(set_tx_power, tx_power, Option<i16>); make_option_getter!(get_manufacturer_data, manufacturer_data, HashMap<u16, Vec<u8>>); make_setter!(set_manufacturer_data, manufacturer_data, Option<HashMap<u16, Vec<u8>>>); make_option_getter!(get_service_data, service_data, HashMap<String, Vec<u8>>); make_setter!(set_service_data, service_data, Option<HashMap<String, Vec<u8>>>); pub fn get_adapter(&self) -> Result<Arc<FakeBluetoothAdapter>, Box<Error>> { Ok(self.adapter.clone()) } pub fn pair(&self) -> Result<(), Box<Error>> { self.set_paired(true) } pub fn cancel_pairing(&self) -> Result<(), Box<Error>> { self.set_paired(false) } pub fn get_modalias(&self) -> Result<(String, u32, u32, u32), Box<Error>> { let cloned = self.modalias.clone(); let modalias = match cloned.lock() { Ok(guard) => guard.deref().clone(), Err(_) => return Err(Box::from("Could not get the value.")), }; let ids: Vec<&str> = modalias.split(":").collect(); let source = String::from(ids[0]); let vendor = hex::decode(&ids[1][1..5]).unwrap(); let product = hex::decode(&ids[1][6..10]).unwrap(); let device = hex::decode(&ids[1][11..15]).unwrap(); Ok((source, (vendor[0] as u32) * 16 * 16 + (vendor[1] as u32), (product[0] as u32) * 16 * 16 + (product[1] as u32), (device[0] as u32) * 16 * 16 + (device[1] as u32))) } 
pub fn get_vendor_id_source(&self) -> Result<String, Box<Error>> { let (vendor_id_source,_,_,_) = try!(self.get_modalias()); Ok(vendor_id_source) } pub fn get_vendor_id(&self) -> Result<u32, Box<Error>> { let (_,vendor_id,_,_) = try!(self.get_modalias()); Ok(vendor_id) } pub fn get_product_id(&self) -> Result<u32, Box<Error>> { let (_,_,product_id,_) = try!(self.get_modalias()); Ok(product_id) } pub fn get_device_id(&self) -> Result<u32, Box<Error>> { let (_,_,_,device_id) = try!(self.get_modalias()); Ok(device_id) } pub fn get_gatt_services(&self) -> Result<Vec<String>, Box<Error>> { if !(try!(self.is_connected())) { return Err(Box::from("Device not connected.")); } let cloned = self.gatt_services.clone(); let gatt_services = match cloned.lock() { Ok(guard) => guard.deref().clone(), Err(_) => return Err(Box::from("Could not get the value.")), }; Ok(gatt_services.into_iter().map(|s| s.get_id()).collect()) } pub fn get_gatt_service_structs(&self) -> Result<Vec<Arc<FakeBluetoothGATTService>>, Box<Error>> { if !(try!(self.is_connected())) { return Err(Box::from("Device not connected.")); } let cloned = self.gatt_services.clone(); let gatt_services = match cloned.lock() { Ok(guard) => guard.deref().clone(), Err(_) => return Err(Box::from("Could not get the value.")), }; Ok(gatt_services) } pub fn get_gatt_service(&self, id: String) -> Result<Arc<FakeBluetoothGATTService>, Box<Error>> { let services = try!(self.get_gatt_service_structs()); for service in services { let service_id = service.get_id(); if service_id == id { return Ok(service); } } Err(Box::from("No service exists with the given id.")) } pub fn add_service(&self, service: Arc<FakeBluetoothGATTService>) -> Result<(), Box<Error>> { let cloned = self.gatt_services.clone(); let mut gatt_services = match cloned.lock() { Ok(guard) => guard, Err(_) => return Err(Box::from("Could not get the value.")), }; Ok(gatt_services.push(service)) } pub fn remove_service(&self, id: String) -> Result<(), Box<Error>> { let 
cloned = self.gatt_services.clone(); let mut gatt_services = match cloned.lock() { Ok(guard) => guard, Err(_) => return Err(Box::from("Could not get the value.")), }; Ok(gatt_services.retain(|s| s.get_id() != id)) } pub fn connect_profile(&self, _uuid: String) -> Result<(), Box<Error>> { unimplemented!(); } pub fn disconnect_profile(&self, _uuid: String) -> Result<(), Box<Error>> { unimplemented!(); } pub fn connect(&self) -> Result<(), Box<Error>> { let is_connectable = try!(self.is_connectable()); let is_connected = try!(self.is_connected()); if is_connected { return Ok(()); } if is_connectable { return self.set_connected(true); } return Err(Box::from("Could not connect to the device.")); } pub fn disconnect(&self) -> Result<(), Box<Error>>{ let is_connected = try!(self.is_connected()); if is_connected { return self.set_connected(false); } return Err(Box::from("The device is not connected.")); } }
use core::ops::Deref; use fake_adapter::FakeBluetoothAdapter; use fake_service::FakeBluetoothGATTService; use hex; use std::collections::HashMap; use std::error::Error; use std::sync::{Arc, Mutex}; #[derive(Clone, Debug)] pub struct FakeBluetoothDevice { id: Arc<Mutex<String>>, adapter: Arc<FakeBluetoothAdapter>, address: Arc<Mutex<String>>, appearance: Arc<Mutex<Option<u16>>>, class: Arc<Mutex<u32>>, gatt_services: Arc<Mutex<Vec<Arc<FakeBluetoothGATTService>>>>, is_paired: Arc<Mutex<bool>>, is_connectable: Arc<Mutex<bool>>, is_connected: Arc<Mutex<bool>>, is_trusted: Arc<Mutex<bool>>, is_blocked: Arc<Mutex<bool>>, is_legacy_pairing: Arc<Mutex<bool>>, uuids: Arc<Mutex<Vec<String>>>, name: Arc<Mutex<Option<String>>>, icon: Arc<Mutex<String>>, alias: Arc<Mutex<String>>, product_version: Arc<Mutex<u32>>, rssi: Arc<Mutex<Option<i16>>>, tx_power: Arc<Mutex<Option<i16>>>, modalias: Arc<Mutex<String>>, manufacturer_data: Arc<Mutex<Option<HashMap<u16, Vec<u8>>>>>, service_data: Arc<Mutex<Option<HashMap<String, Vec<u8>>>>>, } impl FakeBluetoothDevice { pub fn new(id: String, adapter: Arc<FakeBluetoothAdapter>, address: String, appearance: Option<u16>, class: u32, gatt_services: Vec<Arc<FakeBluetoothGATTService>>, is_paired: bool, is_connectable: bool, is_connected: bool, is_trusted: bool, is_blocked: bool, is_legacy_pairing: bool, uuids: Vec<String>, name: Option<String>, icon: String, alias: String, product_version: u32, rssi: Option<i16>, tx_power: Option<i16>, modalias: String, manufacturer_data: Option<HashMap<u16, Vec<u8>>>, service_data: Option<HashMap<String, Vec<u8>>>) -> Arc<FakeBluetoothDevice> { if let Ok(existing_device) = adapter.get_device(id.clone()) { return existing_device; } let device = Arc::new(FakeBluetoothDevice{ id: Arc::new(Mutex::new(id)), adapter: adapter.clone(), address: Arc::new(Mutex::new(address)), appearance: Arc::new(Mutex::new(appearance)), class: Arc::new(Mutex::new(class)), gatt_services: Arc::new(Mutex::new(gatt_services)), is_paired: 
Arc::new(Mutex::new(is_paired)), is_connectable: Arc::new(Mutex::new(is_connectable)), is_connected: Arc::new(Mutex::new(is_connected)), is_trusted: Arc::new(Mutex::new(is_trusted)), is_blocked: Arc::new(Mutex::new(is_blocked)), is_legacy_pairing: Arc::new(Mutex::new(is_legacy_pairing)), uuids: Arc::new(Mutex::new(uuids)), name: Arc::new(Mutex::new(name)), icon: Arc::new(Mutex::new(icon)), alias: Arc::new(Mutex::new(alias)), product_version: Arc::new(Mutex::new(product_version)), rssi: Arc::new(Mutex::new(rssi)), tx_power: Arc::new(Mutex::new(tx_power)), modalias: Arc::new(Mutex::new(modalias)), manufacturer_data: Arc::new(Mutex::new(manufacturer_data)), service_data: Arc::new(Mutex::new(service_data)), }); let _ = adapter.add_device(device.clone()); device } pub fn new_empty(adapter: Arc<FakeBluetoothAdapter>, device_id: String) -> Arc<FakeBluetoothDevice> { FakeBluetoothDevice::new( /*id*/ device_id, /*adapter*/ adapter, /*address*/ String::new(), /*appearance*/ None, /*class*/ 0, /*gatt_services*/ vec!(), /*is_paired*/ false, /*is_connectable*/ false, /*is_connected*/ false, /*is_trusted*/ false, /*is_blocked*/ false, /*is_legacy_pairing*/ false, /*uuids*/ vec!(), /*name*/ None, /*icon*/ String::new(), /*alias*/ String::new(), /*product_version*/ 0, /*rssi*/ None, /*tx_power*/ None, /*modalias*/ String::new(), /*manufacturer_data*/ None, /*service_data*/ None, ) } make_getter!(get_id, id); make_setter!(set_id, id); make_getter!(get_address, address, String); make_setter!(set_address, address, String); make_option_getter!(get_name, name, String); make_setter!(set_name, name, Option<String>); make_getter!(get_icon, icon, String); make_setter!(set_icon, icon, String); make_getter!(get_class, class, u32); make_setter!(set_class, class, u32); make_option_getter!(get_appearance, appearance, u16); make_setter!(set_appearance, appearance, Option<u16>); make_getter!(get_uuids, uuids, Vec<String>); make_setter!(set_uuids, uuids, Vec<String>); make_getter!(is_paired); 
make_setter!(set_paired, is_paired, bool); make_getter!(is_connectable); make_setter!(set_connectable, is_connectable, bool); make_getter!(is_connected); make_setter!(set_connected, is_connected, bool); make_getter!(is_trusted); make_setter!(set_trusted, is_trusted, bool); make_getter!(is_blocked); make_setter!(set_blocked, is_blocked, bool); make_getter!(get_alias, alias, String); make_setter!(set_alias, alias, String); make_getter!(is_legacy_pairing); make_setter!(set_legacy_pairing, is_legacy_pairing, bool); make_setter!(set_modalias, modalias, String); make_option_getter!(get_rssi, rssi, i16); make_setter!(set_rssi, rssi, Option<i16>); make_option_getter!(get_tx_power, tx_power, i16); make_setter!(set_tx_power, tx_power, Option<i16>); make_option_getter!(get_manufacturer_data, manufacturer_data, HashMap<u16, Vec<u8>>); make_setter!(set_manufacturer_data, manufacturer_data, Option<HashMap<u16, Vec<u8>>>); make_option_getter!(get_service_data, service_data, HashMap<String, Vec<u8>>); make_setter!(set_service_data, service_data, Option<HashMap<String, Vec<u8>>>); pub fn get_adapter(&self) -> Result<Arc<FakeBluetoothAdapter>, Box<Error>> { Ok(self.adapter.clone()) } pub fn pair(&self) -> Result<(), Box<Error>> { self.set_paired(true) } pub fn cancel_pairing(&self) -> Result<(), Box<Error>> { self.set_paired(false) } pub fn get_modalias(&self) -> Result<(String, u32, u32, u32), Box<Error>> { let cloned = self.modalias.clone(); let modalias = match cloned.lock() { Ok(guard) => guard.deref().clone(), Err(_) => return Err(Box::from("Could not get the value.")), }; let ids: Vec<&str> = modalias.split(":").collect(); let source = String::from(ids[0]); let vendor = hex::decode(&ids[1][1..5]).unwrap(); let product = hex::decode(&ids[1][6..10]).unwrap(); let device = hex::decode(&ids[1][11..15]).unwrap();
} pub fn get_vendor_id_source(&self) -> Result<String, Box<Error>> { let (vendor_id_source,_,_,_) = try!(self.get_modalias()); Ok(vendor_id_source) } pub fn get_vendor_id(&self) -> Result<u32, Box<Error>> { let (_,vendor_id,_,_) = try!(self.get_modalias()); Ok(vendor_id) } pub fn get_product_id(&self) -> Result<u32, Box<Error>> { let (_,_,product_id,_) = try!(self.get_modalias()); Ok(product_id) } pub fn get_device_id(&self) -> Result<u32, Box<Error>> { let (_,_,_,device_id) = try!(self.get_modalias()); Ok(device_id) } pub fn get_gatt_services(&self) -> Result<Vec<String>, Box<Error>> { if !(try!(self.is_connected())) { return Err(Box::from("Device not connected.")); } let cloned = self.gatt_services.clone(); let gatt_services = match cloned.lock() { Ok(guard) => guard.deref().clone(), Err(_) => return Err(Box::from("Could not get the value.")), }; Ok(gatt_services.into_iter().map(|s| s.get_id()).collect()) } pub fn get_gatt_service_structs(&self) -> Result<Vec<Arc<FakeBluetoothGATTService>>, Box<Error>> { if !(try!(self.is_connected())) { return Err(Box::from("Device not connected.")); } let cloned = self.gatt_services.clone(); let gatt_services = match cloned.lock() { Ok(guard) => guard.deref().clone(), Err(_) => return Err(Box::from("Could not get the value.")), }; Ok(gatt_services) } pub fn get_gatt_service(&self, id: String) -> Result<Arc<FakeBluetoothGATTService>, Box<Error>> { let services = try!(self.get_gatt_service_structs()); for service in services { let service_id = service.get_id(); if service_id == id { return Ok(service); } } Err(Box::from("No service exists with the given id.")) } pub fn add_service(&self, service: Arc<FakeBluetoothGATTService>) -> Result<(), Box<Error>> { let cloned = self.gatt_services.clone(); let mut gatt_services = match cloned.lock() { Ok(guard) => guard, Err(_) => return Err(Box::from("Could not get the value.")), }; Ok(gatt_services.push(service)) } pub fn remove_service(&self, id: String) -> Result<(), Box<Error>> { let 
cloned = self.gatt_services.clone(); let mut gatt_services = match cloned.lock() { Ok(guard) => guard, Err(_) => return Err(Box::from("Could not get the value.")), }; Ok(gatt_services.retain(|s| s.get_id() != id)) } pub fn connect_profile(&self, _uuid: String) -> Result<(), Box<Error>> { unimplemented!(); } pub fn disconnect_profile(&self, _uuid: String) -> Result<(), Box<Error>> { unimplemented!(); } pub fn connect(&self) -> Result<(), Box<Error>> { let is_connectable = try!(self.is_connectable()); let is_connected = try!(self.is_connected()); if is_connected { return Ok(()); } if is_connectable { return self.set_connected(true); } return Err(Box::from("Could not connect to the device.")); } pub fn disconnect(&self) -> Result<(), Box<Error>>{ let is_connected = try!(self.is_connected()); if is_connected { return self.set_connected(false); } return Err(Box::from("The device is not connected.")); } }
Ok((source, (vendor[0] as u32) * 16 * 16 + (vendor[1] as u32), (product[0] as u32) * 16 * 16 + (product[1] as u32), (device[0] as u32) * 16 * 16 + (device[1] as u32)))
call_expression
[ { "content": "\n\n pub fn get_modalias(&self) -> Result<(String, u32, u32, u32), Box<Error>> {\n\n let cloned = self.modalias.clone();\n\n let modalias = match cloned.lock() {\n\n Ok(guard) => guard.deref().clone(),\n\n Err(_) => return Err(Box::from(\"Could not get the value.\")),\n\n };\n\n let ids: Vec<&str> = modalias.split(\":\").collect();\n\n\n\n let source = String::from(ids[0]);\n\n let vendor = hex::decode(&ids[1][1..5]).unwrap();\n\n let product = hex::decode(&ids[1][6..10]).unwrap();\n\n let device = hex::decode(&ids[1][11..15]).unwrap();\n\n\n\n Ok((source,\n\n (vendor[0] as u32) * 16 * 16 + (vendor[1] as u32),\n\n (product[0] as u32) * 16 * 16 + (product[1] as u32),\n\n (device[0] as u32) * 16 * 16 + (device[1] as u32)))\n\n }\n\n\n", "file_path": "src/fake_adapter.rs", "rank": 0, "score": 19225.500187286227 }, { "content": " pub fn get_vendor_id_source(&self) -> Result<String, Box<Error>> {\n\n let (vendor_id_source,_,_,_) = try!(self.get_modalias());\n\n Ok(vendor_id_source)\n\n }\n\n\n\n pub fn get_vendor_id(&self) -> Result<u32, Box<Error>> {\n\n let (_,vendor_id,_,_) = try!(self.get_modalias());\n\n Ok(vendor_id)\n\n }\n\n\n\n pub fn get_product_id(&self) -> Result<u32, Box<Error>> {\n\n let (_,_,product_id,_) = try!(self.get_modalias());\n\n Ok(product_id)\n\n }\n\n\n\n pub fn get_device_id(&self) -> Result<u32, Box<Error>> {\n\n let (_,_,_,device_id) = try!(self.get_modalias());\n\n Ok(device_id)\n\n }\n\n}\n", "file_path": "src/fake_adapter.rs", "rank": 1, "score": 19220.052764491877 }, { "content": " is_discoverable: Arc<Mutex<bool>>,\n\n is_pairable: Arc<Mutex<bool>>,\n\n pairable_timeout: Arc<Mutex<u32>>,\n\n discoverable_timeout: Arc<Mutex<u32>>,\n\n is_discovering: Arc<Mutex<bool>>,\n\n uuids: Arc<Mutex<Vec<String>>>,\n\n modalias: Arc<Mutex<String>>,\n\n}\n\n\n\nimpl FakeBluetoothAdapter {\n\n pub fn new(id: String,\n\n is_present: bool,\n\n is_powered: bool,\n\n can_start_discovery: bool,\n\n can_stop_discovery: bool,\n\n devices: 
Vec<Arc<FakeBluetoothDevice>>,\n\n ad_datas: Vec<String>,\n\n address: String,\n\n name: String,\n\n alias: String,\n", "file_path": "src/fake_adapter.rs", "rank": 2, "score": 19219.904339161632 }, { "content": "use core::ops::Deref;\n\nuse fake_device::FakeBluetoothDevice;\n\nuse fake_discovery_session::FakeBluetoothDiscoverySession;\n\nuse hex;\n\nuse std::error::Error;\n\nuse std::sync::{Arc, Mutex};\n\n\n\n#[derive(Clone, Debug)]\n\npub struct FakeBluetoothAdapter {\n\n id: Arc<Mutex<String>>,\n\n is_present: Arc<Mutex<bool>>,\n\n is_powered: Arc<Mutex<bool>>,\n\n can_start_discovery: Arc<Mutex<bool>>,\n\n can_stop_discovery: Arc<Mutex<bool>>,\n\n devices: Arc<Mutex<Vec<Arc<FakeBluetoothDevice>>>>,\n\n ad_datas: Arc<Mutex<Vec<String>>>,\n\n address: Arc<Mutex<String>>,\n\n name: Arc<Mutex<String>>,\n\n alias: Arc<Mutex<String>>,\n\n class: Arc<Mutex<u32>>,\n", "file_path": "src/fake_adapter.rs", "rank": 3, "score": 19219.410685988645 }, { "content": " class: u32,\n\n is_discoverable: bool,\n\n is_pairable: bool,\n\n pairable_timeout: u32,\n\n discoverable_timeout: u32,\n\n is_discovering: bool,\n\n uuids: Vec<String>,\n\n modalias: String)\n\n -> Arc<FakeBluetoothAdapter> {\n\n Arc::new(FakeBluetoothAdapter {\n\n id: Arc::new(Mutex::new(id)),\n\n is_present: Arc::new(Mutex::new(is_present)),\n\n is_powered: Arc::new(Mutex::new(is_powered)),\n\n can_start_discovery: Arc::new(Mutex::new(can_start_discovery)),\n\n can_stop_discovery: Arc::new(Mutex::new(can_stop_discovery)),\n\n devices: Arc::new(Mutex::new(devices)),\n\n ad_datas: Arc::new(Mutex::new(ad_datas)),\n\n address: Arc::new(Mutex::new(address)),\n\n name: Arc::new(Mutex::new(name)),\n\n alias: Arc::new(Mutex::new(alias)),\n", "file_path": "src/fake_adapter.rs", "rank": 4, "score": 19215.690603633462 }, { "content": " make_getter!(get_address, address, String);\n\n\n\n make_setter!(set_address, address, String);\n\n\n\n make_getter!(get_name, name, String);\n\n\n\n make_setter!(set_name, name, 
String);\n\n\n\n make_getter!(get_alias, alias, String);\n\n\n\n make_setter!(set_alias, alias, String);\n\n\n\n make_getter!(get_class, class, u32);\n\n\n\n make_setter!(set_class, class, u32);\n\n\n\n make_getter!(is_discoverable);\n\n\n\n make_setter!(set_discoverable, is_discoverable, bool);\n\n\n", "file_path": "src/fake_adapter.rs", "rank": 5, "score": 19213.32340999361 }, { "content": " make_setter!(set_modalias, modalias, String);\n\n\n\n pub fn get_device(&self, id: String) -> Result<Arc<FakeBluetoothDevice>, Box<Error>> {\n\n let devices = try!(self.get_devices());\n\n for device in devices {\n\n let device_id = device.get_id();\n\n if device_id == id {\n\n return Ok(device);\n\n }\n\n }\n\n Err(Box::from(\"No device exists with the given id.\"))\n\n }\n\n\n\n pub fn get_device_list(&self) -> Result<Vec<String>, Box<Error>> {\n\n let devices = try!(self.get_devices());\n\n let mut ids = vec![];\n\n for device in &devices {\n\n let id = device.get_id();\n\n ids.push(id);\n\n }\n", "file_path": "src/fake_adapter.rs", "rank": 6, "score": 19210.950411676968 }, { "content": " /*address*/ String::new(),\n\n /*name*/ String::new(),\n\n /*alias*/ String::new(),\n\n /*class*/ 0,\n\n /*is_discoverable*/ false,\n\n /*is_pairable*/ false,\n\n /*pairable_timeout*/ 0,\n\n /*discoverable_timeout*/ 0,\n\n /*is_discovering*/ false,\n\n /*uuids*/ vec![],\n\n /*modalias*/ String::new(),\n\n )\n\n }\n\n\n\n make_getter!(get_id, id);\n\n\n\n make_setter!(set_id, id);\n\n\n\n make_getter!(is_present);\n\n\n", "file_path": "src/fake_adapter.rs", "rank": 7, "score": 19210.62244363355 }, { "content": " pub fn remove_device(&self, id: String) -> Result<(), Box<Error>> {\n\n let cloned = self.devices.clone();\n\n let mut devices = match cloned.lock() {\n\n Ok(guard) => guard,\n\n Err(_) => return Err(Box::from(\"Could not get the value.\")),\n\n };\n\n Ok(devices.retain(|d| d.get_id() != id))\n\n }\n\n\n\n pub fn get_first_ad_data(&self) -> Result<String, Box<Error>> {\n\n let 
ad_datas = try!(self.get_ad_datas());\n\n if ad_datas.is_empty() {\n\n return Err(Box::from(\"No ad_data found.\"))\n\n }\n\n Ok(ad_datas[0].clone())\n\n }\n\n\n\n pub fn create_discovery_session(&self) -> Result<FakeBluetoothDiscoverySession, Box<Error>> {\n\n FakeBluetoothDiscoverySession::create_session(Arc::new(self.clone()))\n\n }\n", "file_path": "src/fake_adapter.rs", "rank": 8, "score": 19210.004633542707 }, { "content": " Ok(ids)\n\n }\n\n\n\n pub fn get_first_device(&self) -> Result<Arc<FakeBluetoothDevice>, Box<Error>> {\n\n let devices = try!(self.get_devices());\n\n if devices.is_empty() {\n\n return Err(Box::from(\"No device found.\"))\n\n }\n\n Ok(devices[0].clone())\n\n }\n\n\n\n pub fn add_device(&self, device: Arc<FakeBluetoothDevice>) -> Result<(), Box<Error>> {\n\n let cloned = self.devices.clone();\n\n let mut devices = match cloned.lock() {\n\n Ok(guard) => guard,\n\n Err(_) => return Err(Box::from(\"Could not get the value.\")),\n\n };\n\n Ok(devices.push(device))\n\n }\n\n\n", "file_path": "src/fake_adapter.rs", "rank": 9, "score": 19209.193033617357 }, { "content": " class: Arc::new(Mutex::new(class)),\n\n is_discoverable: Arc::new(Mutex::new(is_discoverable)),\n\n is_pairable: Arc::new(Mutex::new(is_pairable)),\n\n pairable_timeout: Arc::new(Mutex::new(pairable_timeout)),\n\n discoverable_timeout: Arc::new(Mutex::new(discoverable_timeout)),\n\n is_discovering: Arc::new(Mutex::new(is_discovering)),\n\n uuids: Arc::new(Mutex::new(uuids)),\n\n modalias: Arc::new(Mutex::new(modalias)),\n\n })\n\n }\n\n\n\n pub fn new_empty() -> Arc<FakeBluetoothAdapter> {\n\n FakeBluetoothAdapter::new(\n\n /*id*/ String::new(),\n\n /*is_present*/ true,\n\n /*is_powered*/ false,\n\n /*can_start_discovery*/ true,\n\n /*can_stop_discovery*/ true,\n\n /*devices*/ vec![],\n\n /*ad_datas*/ vec![],\n", "file_path": "src/fake_adapter.rs", "rank": 10, "score": 19208.221755982297 }, { "content": " make_getter!(is_pairable);\n\n\n\n make_setter!(set_pairable, 
is_pairable, bool);\n\n\n\n make_getter!(get_pairable_timeout, pairable_timeout, u32);\n\n\n\n make_setter!(set_pairable_timeout, pairable_timeout, u32);\n\n\n\n make_getter!(get_discoverable_timeout, discoverable_timeout, u32);\n\n\n\n make_setter!(set_discoverable_timeout, discoverable_timeout, u32);\n\n\n\n make_getter!(is_discovering);\n\n\n\n make_setter!(set_discovering, is_discovering, bool);\n\n\n\n make_getter!(get_uuids, uuids, Vec<String>);\n\n\n\n make_setter!(set_uuids, uuids, Vec<String>);\n\n\n", "file_path": "src/fake_adapter.rs", "rank": 11, "score": 19206.742793537105 }, { "content": " make_setter!(set_present, is_present, bool);\n\n\n\n make_getter!(is_powered);\n\n\n\n make_setter!(set_powered, is_powered, bool);\n\n\n\n make_getter!(get_can_start_discovery, can_start_discovery, bool);\n\n\n\n make_setter!(set_can_start_discovery, can_start_discovery, bool);\n\n\n\n make_getter!(get_can_stop_discovery, can_stop_discovery, bool);\n\n\n\n make_setter!(set_can_stop_discovery, can_stop_discovery, bool);\n\n\n\n make_getter!(get_devices, devices, Vec<Arc<FakeBluetoothDevice>>);\n\n\n\n make_getter!(get_ad_datas, ad_datas, Vec<String>);\n\n\n\n make_setter!(set_ad_datas, ad_datas, Vec<String>);\n\n\n", "file_path": "src/fake_adapter.rs", "rank": 12, "score": 19204.21994394201 }, { "content": " make_setter!(set_is_primary, is_primary, bool);\n\n\n\n make_setter!(set_includes, included_services, Vec<Arc<FakeBluetoothGATTService>>);\n\n\n\n make_getter!(get_uuid, uuid, String);\n\n\n\n make_setter!(set_uuid, uuid, String);\n\n\n\n pub fn get_device(&self) -> Result<Arc<FakeBluetoothDevice>, Box<Error>> {\n\n Ok(self.device.clone())\n\n }\n\n\n\n pub fn get_gatt_characteristics(&self) -> Result<Vec<String>, Box<Error>> {\n\n let cloned = self.gatt_characteristics.clone();\n\n let gatt_characteristics = match cloned.lock() {\n\n Ok(guard) => guard.deref().clone(),\n\n Err(_) => return Err(Box::from(\"Could not get the value.\")),\n\n };\n\n 
Ok(gatt_characteristics.into_iter().map(|s| s.get_id()).collect())\n\n }\n", "file_path": "src/fake_service.rs", "rank": 29, "score": 19.9363098248096 }, { "content": "use core::ops::Deref;\n\nuse fake_descriptor::FakeBluetoothGATTDescriptor;\n\nuse fake_service::FakeBluetoothGATTService;\n\nuse std::error::Error;\n\nuse std::sync::{Arc, Mutex};\n\n\n\n#[derive(Clone, Debug)]\n\npub struct FakeBluetoothGATTCharacteristic {\n\n id: Arc<Mutex<String>>,\n\n uuid: Arc<Mutex<String>>,\n\n service: Arc<FakeBluetoothGATTService>,\n\n value: Arc<Mutex<Option<Vec<u8>>>>,\n\n is_notifying: Arc<Mutex<bool>>,\n\n flags: Arc<Mutex<Vec<String>>>,\n\n gatt_descriptors: Arc<Mutex<Vec<Arc<FakeBluetoothGATTDescriptor>>>>,\n\n}\n\n\n\nimpl FakeBluetoothGATTCharacteristic {\n\n pub fn new(id: String,\n\n uuid: String,\n", "file_path": "src/fake_characteristic.rs", "rank": 30, "score": 19.638682726666843 }, { "content": "use core::ops::Deref;\n\nuse fake_characteristic::FakeBluetoothGATTCharacteristic;\n\nuse std::error::Error;\n\nuse std::sync::{Arc, Mutex};\n\n\n\n#[derive(Clone, Debug)]\n\npub struct FakeBluetoothGATTDescriptor {\n\n id: Arc<Mutex<String>>,\n\n uuid: Arc<Mutex<String>>,\n\n characteristic: Arc<FakeBluetoothGATTCharacteristic>,\n\n value: Arc<Mutex<Option<Vec<u8>>>>,\n\n flags: Arc<Mutex<Vec<String>>>,\n\n}\n\n\n\nimpl FakeBluetoothGATTDescriptor {\n\n pub fn new(id: String,\n\n uuid: String,\n\n characteristic: Arc<FakeBluetoothGATTCharacteristic>,\n\n value: Option<Vec<u8>>,\n\n flags: Vec<String>)\n", "file_path": "src/fake_descriptor.rs", "rank": 31, "score": 19.57078467964263 }, { "content": "use core::ops::Deref;\n\nuse fake_characteristic::FakeBluetoothGATTCharacteristic;\n\nuse fake_device::FakeBluetoothDevice;\n\nuse std::error::Error;\n\nuse std::sync::{Arc, Mutex};\n\n\n\n#[derive(Clone, Debug)]\n\npub struct FakeBluetoothGATTService {\n\n id: Arc<Mutex<String>>,\n\n device: Arc<FakeBluetoothDevice>,\n\n gatt_characteristics: 
Arc<Mutex<Vec<Arc<FakeBluetoothGATTCharacteristic>>>>,\n\n is_primary: Arc<Mutex<bool>>,\n\n included_services: Arc<Mutex<Vec<Arc<FakeBluetoothGATTService>>>>,\n\n uuid: Arc<Mutex<String>>,\n\n}\n\n\n\nimpl FakeBluetoothGATTService {\n\n pub fn new(id: String,\n\n device: Arc<FakeBluetoothDevice>,\n\n gatt_characteristics: Vec<Arc<FakeBluetoothGATTCharacteristic>>,\n", "file_path": "src/fake_service.rs", "rank": 32, "score": 19.48086866803224 }, { "content": " make_getter!(get_uuid, uuid, String);\n\n\n\n make_setter!(set_uuid, uuid, String);\n\n\n\n make_option_getter!(get_value, value, Vec<u8>);\n\n\n\n make_setter!(set_value, value, Option<Vec<u8>>);\n\n\n\n make_getter!(is_notifying);\n\n\n\n make_setter!(set_notifying, is_notifying, bool);\n\n\n\n make_getter!(get_flags, flags, Vec<String>);\n\n\n\n make_setter!(set_flags, flags, Vec<String>);\n\n\n\n make_getter!(get_gatt_descriptor_structs, gatt_descriptors, Vec<Arc<FakeBluetoothGATTDescriptor>>);\n\n\n\n pub fn get_service(&self) -> Result<Arc<FakeBluetoothGATTService>, Box<Error>> {\n\n Ok(self.service.clone())\n", "file_path": "src/fake_characteristic.rs", "rank": 33, "score": 16.957418872501037 }, { "content": " pub fn $function_name(&self) -> Result<$ret_type, Box<Error>> {\n\n let cloned = self.$attr.clone();\n\n let attr_value = match cloned.lock() {\n\n Ok(guard) => guard.deref().clone(),\n\n Err(_) => return Err(Box::from(\"Could not get the value.\")),\n\n };\n\n match attr_value {\n\n Some(value) => return Ok(value),\n\n None => return Err(Box::from(\"Could not get the value.\")),\n\n }\n\n }\n\n };\n\n);\n\n\n\nmacro_rules! 
make_setter(\n\n ($function_name: ident, $attr: ident, $attr_type:ty ) => {\n\n pub fn $function_name(&self, value: $attr_type) -> Result<(), Box<Error>> {\n\n let cloned = self.$attr.clone();\n\n let mut value_to_change = match cloned.lock() {\n\n Ok(guard) => guard,\n", "file_path": "src/lib.rs", "rank": 34, "score": 16.302329886630147 }, { "content": " let cloned = self.gatt_descriptors.clone();\n\n let mut gatt_descriptors = match cloned.lock() {\n\n Ok(guard) => guard,\n\n Err(_) => return Err(Box::from(\"Could not get the value.\")),\n\n };\n\n Ok(gatt_descriptors.retain(|d| d.get_id() != id))\n\n }\n\n\n\n pub fn read_value(&self) -> Result<Vec<u8>, Box<Error>> {\n\n self.get_value()\n\n }\n\n\n\n pub fn write_value(&self, value: Vec<u8>) -> Result<(), Box<Error>> {\n\n self.set_value(Some(value))\n\n }\n\n}\n", "file_path": "src/fake_characteristic.rs", "rank": 35, "score": 16.214034463096734 }, { "content": " Err(_) => String::new(),\n\n };\n\n attr_value\n\n }\n\n };\n\n\n\n ($attr_name: ident) => {\n\n pub fn $attr_name(&self) -> Result<bool, Box<Error>> {\n\n let cloned = self.$attr_name.clone();\n\n let attr_value = match cloned.lock() {\n\n Ok(guard) => guard.deref().clone(),\n\n Err(_) => return Err(Box::from(\"Could not get the value.\")),\n\n };\n\n Ok(attr_value)\n\n }\n\n };\n\n);\n\n\n\nmacro_rules! 
make_option_getter(\n\n ($function_name: ident, $attr: ident, $ret_type:ty) => {\n", "file_path": "src/lib.rs", "rank": 36, "score": 15.763399986926165 }, { "content": " is_primary: bool,\n\n included_services: Vec<Arc<FakeBluetoothGATTService>>,\n\n uuid: String)\n\n -> Arc<FakeBluetoothGATTService> {\n\n if let Ok(existing_service) = device.get_gatt_service(id.clone()) {\n\n return existing_service;\n\n }\n\n let service = Arc::new(FakeBluetoothGATTService {\n\n id: Arc::new(Mutex::new(id)),\n\n device: device.clone(),\n\n gatt_characteristics: Arc::new(Mutex::new(gatt_characteristics)),\n\n is_primary: Arc::new(Mutex::new(is_primary)),\n\n included_services: Arc::new(Mutex::new(included_services)),\n\n uuid: Arc::new(Mutex::new(uuid)),\n\n });\n\n let _ = device.add_service(service.clone());\n\n service\n\n }\n\n\n\n pub fn new_empty(device: Arc<FakeBluetoothDevice>,\n", "file_path": "src/fake_service.rs", "rank": 37, "score": 15.68135362647645 }, { "content": " /*uuid*/ String::new(),\n\n /*characteristic*/ characteristic,\n\n /*value*/ None,\n\n /*flags*/ vec!(),\n\n )\n\n }\n\n\n\n make_getter!(get_id, id);\n\n\n\n make_setter!(set_id, id);\n\n\n\n make_getter!(get_uuid, uuid, String);\n\n\n\n make_setter!(set_uuid, uuid, String);\n\n\n\n make_option_getter!(get_value, value, Vec<u8>);\n\n\n\n make_setter!(set_value, value, Option<Vec<u8>>);\n\n\n\n make_getter!(get_flags, flags, Vec<String>);\n", "file_path": "src/fake_descriptor.rs", "rank": 38, "score": 15.68029354902974 }, { "content": "extern crate hex;\n\nextern crate core;\n\n\n\nmacro_rules! 
make_getter(\n\n ($function_name: ident, $attr: ident, $ret_type:ty) => {\n\n pub fn $function_name(&self) -> Result<$ret_type, Box<Error>> {\n\n let cloned = self.$attr.clone();\n\n let attr_value = match cloned.lock() {\n\n Ok(guard) => guard.deref().clone(),\n\n Err(_) => return Err(Box::from(\"Could not get the value.\")),\n\n };\n\n Ok(attr_value)\n\n }\n\n };\n\n\n\n ($function_name: ident, $attr: ident) => {\n\n pub fn $function_name(&self) -> String {\n\n let cloned = self.$attr.clone();\n\n let attr_value = match cloned.lock() {\n\n Ok(guard) => guard.deref().clone(),\n", "file_path": "src/lib.rs", "rank": 39, "score": 15.66742924997258 }, { "content": " Err(_) => return Err(Box::from(\"Could not get the value.\")),\n\n };\n\n Ok(*value_to_change = value)\n\n }\n\n };\n\n\n\n ($function_name: ident, $attr: ident) => {\n\n pub fn $function_name(&self, value: String) {\n\n let cloned = self.$attr.clone();\n\n let mut value_to_change = match cloned.lock() {\n\n Ok(guard) => guard,\n\n Err(_) => return (),\n\n };\n\n *value_to_change = value\n\n }\n\n };\n\n);\n\n\n\npub mod fake_adapter;\n\npub mod fake_device;\n\npub mod fake_service;\n\npub mod fake_characteristic;\n\npub mod fake_descriptor;\n\npub mod fake_discovery_session;\n", "file_path": "src/lib.rs", "rank": 40, "score": 15.567026528541287 }, { "content": " let cloned = self.included_services.clone();\n\n let mut included_services = match cloned.lock() {\n\n Ok(guard) => guard,\n\n Err(_) => return Err(Box::from(\"Could not get the value.\")),\n\n };\n\n Ok(included_services.retain(|i| i.get_id() != id))\n\n }\n\n\n\n pub fn get_includes(&self) -> Result<Vec<String>, Box<Error>> {\n\n let cloned = self.included_services.clone();\n\n let included_services = match cloned.lock() {\n\n Ok(guard) => guard.deref().clone(),\n\n Err(_) => return Err(Box::from(\"Could not get the value.\")),\n\n };\n\n Ok(included_services.into_iter().map(|s| s.get_id()).collect())\n\n }\n\n\n\n}\n", "file_path": 
"src/fake_service.rs", "rank": 41, "score": 15.216100639411694 }, { "content": " }\n\n\n\n pub fn start_notify(&self) -> Result<(), Box<Error>> {\n\n self.set_notifying(true)\n\n }\n\n\n\n pub fn stop_notify(&self) -> Result<(), Box<Error>> {\n\n self.set_notifying(false)\n\n }\n\n\n\n pub fn get_gatt_descriptors(&self) -> Result<Vec<String>, Box<Error>> {\n\n let cloned = self.gatt_descriptors.clone();\n\n let gatt_descriptors = match cloned.lock() {\n\n Ok(guard) => guard.deref().clone(),\n\n Err(_) => return Err(Box::from(\"Could not get the value.\")),\n\n };\n\n Ok(gatt_descriptors.into_iter().map(|s| s.get_id()).collect())\n\n }\n\n\n\n pub fn get_gatt_descriptor(&self, id: String) -> Result<Arc<FakeBluetoothGATTDescriptor>, Box<Error>> {\n", "file_path": "src/fake_characteristic.rs", "rank": 42, "score": 15.105055889606314 }, { "content": "\n\n pub fn remove_characteristic(&self, id: String) -> Result<(), Box<Error>> {\n\n let cloned = self.gatt_characteristics.clone();\n\n let mut gatt_characteristics = match cloned.lock() {\n\n Ok(guard) => guard,\n\n Err(_) => return Err(Box::from(\"Could not get the value.\")),\n\n };\n\n Ok(gatt_characteristics.retain(|c| c.get_id() != id))\n\n }\n\n\n\n pub fn add_included_service(&self, service: Arc<FakeBluetoothGATTService>) -> Result<(), Box<Error>> {\n\n let cloned = self.included_services.clone();\n\n let mut included_services = match cloned.lock() {\n\n Ok(guard) => guard,\n\n Err(_) => return Err(Box::from(\"Could not get the value.\")),\n\n };\n\n Ok(included_services.push(service))\n\n }\n\n\n\n pub fn remove_included_service(&self, id: String) -> Result<(), Box<Error>> {\n", "file_path": "src/fake_service.rs", "rank": 43, "score": 14.626044375876518 }, { "content": " let descriptors = try!(self.get_gatt_descriptor_structs());\n\n for descriptor in descriptors {\n\n let descriptor_id = descriptor.get_id();\n\n if descriptor_id == id {\n\n return Ok(descriptor);\n\n }\n\n }\n\n Err(Box::from(\"No descriptor 
exists with the given id.\"))\n\n }\n\n\n\n pub fn add_descriptor(&self, descriptor: Arc<FakeBluetoothGATTDescriptor>) -> Result<(), Box<Error>> {\n\n let cloned = self.gatt_descriptors.clone();\n\n let mut gatt_descriptors = match cloned.lock() {\n\n Ok(guard) => guard,\n\n Err(_) => return Err(Box::from(\"Could not get the value.\")),\n\n };\n\n Ok(gatt_descriptors.push(descriptor))\n\n }\n\n\n\n pub fn remove_descriptor(&self, id: String) -> Result<(), Box<Error>> {\n", "file_path": "src/fake_characteristic.rs", "rank": 44, "score": 14.485765102119624 }, { "content": " service: Arc<FakeBluetoothGATTService>,\n\n value: Option<Vec<u8>>,\n\n is_notifying: bool,\n\n flags: Vec<String>,\n\n gatt_descriptors: Vec<Arc<FakeBluetoothGATTDescriptor>>)\n\n -> Arc<FakeBluetoothGATTCharacteristic> {\n\n if let Ok(existing_characteristic) = service.get_gatt_characteristic(id.clone()) {\n\n return existing_characteristic;\n\n }\n\n let characteristic = Arc::new(FakeBluetoothGATTCharacteristic {\n\n id: Arc::new(Mutex::new(id)),\n\n uuid: Arc::new(Mutex::new(uuid)),\n\n service: service.clone(),\n\n value: Arc::new(Mutex::new(value)),\n\n is_notifying: Arc::new(Mutex::new(is_notifying)),\n\n flags: Arc::new(Mutex::new(flags)),\n\n gatt_descriptors: Arc::new(Mutex::new(gatt_descriptors)),\n\n });\n\n let _ = service.add_characteristic(characteristic.clone());\n\n characteristic\n", "file_path": "src/fake_characteristic.rs", "rank": 45, "score": 14.382544450079566 }, { "content": "\n\n pub fn get_gatt_characteristic(&self, id: String) -> Result<Arc<FakeBluetoothGATTCharacteristic>, Box<Error>> {\n\n let characteristics = try!(self.get_gatt_characteristic_structs());\n\n for characteristic in characteristics {\n\n let characteristic_id = characteristic.get_id();\n\n if characteristic_id == id {\n\n return Ok(characteristic);\n\n }\n\n }\n\n Err(Box::from(\"No characteristic exists with the given id.\"))\n\n }\n\n\n\n pub fn add_characteristic(&self, characteristic: 
Arc<FakeBluetoothGATTCharacteristic>) -> Result<(), Box<Error>> {\n\n let cloned = self.gatt_characteristics.clone();\n\n let mut gatt_characteristics = match cloned.lock() {\n\n Ok(guard) => guard,\n\n Err(_) => return Err(Box::from(\"Could not get the value.\")),\n\n };\n\n Ok(gatt_characteristics.push(characteristic))\n\n }\n", "file_path": "src/fake_service.rs", "rank": 46, "score": 14.199276812040965 }, { "content": "\n\n make_setter!(set_flags, flags, Vec<String>);\n\n\n\n pub fn get_characteristic(&self) -> Result<Arc<FakeBluetoothGATTCharacteristic>, Box<Error>> {\n\n Ok(self.characteristic.clone())\n\n }\n\n\n\n pub fn read_value(&self) -> Result<Vec<u8>, Box<Error>> {\n\n self.get_value()\n\n }\n\n\n\n pub fn write_value(&self, value: Vec<u8>) -> Result<(), Box<Error>> {\n\n self.set_value(Some(value))\n\n }\n\n}\n", "file_path": "src/fake_descriptor.rs", "rank": 47, "score": 13.864377303469706 }, { "content": " }\n\n\n\n pub fn new_empty(service: Arc<FakeBluetoothGATTService>,\n\n characteristic_id: String)\n\n -> Arc<FakeBluetoothGATTCharacteristic> {\n\n FakeBluetoothGATTCharacteristic::new(\n\n /*id*/ characteristic_id,\n\n /*uuid*/ String::new(),\n\n /*service*/ service,\n\n /*value*/ None,\n\n /*is_notifying*/ false,\n\n /*flags*/ vec!(),\n\n /*gatt_descriptors*/ vec!(),\n\n )\n\n }\n\n\n\n make_getter!(get_id, id);\n\n\n\n make_setter!(set_id, id);\n\n\n", "file_path": "src/fake_characteristic.rs", "rank": 48, "score": 13.057423713460558 }, { "content": " service_id: String)\n\n -> Arc<FakeBluetoothGATTService> {\n\n FakeBluetoothGATTService::new(\n\n /*id*/ service_id,\n\n /*device*/ device,\n\n /*gatt_characteristics*/ vec!(),\n\n /*is_primary*/ true,\n\n /*included_services*/ vec!(),\n\n /*uuid*/ String::new(),\n\n )\n\n }\n\n\n\n make_getter!(get_id, id);\n\n\n\n make_setter!(set_id, id);\n\n\n\n make_getter!(get_gatt_characteristic_structs, gatt_characteristics, Vec<Arc<FakeBluetoothGATTCharacteristic>>);\n\n\n\n 
make_getter!(is_primary);\n\n\n", "file_path": "src/fake_service.rs", "rank": 49, "score": 12.798104512311742 }, { "content": " -> Arc<FakeBluetoothGATTDescriptor> {\n\n if let Ok(existing_descriptor) = characteristic.get_gatt_descriptor(id.clone()) {\n\n return existing_descriptor;\n\n }\n\n let descriptor = Arc::new(FakeBluetoothGATTDescriptor {\n\n id: Arc::new(Mutex::new(id)),\n\n uuid: Arc::new(Mutex::new(uuid)),\n\n characteristic: characteristic.clone(),\n\n value: Arc::new(Mutex::new(value)),\n\n flags: Arc::new(Mutex::new(flags)),\n\n });\n\n let _ = characteristic.add_descriptor(descriptor.clone());\n\n descriptor\n\n }\n\n\n\n pub fn new_empty(characteristic: Arc<FakeBluetoothGATTCharacteristic>,\n\n descriptor_id: String)\n\n -> Arc<FakeBluetoothGATTDescriptor> {\n\n FakeBluetoothGATTDescriptor::new(\n\n /*id*/ descriptor_id,\n", "file_path": "src/fake_descriptor.rs", "rank": 50, "score": 12.750478347090155 }, { "content": " pub fn get_adapter(&self) -> Arc<FakeBluetoothAdapter> {\n\n self.adapter.clone()\n\n }\n\n\n\n pub fn start_discovery(&self) -> Result<(), Box<Error>> {\n\n match self.adapter.get_can_start_discovery() {\n\n Ok(false) => Err(Box::from(\"Failed to start discovery session\")),\n\n Ok(true) => Ok(()),\n\n Err(err) => Err(err),\n\n }\n\n }\n\n\n\n pub fn stop_discovery(&self) -> Result<(), Box<Error>> {\n\n match self.adapter.get_can_stop_discovery() {\n\n Ok(false) => Err(Box::from(\"Failed to stop discovery session\")),\n\n Ok(true) => Ok(()),\n\n Err(err) => Err(err),\n\n }\n\n }\n\n}\n", "file_path": "src/fake_discovery_session.rs", "rank": 51, "score": 11.636700394523594 }, { "content": "use fake_adapter::FakeBluetoothAdapter;\n\nuse std::error::Error;\n\nuse std::sync::Arc;\n\n\n\n#[derive(Debug)]\n\npub struct FakeBluetoothDiscoverySession {\n\n adapter: Arc<FakeBluetoothAdapter>,\n\n}\n\n\n\nimpl FakeBluetoothDiscoverySession {\n\n pub fn create_session(adapter: Arc<FakeBluetoothAdapter>) -> 
Result<FakeBluetoothDiscoverySession, Box<Error>> {\n\n Ok(FakeBluetoothDiscoverySession::new(adapter))\n\n }\n\n\n\n fn new(adapter: Arc<FakeBluetoothAdapter>) -> FakeBluetoothDiscoverySession {\n\n FakeBluetoothDiscoverySession {\n\n adapter: adapter,\n\n }\n\n }\n\n\n", "file_path": "src/fake_discovery_session.rs", "rank": 52, "score": 11.41140530728937 } ]
Rust
build/src/lib.rs
BusyJay/jinkela
88cdbdb57ae53ea13ea1b8f81e4641e6d67686b4
use std::fs::File; use std::io::Write; #[derive(Default)] pub struct Builder { out_dir: Option<String>, includes: Vec<String>, sources: Vec<String>, } impl Builder { pub fn out_dir(&mut self, dir: impl Into<String>) -> &mut Builder { self.out_dir = Some(dir.into()); self } pub fn include_dir(&mut self, dir: impl Into<String>) -> &mut Builder { self.includes.push(dir.into()); self } pub fn compile_proto(&mut self, proto: impl Into<String>) -> &mut Builder { self.sources.push(proto.into()); self } pub fn build(&self) { for (key, value) in std::env::vars() { println!("{}: {}", key, value); } let proto_dir = self.out_dir.clone().unwrap_or_else(|| { let out_dir = std::env::var("OUT_DIR").unwrap(); format!("{}/protos", out_dir) }); if std::path::Path::new(&proto_dir).exists() { std::fs::remove_dir_all(&proto_dir).unwrap(); } std::fs::create_dir_all(&proto_dir).unwrap(); let protoc = protoc::Protoc::from_env_path(); let desc_file = format!("{}/mod.desc", proto_dir); let mut includes: Vec<&str> = Vec::new(); for i in &self.includes { includes.push(&i); } let mut inputs: Vec<&str> = Vec::new(); for s in &self.sources { inputs.push(&s); } protoc.write_descriptor_set(protoc::DescriptorSetOutArgs { out: &desc_file, includes: &includes, input: &inputs, include_imports: true, }).unwrap(); self.internal_build(&proto_dir, &desc_file); let modules: Vec<_> = std::fs::read_dir(&proto_dir).unwrap().filter_map(|res| { let path = match res { Ok(e) => e.path(), Err(e) => panic!("failed to list {}: {:?}", proto_dir, e), }; if path.extension() == Some(std::ffi::OsStr::new("rs")) { let name = path.file_stem().unwrap().to_str().unwrap(); Some((name.replace('-', "_"), name.to_owned())) } else { None } }).collect(); let mut f = File::create(format!("{}/mod.rs", proto_dir)).unwrap(); for (module, file_name) in &modules { if !module.contains('.') { writeln!(f, "pub mod {};", module).unwrap(); continue; } let mut level = 0; for part in module.split('.') { writeln!(f, "{:level$}pub mod {} {{", "", 
part, level = level).unwrap(); level += 1; } writeln!(f, "include!(\"{}.rs\");", file_name).unwrap(); for _ in (0..level).rev() { writeln!(f, "{:1$}}}", "", level).unwrap(); } } } #[cfg(feature = "protobuf-codec")] fn internal_build(&self, out_dir: &str, desc_file: &str) { println!("building protobuf at {} for {}", out_dir, desc_file); let desc_bytes = std::fs::read(&desc_file).unwrap(); let desc: protobuf::descriptor::FileDescriptorSet = protobuf::parse_from_bytes(&desc_bytes).unwrap(); let mut files_to_generate = Vec::new(); 'outer: for file in &self.sources { let f = std::path::Path::new(file); for include in &self.includes { if let Some(truncated) = f.strip_prefix(include).ok() { files_to_generate.push(format!("{}", truncated.display())); continue 'outer; } } panic!("file {:?} is not found in includes {:?}", file, self.includes); } protobuf_codegen::gen_and_write( desc.get_file(), &files_to_generate, &std::path::Path::new(out_dir), &protobuf_codegen::Customize::default(), ).unwrap(); self.build_grpcio(&desc.get_file(), &files_to_generate, &out_dir); } #[cfg(feature = "prost-codec")] fn internal_build(&self, out_dir: &str, desc_file: &str) { println!("building prost at {}", out_dir); let mut cfg = prost_build::Config::new(); cfg.type_attribute(".", "#[derive(::jinkela::Classicalize)]").out_dir(out_dir); cfg.compile_protos(&self.sources, &self.includes).unwrap(); self.build_grpcio(out_dir, desc_file); } #[cfg(feature = "grpcio-protobuf-codec")] fn build_grpcio(&self, desc: &[protobuf::descriptor::FileDescriptorProto], files_to_generates: &[String], output: &str) { println!("building protobuf with grpcio at {}", output); let output_dir = std::path::Path::new(output); let results = grpcio_compiler::codegen::gen(&desc, &files_to_generates); for res in results { let out_file = output_dir.join(&res.name); let mut f = File::create(&out_file).unwrap(); f.write_all(&res.content).unwrap(); } } #[cfg(all(feature = "protobuf-codec", not(feature = "grpcio-protobuf-codec")))] 
fn build_grpcio(&self, _: &[protobuf::descriptor::FileDescriptorProto], _: &[String], _: &str) {} #[cfg(feature = "grpcio-prost-codec")] fn build_grpcio(&self, out_dir: &str, desc_file: &str) { use prost::Message; let desc_bytes = std::fs::read(&desc_file).unwrap(); let desc = prost_types::FileDescriptorSet::decode(&desc_bytes).unwrap(); let mut files_to_generate = Vec::new(); 'outer: for file in &self.sources { let f = std::path::Path::new(file); for include in &self.includes { if let Some(truncated) = f.strip_prefix(include).ok() { files_to_generate.push(format!("{}", truncated.display())); continue 'outer; } } panic!("file {:?} is not found in includes {:?}", file, self.includes); } let out_dir = std::path::Path::new(out_dir); let results = grpcio_compiler::codegen::gen(&desc.file, &files_to_generate); for res in results { let out_file = out_dir.join(&res.name); let mut f = File::create(&out_file).unwrap(); f.write_all(&res.content).unwrap(); } } #[cfg(all(feature = "prost-codec", not(feature = "grpcio-prost-codec")))] fn build_grpcio(&self, _out_dir: &str, _desc_file: &str) {} }
use std::fs::File; use std::io::Write; #[derive(Default)] pub struct Builder { out_dir: Option<String>, includes: Vec<String>, sources: Vec<String>, } impl Builder { pub fn out_dir(&mut self, dir: impl Into<String>) -> &mut Builder { self.out_dir = Some(dir.into()); self } pub fn include_dir(&mut self, dir: impl Into<String>) -> &mut Builder { self.includes.push(dir.into()); self } pub fn compile_proto(&mut self, proto: impl Into<String>) -> &mut Builder { self.sources.push(proto.into()); self } pub fn build(&self) { for (key, value) in std::env::vars() { println!("{}: {}", key, value); } let proto_dir = self.out_dir.clone().unwrap_or_else(|| { let out_dir = std::env::var("OUT_DIR").unwrap(); format!("{}/protos", out_dir) }); if std::path::Path::new(&proto_dir).exists() { std::fs::remove_dir_all(&proto_dir).unwrap(); } std::fs::create_dir_all(&proto_dir).unwrap(); let protoc = protoc::Protoc::from_env_path(); let desc_file = format!("{}/mod.desc", proto_dir); let mut includes: Vec<&str> = Vec::new(); for i in &self.includes { includes.push(&i); } let mut inputs: Vec<&str> = Vec::new(); for s in &self.sources { inputs.push(&s); } protoc.write_descriptor_set(protoc::DescriptorSetOutArgs { out: &desc_file, includes: &includes, input: &inputs, include_imports: true, }).unwrap(); self.internal_build(&proto_dir, &desc_file); let modules: Vec<_> = std::fs::read_dir(&proto_dir).unwrap().filter_map(|res| { let path = match res { Ok(e) => e.path(), Err(e) => panic!("failed to list {}: {:?}", proto_dir, e), }; if path.extension() == Some(std::ffi::OsStr::new("rs")) { let name = path.file_stem().unwrap().to_str().unwrap(); Some((name.replace('-', "_"), name.to_owned())) } else { None } }).collect(); let mut f = File::create(format!("{}/mod.rs", proto_dir)).unwrap(); for (module, file_name) in &modules { if !module.contains('.') { writeln!(f, "pub mod {};", module).unwrap(); continue; } let mut level = 0; for part in module.split('.') { writeln!(f, "{:level$}pub mod {} {{", "", 
part, level = level).unwrap(); level += 1; } writeln!(f, "include!(\"{}.rs\");", file_name).unwrap(); for _ in (0..level).rev() { writeln!(f, "{:1$}}}", "", level).unwrap(); } } } #[cfg(feature = "protobuf-codec")] fn internal_build(&self, out_dir: &str, desc_file: &str) { println!("building protobuf at {} for {}", out_dir, desc_file); let desc_bytes = std::fs::read(&desc_file).unwrap(); let desc: protobuf::descriptor::FileDescriptorSet = protobuf::parse_from_bytes(&desc_bytes).unwrap(); let mut files_to_generate = Vec::new(); 'outer: for file in &self.sources { let f = std::path::Path::new(file); for include in &self.includes { if let Some(truncated) = f.strip_prefix(include).ok() { files_to_generate.push(format!("{}", truncated.display())); continue 'outer; } } panic!("file {:?} is not found in includes {:?}", file, self.includes); } protobuf_codegen::gen_and_write( desc.get_file(), &files_to_generate, &std::path::Path::new(out_dir), &protobuf_codegen::Customize::default(), ).unwrap(); self.build_grpcio(&desc.get_file(), &files_to_generate, &out_dir); } #[cfg(feature = "prost-codec")] fn internal_build(&self, out_dir: &str, desc_file: &str) { println!("building prost at {}", out_dir); let mut cfg = prost_build::Config::new(); cfg.type_attribute(".", "#[derive(::jinkela::Classicalize)]").out_dir(out_dir); cfg.compile_protos(&self.sources, &self.includes).unwrap(); self.build_grpcio(out_dir, desc_file); } #[cfg(feature = "grpcio-protobuf-codec")] fn build_grpcio(&self, desc: &[protobuf::descriptor::FileDescriptorProto], files_to_generates: &[String], output: &str) { println!("building protobuf with grpcio at {}", output); let output_dir = std::path::Path::new(output); let results = grpcio_compiler::codegen::gen(&desc, &files_to_generates); for res in results { let out_file = output_dir.join(&res.name); let mut f = File::create(&out_file).unwrap(); f.write_all(&res.content).unwrap(); } } #[cfg(all(feature = "protobuf-codec", not(feature = "grpcio-protobuf-codec")))] 
fn build_grpcio(&self, _: &[protobuf::descriptor::FileDescriptorProto], _: &[String], _: &str) {} #[cfg(feature = "grpcio-prost-codec")] fn build_grpcio(&self, out_dir: &str, desc_file: &str) { use prost::Message; let desc_bytes = std::fs::read(&desc_file).unwrap(); let desc = prost_types::FileDescriptorSet::decode(&desc_bytes).unwrap(); let mut files_to_generate = Vec::new(); 'outer: for
); } let out_dir = std::path::Path::new(out_dir); let results = grpcio_compiler::codegen::gen(&desc.file, &files_to_generate); for res in results { let out_file = out_dir.join(&res.name); let mut f = File::create(&out_file).unwrap(); f.write_all(&res.content).unwrap(); } } #[cfg(all(feature = "prost-codec", not(feature = "grpcio-prost-codec")))] fn build_grpcio(&self, _out_dir: &str, _desc_file: &str) {} }
file in &self.sources { let f = std::path::Path::new(file); for include in &self.includes { if let Some(truncated) = f.strip_prefix(include).ok() { files_to_generate.push(format!("{}", truncated.display())); continue 'outer; } } panic!("file {:?} is not found in includes {:?}", file, self.includes
function_block-random_span
[ { "content": "#[proc_macro_derive(Classicalize, attributes(prost))]\n\npub fn classicalize(input: TokenStream) -> TokenStream {\n\n let input: DeriveInput = syn::parse(input).unwrap();\n\n let s = match input.data {\n\n Data::Struct(s) => classicalize_struct(input.ident, s),\n\n Data::Enum(e) => classicalize_enum(input.ident, e),\n\n Data::Union(_) => panic!(\"union is not supported yet.\"),\n\n };\n\n s.into()\n\n}\n\n\n", "file_path": "derive/src/lib.rs", "rank": 0, "score": 68019.76999154553 }, { "content": "fn classicalize_struct(ident: Ident, s: DataStruct) -> proc_macro2::TokenStream {\n\n let fields = match s {\n\n DataStruct {\n\n fields: Fields::Named(FieldsNamed { named: fields, .. }),\n\n ..\n\n }\n\n | DataStruct {\n\n fields:\n\n Fields::Unnamed(FieldsUnnamed {\n\n unnamed: fields, ..\n\n }),\n\n ..\n\n } => fields.into_iter().collect(),\n\n DataStruct {\n\n fields: Fields::Unit,\n\n ..\n\n } => Vec::new(),\n\n };\n\n\n\n // Put impls in a const, so that 'extern crate' can be used.\n", "file_path": "derive/src/lib.rs", "rank": 1, "score": 35073.667441920225 }, { "content": "fn classicalize_string_field(field: &Field) -> proc_macro2::TokenStream {\n\n let ident = field.ident.as_ref().unwrap();\n\n let mut ident_str = ident.to_string();\n\n if ident_str.starts_with(\"r#\") {\n\n ident_str = ident_str[2..].to_owned();\n\n }\n\n let set = Ident::new(&format!(\"set_{}\", ident_str), Span::call_site());\n\n let get = Ident::new(&format!(\"get_{}\", ident_str), Span::call_site());\n\n let take = Ident::new(&format!(\"take_{}\", ident_str), Span::call_site());\n\n quote! 
{\n\n pub fn #set(&mut self, value: String) {\n\n self.#ident = value;\n\n }\n\n\n\n pub fn #get(&self) -> &str {\n\n &self.#ident\n\n }\n\n\n\n pub fn #take(&mut self) -> String {\n\n ::std::mem::replace(&mut self.#ident, String::new())\n\n }\n\n }\n\n}\n\n\n", "file_path": "derive/src/lib.rs", "rank": 2, "score": 31096.511968743718 }, { "content": "pub trait GenericEnum: Sized {\n\n fn values() -> &'static [Self];\n\n}\n\n\n\n#[cfg(feature = \"protobuf-codec\")]\n\nmod codec {\n\n pub use protobuf::ProtobufError;\n\n\n\n impl<T: protobuf::Message + Default> super::GenericMessage for T {\n\n type Error = ProtobufError;\n\n\n\n #[inline]\n\n fn compute_size(&self) -> usize {\n\n protobuf::Message::compute_size(self) as usize\n\n }\n\n\n\n #[inline]\n\n fn encode_into(&self, buf: &mut Vec<u8>) -> Result<(), ProtobufError> {\n\n protobuf::Message::write_to_vec(self, buf)\n\n }\n", "file_path": "src/lib.rs", "rank": 3, "score": 28341.104616012904 }, { "content": "/// Unifies different interfaces of message in different protocol implementations.\n\npub trait GenericMessage: Sized {\n\n type Error;\n\n\n\n /// Get the size of encoded messages.\n\n fn compute_size(&self) -> usize;\n\n /// Encode the message into buf.\n\n fn encode_into(&self, buf: &mut Vec<u8>) -> Result<(), Self::Error>;\n\n /// Decode a message from the data.\n\n fn decode_from(data: &[u8]) -> Result<Self, Self::Error>;\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 4, "score": 28341.104616012904 }, { "content": "fn classicalize_repeated_field(field: &Field) -> proc_macro2::TokenStream {\n\n let ident = field.ident.as_ref().unwrap();\n\n let mut ident_str = ident.to_string();\n\n if ident_str.starts_with(\"r#\") {\n\n ident_str = ident_str[2..].to_owned();\n\n }\n\n let origin_ty = &field.ty;\n\n let ty = match origin_ty {\n\n Type::Path(tp) => {\n\n let wrapper = tp.path.segments.iter().last().unwrap();\n\n assert_eq!(wrapper.ident, \"Vec\", \"expected Vec, but got {:?}\", origin_ty);\n\n let 
generic_arg = match wrapper.arguments {\n\n PathArguments::AngleBracketed(ref params) => params.args.iter().next().unwrap(),\n\n _ => panic!(\"unexpected token {:?}\", origin_ty),\n\n };\n\n match generic_arg {\n\n GenericArgument::Type(ty) => ty,\n\n _ => panic!(\"expected generic, but get {:?}\", origin_ty),\n\n }\n\n },\n", "file_path": "derive/src/lib.rs", "rank": 5, "score": 18627.647045575497 }, { "content": "fn classicalize_bytes_field(field: &Field) -> proc_macro2::TokenStream {\n\n let ident = field.ident.as_ref().unwrap();\n\n let mut ident_str = ident.to_string();\n\n if ident_str.starts_with(\"r#\") {\n\n ident_str = ident_str[2..].to_owned();\n\n }\n\n let set = Ident::new(&format!(\"set_{}\", ident_str), Span::call_site());\n\n let get = Ident::new(&format!(\"get_{}\", ident_str), Span::call_site());\n\n let take = Ident::new(&format!(\"take_{}\", ident_str), Span::call_site());\n\n quote! {\n\n pub fn #set(&mut self, value: Vec<u8>) {\n\n self.#ident = value;\n\n }\n\n\n\n pub fn #get(&self) -> &[u8] {\n\n &self.#ident\n\n }\n\n\n\n pub fn #take(&mut self) -> Vec<u8> {\n\n ::std::mem::replace(&mut self.#ident, Vec::new())\n\n }\n\n }\n\n}\n\n\n\n\n", "file_path": "derive/src/lib.rs", "rank": 6, "score": 18627.647045575497 }, { "content": "fn classicalize_copy_field(field: &Field) -> proc_macro2::TokenStream {\n\n let ident = field.ident.as_ref().unwrap();\n\n let mut ident_str = ident.to_string();\n\n if ident_str.starts_with(\"r#\") {\n\n ident_str = ident_str[2..].to_owned();\n\n }\n\n let ty = &field.ty;\n\n let set = Ident::new(&format!(\"set_{}\", ident_str), Span::call_site());\n\n let get = Ident::new(&format!(\"get_{}\", ident_str), Span::call_site());\n\n quote! 
{\n\n pub fn #set(&mut self, value: #ty) {\n\n self.#ident = value;\n\n }\n\n\n\n pub fn #get(&self) -> #ty {\n\n self.#ident\n\n }\n\n }\n\n}\n\n\n", "file_path": "derive/src/lib.rs", "rank": 7, "score": 18627.647045575497 }, { "content": "fn classicalize_optional_message_field(field: &Field) -> proc_macro2::TokenStream {\n\n let ident = field.ident.as_ref().unwrap();\n\n let mut ident_str = ident.to_string();\n\n if ident_str.starts_with(\"r#\") {\n\n ident_str = ident_str[2..].to_owned();\n\n }\n\n let ty = &field.ty;\n\n let ty = match ty {\n\n Type::Path(tp) => {\n\n let wrapper = tp.path.segments.iter().last().unwrap();\n\n assert_eq!(wrapper.ident, \"Option\", \"expected option, but got {:?}\", ty);\n\n let generic_arg = match wrapper.arguments {\n\n PathArguments::AngleBracketed(ref params) => params.args.iter().next().unwrap(),\n\n _ => panic!(\"unexpected token {:?}\", ty),\n\n };\n\n match generic_arg {\n\n GenericArgument::Type(ty) => ty,\n\n _ => panic!(\"expected generic, but get {:?}\", ty),\n\n }\n\n },\n", "file_path": "derive/src/lib.rs", "rank": 8, "score": 18214.373623529555 }, { "content": "fn classicalize_accessors(field: &Field) -> Option<proc_macro2::TokenStream> {\n\n for a in &field.attrs {\n\n for m in a.interpret_meta() {\n\n match m {\n\n Meta::List(MetaList { ident, nested, .. 
}) => {\n\n if ident == \"prost\" {\n\n let mut ft = FieldType::Copyable;\n\n let mut freq = Frequency::Optional;\n\n for n in nested {\n\n match n {\n\n NestedMeta::Meta(Meta::Word(w)) => if w == \"message\" {\n\n ft = FieldType::Message;\n\n } else if w == \"optional\" {\n\n freq = Frequency::Optional;\n\n } else if w == \"repeated\" {\n\n freq = Frequency::Repeated;\n\n } else if w == \"bytes\" {\n\n ft = FieldType::Bytes;\n\n } else if w == \"string\" {\n\n ft = FieldType::String;\n", "file_path": "derive/src/lib.rs", "rank": 9, "score": 18094.792998795994 }, { "content": "fn classicalize_enum(ident: Ident, s: DataEnum) -> proc_macro2::TokenStream {\n\n let dummy_const = Ident::new(&format!(\"{}_CLASSICAL_ENUMERATION\", ident), Span::call_site());\n\n\n\n // Map the variants into 'fields'.\n\n let mut variants = Vec::with_capacity(s.variants.len());\n\n for v in s.variants {\n\n let value_ident = &v.ident;\n\n variants.push(quote! { #ident::#value_ident});\n\n }\n\n quote! {\n\n #[allow(non_snake_case, unused_attributes)]\n\n const #dummy_const: () = {\n\n extern crate jinkela as _jinkela;\n\n\n\n impl _jinkela::GenericEnum for #ident {\n\n fn values() -> &'static [#ident] {\n\n &[#(#variants,)*]\n\n }\n\n }\n\n };\n\n }\n\n}\n", "file_path": "derive/src/lib.rs", "rank": 10, "score": 17220.493084953312 }, { "content": "fn classicalize_enum_field(field: &Field, lit: &Lit) -> proc_macro2::TokenStream {\n\n let ident = field.ident.as_ref().unwrap();\n\n let mut ident_str = ident.to_string();\n\n if ident_str.starts_with(\"r#\") {\n\n ident_str = ident_str[2..].to_owned();\n\n }\n\n let get = Ident::new(&format!(\"get_{}\", ident_str), Span::call_site());\n\n let ty = match lit {\n\n Lit::Str(s) => syn::parse_str::<Path>(&s.value()).unwrap(),\n\n _ => panic!(\"expected enum type, but got {:?}\", lit),\n\n };\n\n quote! 
{\n\n pub fn #get(&self) -> #ty {\n\n match #ty::from_i32(self.#ident) {\n\n Some(v) => v,\n\n None => panic!(\"Unexpected enum value for #lit: {}\", self.#ident),\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "derive/src/lib.rs", "rank": 11, "score": 16836.495950092027 }, { "content": "\n\n #[inline]\n\n fn decode_from(data: &[u8]) -> Result<T, ProtobufError> {\n\n let mut m = T::default();\n\n m.merge_from_bytes(data)?;\n\n Ok(m)\n\n }\n\n }\n\n\n\n impl<T: protobuf::ProtobufEnum> super::GenericEnum for T {\n\n #[inline]\n\n fn values() -> &'static [Self] {\n\n <T as protobuf::ProtobufEnum>::values()\n\n }\n\n }\n\n}\n\n\n\n#[cfg(feature = \"prost-codec\")]\n\nmod codec {\n\n use prost::{EncodeError, DecodeError};\n", "file_path": "src/lib.rs", "rank": 12, "score": 11.846766561514125 }, { "content": " }\n\n\n\n impl<T: prost::Message + Default> super::GenericMessage for T {\n\n type Error = ProtobufError;\n\n\n\n #[inline]\n\n fn compute_size(&self) -> usize {\n\n self.encoded_len()\n\n }\n\n\n\n #[inline]\n\n fn encode_into(&self, data: &mut Vec<u8>) -> Result<(), ProtobufError> {\n\n prost::Message::encode(self, data).map_err(ProtobufError::Encode)\n\n }\n\n\n\n #[inline]\n\n fn decode_from(data: &[u8]) -> Result<T, ProtobufError> {\n\n T::decode(data).map_err(ProtobufError::Decode)\n\n }\n\n }\n\n}\n\n\n\npub use codec::ProtobufError;\n\n#[cfg(feature = \"prost-codec\")]\n\npub use jinkela_derive::*;\n", "file_path": "src/lib.rs", "rank": 13, "score": 10.652085187137637 }, { "content": " E::T\n\n }\n\n }\n\n\n\n impl E {\n\n pub fn from_i32(i: i32) -> Option<E> {\n\n match i {\n\n 0 => Some(E::T),\n\n 1 => Some(E::C),\n\n _ => None\n\n }\n\n }\n\n }\n\n\n\n #[test]\n\n fn test_methods() {\n\n let mut a = A::default();\n\n assert!(!a.has_b1());\n\n assert!(!a.has_b2());\n\n assert_eq!(a.get_b1().b, 0);\n", "file_path": "tests/tests.rs", "rank": 14, "score": 8.39273804062668 }, { "content": " pub cf: std::string::String,\n\n #[prost(bytes)]\n\n pub key: 
std::vec::Vec<u8>,\n\n }\n\n\n\n #[derive(::jinkela::Classicalize, Default, Debug, PartialEq)]\n\n struct B {\n\n #[prost(uint64)]\n\n b: u64,\n\n }\n\n\n\n #[derive(::jinkela::Classicalize, Debug, PartialEq)]\n\n #[repr(i32)]\n\n enum E {\n\n T = 0,\n\n C = 1,\n\n }\n\n\n\n impl Default for E {\n\n fn default() -> E {\n", "file_path": "tests/tests.rs", "rank": 15, "score": 8.276172129912824 }, { "content": "#![recursion_limit = \"4096\"]\n\n\n\nextern crate proc_macro;\n\n\n\nuse quote::quote;\n\nuse proc_macro::TokenStream;\n\nuse proc_macro2::Span;\n\nuse syn::{\n\n Data, DataEnum, DataStruct, DeriveInput, Fields, FieldsNamed, PathArguments, GenericArgument,\n\n FieldsUnnamed, Ident, Meta, Field, MetaList, NestedMeta, Type, Lit, Path,\n\n};\n\n\n\n#[proc_macro_derive(Classicalize, attributes(prost))]\n", "file_path": "derive/src/lib.rs", "rank": 16, "score": 7.934762929543308 }, { "content": "#[cfg(feature = \"prost-codec\")]\n\nextern crate prost;\n\n\n\n#[cfg(feature = \"prost-codec\")]\n\nmod prost_tests {\n\n use jinkela::GenericEnum;\n\n\n\n #[derive(::jinkela::Classicalize, Default, Debug)]\n\n struct A {\n\n #[prost(message, optional)]\n\n b1: Option<B>,\n\n #[prost(message, optional)]\n\n b2: ::std::option::Option<B>,\n\n #[prost(message, repeated)]\n\n b3: Vec<B>,\n\n #[prost(enumeration = \"E\")]\n\n r#type: i32,\n\n #[prost(bool)]\n\n pub notify_only: bool,\n\n #[prost(string)]\n", "file_path": "tests/tests.rs", "rank": 17, "score": 7.8665931562893885 }, { "content": " _ => panic!(\"unexpected type {:?}\", ty),\n\n };\n\n let set = Ident::new(&format!(\"set_{}\", ident_str), Span::call_site());\n\n let get = Ident::new(&format!(\"get_{}\", ident_str), Span::call_site());\n\n let take = Ident::new(&format!(\"take_{}\", ident_str), Span::call_site());\n\n let mutation = Ident::new(&format!(\"mut_{}\", ident_str), Span::call_site());\n\n let has = Ident::new(&format!(\"has_{}\", ident_str), Span::call_site());\n\n quote! 
{\n\n pub fn #set(&mut self, value: #ty) {\n\n self.#ident = Some(value);\n\n }\n\n\n\n pub fn #get(&self) -> &#ty {\n\n self.#ident.as_ref().unwrap_or_else(|| #ty::default_instance())\n\n }\n\n\n\n pub fn #mutation(&mut self) -> &mut #ty {\n\n self.#ident.get_or_insert_with(|| #ty::default())\n\n }\n\n\n", "file_path": "derive/src/lib.rs", "rank": 18, "score": 6.460362015520044 }, { "content": " _ => panic!(\"unexpected type {:?}\", origin_ty),\n\n };\n\n let set = Ident::new(&format!(\"set_{}\", ident_str), Span::call_site());\n\n let get = Ident::new(&format!(\"get_{}\", ident_str), Span::call_site());\n\n let take = Ident::new(&format!(\"take_{}\", ident_str), Span::call_site());\n\n let mutation = Ident::new(&format!(\"mut_{}\", ident_str), Span::call_site());\n\n quote! {\n\n pub fn #set(&mut self, value: #origin_ty) {\n\n self.#ident = value;\n\n }\n\n\n\n pub fn #get(&self) -> &[#ty] {\n\n &self.#ident\n\n }\n\n\n\n pub fn #mutation(&mut self) -> &mut #origin_ty {\n\n &mut self.#ident\n\n }\n\n\n\n pub fn #take(&mut self) -> #origin_ty {\n\n ::std::mem::replace(&mut self.#ident, ::std::vec::Vec::new())\n\n }\n\n }\n\n}\n\n\n", "file_path": "derive/src/lib.rs", "rank": 19, "score": 6.132056563712119 }, { "content": "\n\n quick_error! {\n\n /// The error for PROST!. 
It defines error in a weird way.\n\n #[derive(Debug, PartialEq)]\n\n pub enum ProtobufError {\n\n /// Error for when encoding messages.\n\n Encode(err: EncodeError) {\n\n from()\n\n cause(err)\n\n description(err.description())\n\n display(\"{:?}\", err)\n\n }\n\n /// Error for decoding messages.\n\n Decode(err: DecodeError) {\n\n from()\n\n cause(err)\n\n description(err.description())\n\n display(\"{:?}\", err)\n\n }\n\n }\n", "file_path": "src/lib.rs", "rank": 20, "score": 5.310593962745513 }, { "content": " }\n\n NestedMeta::Meta(Meta::NameValue(nv)) => if nv.ident == \"enumeration\" {\n\n return Some(classicalize_enum_field(field, &nv.lit))\n\n }\n\n _ => ()\n\n }\n\n }\n\n return Some(match freq {\n\n Frequency::Repeated => classicalize_repeated_field(field),\n\n Frequency::Optional => {\n\n match ft {\n\n FieldType::Message => classicalize_optional_message_field(field),\n\n FieldType::Copyable => classicalize_copy_field(field),\n\n FieldType::String => classicalize_string_field(field),\n\n FieldType::Bytes => classicalize_bytes_field(field),\n\n }\n\n }\n\n })\n\n }\n\n },\n\n _ => (),\n\n }\n\n }\n\n }\n\n None\n\n}\n\n\n", "file_path": "derive/src/lib.rs", "rank": 21, "score": 5.2978823832317286 }, { "content": " assert_eq!(a.get_b3().len(), 2);\n\n a.set_cf(\"test\".to_owned());\n\n assert_eq!(a.get_cf(), \"test\");\n\n assert_eq!(a.take_cf(), \"test\".to_owned());\n\n a.set_key(b\"test\".to_vec());\n\n assert_eq!(a.get_key(), b\"test\");\n\n assert_eq!(a.take_key(), b\"test\".to_vec());\n\n a.set_notify_only(true);\n\n assert!(a.get_notify_only());\n\n }\n\n\n\n #[test]\n\n fn test_enum() {\n\n assert_eq!(E::values(), &[E::T, E::C]);\n\n }\n\n}\n", "file_path": "tests/tests.rs", "rank": 22, "score": 5.2462755479294945 }, { "content": " pub fn #take(&mut self) -> #ty {\n\n self.#ident.take().unwrap_or_else(|| #ty::default())\n\n }\n\n\n\n pub fn #has(&self) -> bool {\n\n self.#ident.is_some()\n\n }\n\n }\n\n}\n\n\n", "file_path": "derive/src/lib.rs", 
"rank": 23, "score": 5.158143191941244 }, { "content": "#[cfg(feature = \"prost-codec\")]\n\n#[macro_use]\n\nextern crate quick_error;\n\n\n\n/// Unifies different interfaces of message in different protocol implementations.\n", "file_path": "src/lib.rs", "rank": 24, "score": 4.467625858230086 }, { "content": " extern crate prost as _prost;\n\n extern crate bytes as _bytes;\n\n extern crate lazy_static;\n\n\n\n impl #ident {\n\n pub fn default_instance() -> &'static #ident {\n\n lazy_static::lazy_static! {\n\n static ref INSTANCE: #ident = #ident::default();\n\n }\n\n &*INSTANCE\n\n }\n\n }\n\n\n\n #methods\n\n };\n\n }\n\n}\n\n\n", "file_path": "derive/src/lib.rs", "rank": 25, "score": 3.752131734175318 }, { "content": " assert_eq!(a.get_b2().b, 0);\n\n a.mut_b1().b = 2;\n\n a.mut_b2().b = 1;\n\n assert!(a.has_b1());\n\n assert!(a.has_b2());\n\n assert_eq!(a.get_b1().b, 2);\n\n assert_eq!(a.get_b2().b, 1);\n\n assert_eq!(a.take_b2().b, 1);\n\n let b = B::default();\n\n assert_eq!(*B::default_instance(), b);\n\n a.set_b2(b);\n\n assert_eq!(a.get_b2().b, 0);\n\n assert_eq!(a.get_type(), E::T);\n\n a.r#type = E::C as i32;\n\n assert_eq!(a.get_type(), E::C);\n\n assert_eq!(a.get_b3(), &[]);\n\n a.mut_b3().push(B::default());\n\n assert_eq!(a.take_b3(), vec![B::default()]);\n\n assert_eq!(a.get_b3(), &[]);\n\n a.set_b3(vec![B::default(), B::default()]);\n", "file_path": "tests/tests.rs", "rank": 26, "score": 2.717200615240511 }, { "content": " let dummy_const = Ident::new(&format!(\"{}_CLASSICAL_MESSAGE\", ident), Span::call_site());\n\n\n\n let methods = fields\n\n .iter()\n\n .flat_map(classicalize_accessors)\n\n .collect::<Vec<_>>();\n\n let methods = if methods.is_empty() {\n\n quote!()\n\n } else {\n\n quote! {\n\n #[allow(dead_code)]\n\n impl #ident {\n\n #(#methods)*\n\n }\n\n }\n\n };\n\n\n\n quote! {\n\n #[allow(non_snake_case, unused_attributes)]\n\n const #dummy_const: () = {\n", "file_path": "derive/src/lib.rs", "rank": 27, "score": 2.1227886982619992 } ]
Rust
src/sms/async/send.rs
drahnr/messagebird
2d22bac58359b9f21410d47bc306d36b538ea710
use super::super::*; use futures::*; use hyper; use hyper_rustls; use std::env; use std::fmt; use std::marker::PhantomData; use std::ops::Deref; #[derive(Debug, Clone)] pub struct AccessKey(String); impl Deref for AccessKey { type Target = String; fn deref(&self) -> &Self::Target { &self.0 } } impl FromStr for AccessKey { type Err = MessageBirdError; fn from_str(s: &str) -> Result<Self, Self::Err> { Ok(AccessKey(s.to_string())) } } impl From<String> for AccessKey { fn from(s: String) -> Self { AccessKey(s) } } impl fmt::Display for AccessKey { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.0) } } impl AccessKey { pub fn from_env() -> Result<AccessKey, MessageBirdError> { let raw = env::var("MESSAGEBIRD_ACCESSKEY").map_err(|_e| MessageBirdError::AccessKeyError { msg: "env".to_string(), })?; AccessKey::from_str(raw.as_str()) } } pub type RequestMessageList = Request<parameter::list::ListParameters, MessageList>; pub type RequestView = Request<parameter::view::ViewParameters, Message>; pub type RequestSend = Request<parameter::send::SendParameters, Message>; pub struct Request<T, R> { future: Box<dyn Future<Item = R, Error = MessageBirdError>>, phantom: PhantomData<T>, } impl<T, R> Future for Request<T, R> { type Item = R; type Error = MessageBirdError; fn poll(&mut self) -> Result<Async<Self::Item>, Self::Error> { self.future.poll() } } fn request_future_with_json_response<R>( client: &mut hyper::Client< hyper_rustls::HttpsConnector<hyper::client::HttpConnector>, hyper::Body, >, request: hyper::Request<hyper::Body>, ) -> impl Future<Item = R, Error = MessageBirdError> where R: 'static + Sized + Send + Sync + for<'de> serde::de::Deserialize<'de> + std::fmt::Debug, { debug!("request {:?}", request); let fut = client .request(request) .map_err(|e: hyper::Error| { debug!("request {:?}", e); MessageBirdError::RequestError }) .and_then(|response: hyper::Response<hyper::Body>| { let status = response.status(); debug!("rest status code: {}", 
status); futures::future::ok(response) }) .and_then(|response: hyper::Response<hyper::Body>| { let status = response.status(); let body: hyper::Body = response.into_body(); body.concat2() .map_err(|e| { debug!("body concat {:?}", e); MessageBirdError::RequestError }) .map(move |x| (status, x)) }) .and_then(|(status, body): (_, hyper::Chunk)| { debug!("response: {:?}", String::from_utf8(body.to_vec()).unwrap()); match status { hyper::StatusCode::OK | hyper::StatusCode::CREATED => { match serde_json::from_slice::<R>(&body).map_err(|e| { debug!("Failed to parse response body: {:?}", e); MessageBirdError::ParseError }) { Err(e) => futures::future::err(e), Ok(x) => { debug!("Parsed response {:?}", x); futures::future::ok(x) } } } _ => match serde_json::from_slice::<ServiceErrors>(&body).map_err(|e| { debug!("Failed to parse response body: {:?}", e); MessageBirdError::ParseError }) { Err(e) => futures::future::err(e), Ok(service_errors) => { let service_errors = service_errors.into(); debug!("Parsed error response {:?}", service_errors); futures::future::err(MessageBirdError::ServiceError(service_errors)) } }, } }); fut } impl<P, R> Request<P, R> where P: Send + Query, R: 'static + Send + Sync + for<'de> serde::de::Deserialize<'de> + std::fmt::Debug, { pub fn new(parameters: &P, accesskey: &AccessKey) -> Self { let https = hyper_rustls::HttpsConnector::new(4); let mut client: hyper::Client<_, hyper::Body> = hyper::Client::builder().build(https); let mut request = hyper::Request::builder(); request.uri(parameters.uri()); request.method(parameters.method()); request.header( hyper::header::AUTHORIZATION, format!("AccessKey {}", accesskey), ); debug!("{:?}", request); let request: hyper::Request<_> = if parameters.method() == hyper::Method::POST { request.header( hyper::header::CONTENT_TYPE, format!("application/x-www-form-urlencoded"), ); parameters .uri() .query() .map(|body: &str| { let body = body.to_string(); request.header(hyper::header::CONTENT_LENGTH, format!("{}", 
body.len())); request.body(body.into()).unwrap() }) .unwrap_or_else(|| { request.header(hyper::header::CONTENT_LENGTH, format!("{}", 0)); request.body(hyper::Body::empty()).unwrap() }) } else { request.header(hyper::header::CONTENT_LENGTH, format!("{}", 0)); request.body(hyper::Body::empty()).unwrap() }; let future = request_future_with_json_response::<R>(&mut client, request); let future = Box::new(future); Self { future, phantom: PhantomData, } } }
use super::super::*; use futures::*; use hyper; use hyper_rustls; use std::env; use std::fmt; use std::marker::PhantomData; use std::ops::Deref; #[derive(Debug, Clone)] pub struct AccessKey(String); impl Deref for AccessKey { type Target = String; fn deref(&self) -> &Self::Target { &self.0 } } impl FromStr for AccessKey { type Err = MessageBirdError; fn from_str(s: &str) -> Result<Self, Self::Err> { Ok(AccessKey(s.to_string())) } } impl From<String> for AccessKey { fn from(s: String) -> Self { AccessKey(s) } } impl fmt::Display for AccessKey { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.0) } } impl AccessKey { pub fn from_env() -> Result<AccessKey, MessageBirdError> { let raw = env::var("MESSAGEBIRD_ACCESSKEY").map_err(|_e| MessageBirdError::AccessKeyError { msg: "env".to_string(), })?; AccessKey::from_str(raw.as_str()) } } pub type RequestMessageLis
futures::future::ok(response) }) .and_then(|response: hyper::Response<hyper::Body>| { let status = response.status(); let body: hyper::Body = response.into_body(); body.concat2() .map_err(|e| { debug!("body concat {:?}", e); MessageBirdError::RequestError }) .map(move |x| (status, x)) }) .and_then(|(status, body): (_, hyper::Chunk)| { debug!("response: {:?}", String::from_utf8(body.to_vec()).unwrap()); match status { hyper::StatusCode::OK | hyper::StatusCode::CREATED => { match serde_json::from_slice::<R>(&body).map_err(|e| { debug!("Failed to parse response body: {:?}", e); MessageBirdError::ParseError }) { Err(e) => futures::future::err(e), Ok(x) => { debug!("Parsed response {:?}", x); futures::future::ok(x) } } } _ => match serde_json::from_slice::<ServiceErrors>(&body).map_err(|e| { debug!("Failed to parse response body: {:?}", e); MessageBirdError::ParseError }) { Err(e) => futures::future::err(e), Ok(service_errors) => { let service_errors = service_errors.into(); debug!("Parsed error response {:?}", service_errors); futures::future::err(MessageBirdError::ServiceError(service_errors)) } }, } }); fut } impl<P, R> Request<P, R> where P: Send + Query, R: 'static + Send + Sync + for<'de> serde::de::Deserialize<'de> + std::fmt::Debug, { pub fn new(parameters: &P, accesskey: &AccessKey) -> Self { let https = hyper_rustls::HttpsConnector::new(4); let mut client: hyper::Client<_, hyper::Body> = hyper::Client::builder().build(https); let mut request = hyper::Request::builder(); request.uri(parameters.uri()); request.method(parameters.method()); request.header( hyper::header::AUTHORIZATION, format!("AccessKey {}", accesskey), ); debug!("{:?}", request); let request: hyper::Request<_> = if parameters.method() == hyper::Method::POST { request.header( hyper::header::CONTENT_TYPE, format!("application/x-www-form-urlencoded"), ); parameters .uri() .query() .map(|body: &str| { let body = body.to_string(); request.header(hyper::header::CONTENT_LENGTH, format!("{}", 
body.len())); request.body(body.into()).unwrap() }) .unwrap_or_else(|| { request.header(hyper::header::CONTENT_LENGTH, format!("{}", 0)); request.body(hyper::Body::empty()).unwrap() }) } else { request.header(hyper::header::CONTENT_LENGTH, format!("{}", 0)); request.body(hyper::Body::empty()).unwrap() }; let future = request_future_with_json_response::<R>(&mut client, request); let future = Box::new(future); Self { future, phantom: PhantomData, } } }
t = Request<parameter::list::ListParameters, MessageList>; pub type RequestView = Request<parameter::view::ViewParameters, Message>; pub type RequestSend = Request<parameter::send::SendParameters, Message>; pub struct Request<T, R> { future: Box<dyn Future<Item = R, Error = MessageBirdError>>, phantom: PhantomData<T>, } impl<T, R> Future for Request<T, R> { type Item = R; type Error = MessageBirdError; fn poll(&mut self) -> Result<Async<Self::Item>, Self::Error> { self.future.poll() } } fn request_future_with_json_response<R>( client: &mut hyper::Client< hyper_rustls::HttpsConnector<hyper::client::HttpConnector>, hyper::Body, >, request: hyper::Request<hyper::Body>, ) -> impl Future<Item = R, Error = MessageBirdError> where R: 'static + Sized + Send + Sync + for<'de> serde::de::Deserialize<'de> + std::fmt::Debug, { debug!("request {:?}", request); let fut = client .request(request) .map_err(|e: hyper::Error| { debug!("request {:?}", e); MessageBirdError::RequestError }) .and_then(|response: hyper::Response<hyper::Body>| { let status = response.status(); debug!("rest status code: {}", status);
random
[ { "content": "/// TODO the name is misleading/obsolete, should be something with params\n\npub trait Query {\n\n fn uri(&self) -> hyper::Uri;\n\n fn method(&self) -> hyper::Method {\n\n hyper::Method::GET\n\n }\n\n}\n\n\n\n/// Contact Id\n\n///\n\n/// TODO not implemented just yet\n\n#[derive(Clone, Debug, Serialize, Deserialize, Eq, PartialEq)]\n\npub struct Contact(u64);\n\n\n\nimpl Default for Contact {\n\n fn default() -> Self {\n\n Contact(0)\n\n }\n\n}\n\n\n\nimpl fmt::Display for Contact {\n", "file_path": "src/sms/parameter/types.rs", "rank": 0, "score": 61315.32133358276 }, { "content": "struct TypeDetailsVisitor;\n\n\n\nimpl<'de> Visitor<'de> for TypeDetailsVisitor {\n\n type Value = TypeDetails;\n\n\n\n fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {\n\n formatter.write_str(\"a TypeDetails Map\")\n\n }\n\n\n\n fn visit_map<M>(self, mut access: M) -> Result<Self::Value, M::Error>\n\n where\n\n M: MapAccess<'de>,\n\n {\n\n let mut set = TypeDetails::new();\n\n\n\n while let Some((key, value)) = access.next_entry()? 
{\n\n let p: (String, String) = (key, value);\n\n println!(\"TypeDetail key: {:?} value: {:?}\", p.0, p.1);\n\n // TODO map error properly\n\n let td = TypeDetail::try_from(p).expect(\"Expected a valid type detail\");\n", "file_path": "src/sms/typedetails.rs", "rank": 1, "score": 59816.71030680021 }, { "content": "fn main() -> Result<(), MessageBirdError> {\n\n env_logger::init();\n\n\n\n let msisdn_str = std::env::var(\"SMS_RECIPIENT\".to_string())\n\n .expect(\"SMS_RECIPIENT should contain the number without prefix\");\n\n let msisdn: Msisdn = Msisdn::from_str(msisdn_str.as_str())\n\n .expect(\"SMS_RECIPIENT did not contain a valid number\");\n\n\n\n info!(\"example: listing all remote messages\");\n\n let q = sms::list::ListParameters::builder()\n\n //.with_payload_type(PayloadType::Sms) // curently the API has a bug which returns unexpected/undocumented JSON, so stay away from this for the time being\n\n .with_direction(Direction::SendToMobile)\n\n //.with_origin(Originator::from_str(\"inbox\")?) 
// inbox is the default set by messagebird\n\n .with_destination(msisdn)\n\n //.with_contact() // unimplemented for the time being!\n\n //.contains_term(\"fun\")\n\n .skip(1)\n\n .count(10)\n\n .build();\n\n\n", "file_path": "examples/list.rs", "rank": 3, "score": 42953.24889752285 }, { "content": "fn main() -> Result<(), MessageBirdError> {\n\n env_logger::init();\n\n\n\n let msisdn_str = std::env::var(\"SMS_RECIPIENT\".to_string())\n\n .expect(\"Missing SMS_RECIPIENT environment variable\");\n\n let msisdn: Msisdn = Msisdn::from_str(msisdn_str.as_str())?;\n\n\n\n info!(\"example: sending a message\");\n\n let sendable = sms::send::SendParameters::builder()\n\n .payload(\n\n PayloadType::Sms,\n\n Payload::Text(\"fun\".to_string()),\n\n PayloadEncoding::Auto,\n\n )\n\n .origin(AlphaNumeric(\"inbox\".to_string()).into())\n\n .add_recipient(msisdn.into())\n\n //.add_recipient(Recipient::new())\n\n .build();\n\n\n\n let accesskey = AccessKey::from_env()?;\n\n let fut = RequestSend::new(&sendable, &accesskey);\n\n let fut = fut.and_then(|sent_msg: Message| {\n\n info!(\"{:?}\", sent_msg);\n\n futures::future::ok(())\n\n });\n\n let mut core = tokio_core::reactor::Core::new().unwrap();\n\n core.run(fut.map(|_| ()))\n\n}\n", "file_path": "examples/send.rs", "rank": 4, "score": 42953.24889752285 }, { "content": "fn incoming(\n\n req: Request<Body>,\n\n _client: &Client<HttpConnector>,\n\n latest: &Arc<RwLock<Option<String>>>,\n\n) -> Box<dyn Future<Item = Response<Body>, Error = hyper::Error> + Send> {\n\n println!(\"incoming!\");\n\n let method = req.method();\n\n let uri = req.uri();\n\n match (method, uri.path(), uri.query()) {\n\n (&Method::GET, \"/vmn\", Some(query)) => {\n\n let x = query\n\n .parse::<messagebird::sms::NotificationQueryVMN>()\n\n .expect(\"Failed to parse\");\n\n\n\n let mut guard = latest.write().unwrap();\n\n *guard = Some(format!(\"vmn {}\", query));\n\n\n\n println!(\"notfied of vmn sms {:?}\", x);\n\n let body = format!(\"notfied of shortcode 
sms {:?}\", x);\n\n let response = Response::builder()\n", "file_path": "examples/receive.rs", "rank": 5, "score": 40936.49133295217 }, { "content": "fn main() {\n\n env_logger::init();\n\n\n\n let addr = \"127.0.0.1:8181\".parse().unwrap();\n\n\n\n let latest: Option<String> = None;\n\n let latest = Arc::new(RwLock::new(latest));\n\n\n\n hyper::rt::run(future::lazy(move || {\n\n let client = Client::new();\n\n let service = move || {\n\n let client = client.clone();\n\n let latest = latest.clone();\n\n service_fn(move |req| incoming(req, &client, &latest))\n\n };\n\n\n\n let server = Server::bind(&addr)\n\n .serve(service)\n\n .map_err(|e| eprintln!(\"server error: {}\", e));\n\n\n\n println!(\"Listening on http://{}\", addr);\n\n\n\n server\n\n }));\n\n}\n", "file_path": "examples/receive.rs", "rank": 6, "score": 40936.49133295217 }, { "content": "/// Visitor for parsing the ServiceErrorCode from integers\n\nstruct ErrorCodeVisitor;\n\n\n\nimpl<'de> Visitor<'de> for ErrorCodeVisitor {\n\n type Value = ServiceErrorCode;\n\n\n\n fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {\n\n formatter.write_str(\"a valid ServiceErrorCode\")\n\n }\n\n\n\n fn visit_u64<E>(self, value: u64) -> Result<Self::Value, E>\n\n where\n\n E: de::Error,\n\n {\n\n ServiceErrorCode::from_u64(value)\n\n .ok_or(de::Error::invalid_value(Unexpected::Unsigned(value), &self))\n\n }\n\n\n\n fn visit_i64<E>(self, value: i64) -> Result<Self::Value, E>\n\n where\n\n E: de::Error,\n", "file_path": "src/serviceerror.rs", "rank": 7, "score": 36451.28936187451 }, { "content": "struct OriginatorVisitor;\n\n\n\nimpl<'de> Visitor<'de> for OriginatorVisitor {\n\n type Value = Originator;\n\n\n\n fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {\n\n formatter.write_str(\"a valid originator\")\n\n }\n\n\n\n fn visit_str<E>(self, value: &str) -> Result<Self::Value, E>\n\n where\n\n E: de::Error,\n\n {\n\n Originator::from_str(value)\n\n .map_err(|_e| 
de::Error::invalid_value(Unexpected::Str(value), &self))\n\n }\n\n}\n\n\n\nimpl<'de> Deserialize<'de> for Originator {\n\n fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>\n", "file_path": "src/sms/originator.rs", "rank": 8, "score": 36451.28936187451 }, { "content": "struct IdentifierVisitor;\n\n\n\nimpl<'de> Visitor<'de> for IdentifierVisitor {\n\n type Value = Identifier;\n\n\n\n fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {\n\n formatter.write_str(\"a valid identifier str with 32 characters\")\n\n }\n\n\n\n fn visit_str<E>(self, value: &str) -> Result<Self::Value, E>\n\n where\n\n E: de::Error,\n\n {\n\n Identifier::from_str(value)\n\n .map_err(|_e| de::Error::invalid_value(Unexpected::Str(value), &self))\n\n }\n\n}\n\n\n\nimpl<'de> Deserialize<'de> for Identifier {\n\n fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>\n", "file_path": "src/sms/identifier.rs", "rank": 9, "score": 36451.28936187451 }, { "content": "struct PayloadVisitor;\n\n\n\nimpl<'de> Visitor<'de> for PayloadVisitor {\n\n type Value = Payload;\n\n\n\n fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {\n\n formatter.write_str(\"a valid payload, either string or binary\")\n\n }\n\n\n\n fn visit_str<E>(self, value: &str) -> Result<Self::Value, E>\n\n where\n\n E: de::Error,\n\n {\n\n // TODO this actually requires context\n\n // TODO on how to parse the `value`\n\n // TODO without the type it is impossible to decide\n\n // TODO if i.e. 
1234 is a Binary repr or a Text\n\n Payload::from_str(value)\n\n .map_err(|_e| de::Error::invalid_value(Unexpected::Str(value), &self))\n\n }\n", "file_path": "src/sms/payload.rs", "rank": 10, "score": 36451.28936187451 }, { "content": "struct MessageClassVisitor;\n\n\n\nimpl<'de> Visitor<'de> for MessageClassVisitor {\n\n type Value = MessageClass;\n\n\n\n fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {\n\n formatter.write_str(\"a valid message class integer [0-3]\")\n\n }\n\n\n\n // serde_json treats each number as u64?\n\n fn visit_u64<E>(self, value: u64) -> Result<Self::Value, E>\n\n where\n\n E: de::Error,\n\n {\n\n MessageClass::from_u64(value)\n\n .ok_or(de::Error::invalid_value(Unexpected::Unsigned(value), &self))\n\n }\n\n}\n\n\n\nimpl<'de> Deserialize<'de> for MessageClass {\n", "file_path": "src/sms/messageclass.rs", "rank": 11, "score": 35267.0849750755 }, { "content": "struct DateTimeVisitor;\n\n\n\nimpl<'de> Visitor<'de> for DateTimeVisitor {\n\n type Value = DateTime;\n\n\n\n fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {\n\n formatter.write_str(\"a valid date time formatted str\")\n\n }\n\n\n\n fn visit_str<E>(self, value: &str) -> Result<Self::Value, E>\n\n where\n\n E: de::Error,\n\n {\n\n Self::Value::from_str(value)\n\n .map_err(|_e| de::Error::invalid_value(Unexpected::Str(value), &self))\n\n }\n\n}\n\n\n\nimpl<'de> Deserialize<'de> for DateTime {\n\n fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>\n", "file_path": "src/sms/datetime.rs", "rank": 12, "score": 35267.0849750755 }, { "content": "struct CallbackUrlVisitor;\n\n\n\nimpl<'de> Visitor<'de> for CallbackUrlVisitor {\n\n type Value = CallbackUrl;\n\n\n\n fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {\n\n formatter.write_str(\"a valid callback/href http url\")\n\n }\n\n\n\n fn visit_str<E>(self, value: &str) -> Result<Self::Value, E>\n\n where\n\n E: de::Error,\n\n {\n\n Url::parse(value)\n\n .map(|url| 
CallbackUrl(url))\n\n .map_err(|_e| de::Error::invalid_value(Unexpected::Str(value), &self))\n\n }\n\n}\n\n\n\nimpl<'de> Deserialize<'de> for CallbackUrl {\n", "file_path": "src/sms/callbackurl.rs", "rank": 13, "score": 35267.0849750755 }, { "content": "\n\nimpl ToString for QueryRecipient {\n\n fn to_string(&self) -> String {\n\n match self {\n\n QueryRecipient::Group(ref group) => group.to_string(),\n\n QueryRecipient::Msisdn(ref msisdn) => msisdn.to_string(),\n\n }\n\n }\n\n}\n\n\n\nimpl FromStr for QueryRecipient {\n\n type Err = MessageBirdError;\n\n fn from_str(_s: &str) -> Result<Self, Self::Err> {\n\n unimplemented!(\"TODO implement deserialize and all of the Group API\")\n\n }\n\n}\n\n\n\nimpl From<Msisdn> for QueryRecipient {\n\n fn from(msisdn: Msisdn) -> Self {\n\n QueryRecipient::Msisdn(msisdn)\n", "file_path": "src/sms/parameter/types.rs", "rank": 14, "score": 27047.201482546727 }, { "content": " fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n write!(f, \"no group\")\n\n }\n\n}\n\n\n\n/// recpient for sending a message\n\n///\n\n/// Differs from the message format, such that it will serialize to a string\n\n/// and can also be a group\n\n#[derive(Clone, Debug, Eq, PartialEq)]\n\npub enum QueryRecipient {\n\n Group(Group),\n\n Msisdn(Msisdn),\n\n}\n\n\n\nimpl From<Recipient> for QueryRecipient {\n\n fn from(_recipient: Recipient) -> Self {\n\n unimplemented!(\"TODO implement convenience conversion\")\n\n }\n\n}\n", "file_path": "src/sms/parameter/types.rs", "rank": 15, "score": 27044.06501031457 }, { "content": " fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n write!(f, \"Contact({})\", self.0)\n\n }\n\n}\n\n\n\n/// Group\n\n///\n\n/// Send a message to a predefined group of receivers\n\n///\n\n/// TODO not implemented just yet\n\n#[derive(Clone, Debug, Serialize, Deserialize, Eq, PartialEq)]\n\npub struct Group;\n\n\n\nimpl Default for Group {\n\n fn default() -> Self {\n\n Group\n\n }\n\n}\n\n\n\nimpl fmt::Display for Group 
{\n", "file_path": "src/sms/parameter/types.rs", "rank": 16, "score": 27043.471151679267 }, { "content": " }\n\n}\n\n\n\nimpl From<Group> for QueryRecipient {\n\n fn from(group: Group) -> Self {\n\n QueryRecipient::Group(group)\n\n }\n\n}\n\n\n\nimpl Serialize for QueryRecipient {\n\n fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>\n\n where\n\n S: Serializer,\n\n {\n\n let val_str = self.to_string();\n\n serializer.serialize_str(val_str.as_str())\n\n }\n\n}\n\n\n\n// only need one way for this one, ambiguity for recipients makes impl\n", "file_path": "src/sms/parameter/types.rs", "rank": 17, "score": 27042.368676465187 }, { "content": "use super::*;\n\n\n\nuse serde::ser::{Serialize, Serializer};\n\n\n\nuse hyper;\n\nuse std::fmt;\n\nuse std::string::ToString;\n\n\n\n/// TODO the name is misleading/obsolete, should be something with params\n", "file_path": "src/sms/parameter/types.rs", "rank": 18, "score": 27041.40162520154 }, { "content": "// deserialize impossible without knowing all the existing group ids\n\n// which would imply implementing the group id API\n\n//\n\n#[cfg(test)]\n\nmod tests {\n\n\n\n #[derive(Debug, Serialize, Eq, PartialEq)]\n\n struct DummyQuery<T> {\n\n pub inner: T,\n\n }\n\n\n\n use super::*;\n\n #[test]\n\n fn recipient() {\n\n let recipient: QueryRecipient = Msisdn::new(123475).unwrap().into();\n\n\n\n let recipient = DummyQuery { inner: recipient };\n\n\n\n let recipient_str = serde_url_params::to_string(&recipient).unwrap();\n\n println!(\"recipient is {}\", recipient_str);\n", "file_path": "src/sms/parameter/types.rs", "rank": 19, "score": 27039.660038941125 }, { "content": "\n\n let recipients_str = serde_url_params::to_string(&recipients).unwrap();\n\n println!(\"recipient is Some(...) 
=> \\\"{}\\\"\", recipients_str);\n\n }\n\n\n\n #[test]\n\n fn recipient_optional_none() {\n\n let recipients: Option<QueryRecipient> = None;\n\n\n\n let recipients = DummyQuery { inner: recipients };\n\n\n\n let recipients_str = serde_url_params::to_string(&recipients).unwrap();\n\n println!(\"recipient is None => \\\"{}\\\"\", recipients_str);\n\n }\n\n}\n", "file_path": "src/sms/parameter/types.rs", "rank": 20, "score": 27037.840168063507 }, { "content": " }\n\n\n\n #[test]\n\n fn recipient_vec() {\n\n let recipients: Vec<QueryRecipient> = vec![\n\n Msisdn::new(123475).unwrap().into(),\n\n Msisdn::new(777777777).unwrap().into(),\n\n ];\n\n\n\n let recipients = DummyQuery { inner: recipients };\n\n\n\n let recipients_str = serde_url_params::to_string(&recipients).unwrap();\n\n println!(\"recipients are \\\"{}\\\"\", recipients_str);\n\n }\n\n\n\n #[test]\n\n fn recipient_optional_some() {\n\n let recipients: Option<QueryRecipient> = Some(Msisdn::new(123475).unwrap().into());\n\n\n\n let recipients = DummyQuery { inner: recipients };\n", "file_path": "src/sms/parameter/types.rs", "rank": 21, "score": 27036.218498853825 }, { "content": " Ok(Msisdn(raw))\n\n } else {\n\n Err(MessageBirdError::TypeError {\n\n msg: format!(\"Invalid phone number: {}\", raw),\n\n })\n\n }\n\n }\n\n\n\n /// convert from u64\n\n ///\n\n /// TODO use TryFrom as soon as stabilized\n\n pub fn try_from(raw: u64) -> Result<Self, MessageBirdError> {\n\n Msisdn::new(raw)\n\n }\n\n}\n\n\n\nimpl FromStr for Msisdn {\n\n type Err = MessageBirdError;\n\n fn from_str(s: &str) -> Result<Self, Self::Err> {\n\n serde_plain::from_str::<Self>(s).map_err(|_e| MessageBirdError::ParseError)\n", "file_path": "src/sms/recipient.rs", "rank": 23, "score": 23.34305317366509 }, { "content": "}\n\n\n\nimpl Identifier {\n\n pub fn new(raw: String) -> Self {\n\n Identifier(raw)\n\n }\n\n pub fn as_str(&self) -> &str {\n\n self.0.as_str()\n\n }\n\n}\n\n\n\nimpl fmt::Display for Identifier {\n\n fn fmt(&self, f: &mut 
fmt::Formatter) -> fmt::Result {\n\n write!(f, \"{}\", self.0)\n\n }\n\n}\n\n\n\nimpl FromStr for Identifier {\n\n type Err = MessageBirdError;\n\n\n", "file_path": "src/sms/identifier.rs", "rank": 24, "score": 23.189015138524553 }, { "content": "///\n\n/// Not very useful right now, recommended to not use unless you have explicit issues\n\n/// i.e. some base stations in south eastern europe happily convert binary SMS to\n\n/// textual SMS - because why not? In that case alternate routes might help to circumpass\n\n/// the issues.\n\n#[derive(Debug, Serialize, Deserialize, Eq, PartialEq)]\n\npub struct Gateway(pub u32);\n\n\n\nimpl Deref for Gateway {\n\n type Target = u32;\n\n fn deref(&self) -> &Self::Target {\n\n &self.0\n\n }\n\n}\n\n\n\nimpl FromStr for Gateway {\n\n type Err = MessageBirdError;\n\n fn from_str(s: &str) -> Result<Self, Self::Err> {\n\n serde_plain::from_str::<Self>(s).map_err(|_e| MessageBirdError::ParseError)\n\n }\n", "file_path": "src/sms/message.rs", "rank": 25, "score": 22.24432254652982 }, { "content": "impl Hash for TypeDetail {\n\n fn hash<H: Hasher>(&self, state: &mut H) {\n\n match self {\n\n TypeDetail::UserDataHeader(_) => 1.hash(state),\n\n // expiermental: _ => #[allow(unreachable_code)] unreachable!(\"Mising implementation for struct variant\"),\n\n }\n\n }\n\n}\n\n\n\nimpl TypeDetail {\n\n pub fn try_from(src: (String, String)) -> Result<Self, MessageBirdError> {\n\n match src.0.as_str() {\n\n \"udh\" => Ok(TypeDetail::UserDataHeader(src.1)),\n\n x => Err(MessageBirdError::TypeError {\n\n msg: format!(\"Unknown TypeDetail \\\"{}\\\"\", x),\n\n }),\n\n }\n\n }\n\n\n\n pub fn as_tuple(self) -> (String, String) {\n", "file_path": "src/sms/typedetails.rs", "rank": 26, "score": 21.299189099072112 }, { "content": "use super::*;\n\n\n\n// requires manual Serialize/Deserialize impl\n\n#[derive(Copy, Clone, Debug, Serialize, Deserialize, Eq, PartialEq)]\n\n#[serde(rename = \"msisdn\")]\n\npub struct Msisdn(u64);\n\n\n\nimpl Deref for 
Msisdn {\n\n type Target = u64;\n\n fn deref(&self) -> &Self::Target {\n\n &self.0\n\n }\n\n}\n\n\n\n/// Mobile Subscriber Integrated Services Digital Network Number\n\n///\n\n/// A worldwide unique phone number. This does not require a `+` or `00` prefix before the country code.\n\nimpl Msisdn {\n\n pub fn new(raw: u64) -> Result<Self, MessageBirdError> {\n\n if raw != 0 {\n", "file_path": "src/sms/recipient.rs", "rank": 27, "score": 20.64196516372468 }, { "content": "}\n\n\n\n// requires manual Serialize/Deserialize impl\n\n#[derive(Debug, Clone, Serialize, Deserialize, Eq, PartialEq)]\n\npub struct AlphaNumeric(pub String);\n\n\n\nimpl AlphaNumeric {\n\n pub fn as_str(&self) -> &str {\n\n self.0.as_str()\n\n }\n\n}\n\n\n\nimpl FromStr for AlphaNumeric {\n\n type Err = MessageBirdError;\n\n fn from_str(s: &str) -> Result<Self, Self::Err> {\n\n lazy_static! {\n\n static ref RE: Regex = Regex::new(r\"^[A-Za-z0-9_-]+$\").unwrap();\n\n }\n\n if RE.is_match(s) {\n\n Ok(AlphaNumeric(String::from(s)))\n", "file_path": "src/sms/originator.rs", "rank": 29, "score": 19.851287381032108 }, { "content": " .or_else(|_e| {\n\n AlphaNumeric::from_str(s)\n\n .and_then(|alphanumeric| Ok(Originator::Other(alphanumeric)))\n\n })\n\n }\n\n}\n\n\n\nimpl From<u64> for Originator {\n\n fn from(raw: u64) -> Self {\n\n Originator::TelephoneNumber(raw.into())\n\n }\n\n}\n\n\n\n// impl FromStr for PayloadType {\n\n// type Err = MessageBirdError;\n\n// fn from_str(s: &str) -> Result<Self, Self::Err> {\n\n// serde_plain::from_str::<Self>(s).map_err(|_e| {\n\n// MessageBirdError::ParseError\n\n// })\n\n// }\n", "file_path": "src/sms/originator.rs", "rank": 30, "score": 18.802480504154076 }, { "content": "/// SMS message type enum\n\n///\n\n/// Determines the type of the message payload\n\n#[derive(Clone, Debug, Serialize, Deserialize, Eq, PartialEq)]\n\n#[serde(rename_all = \"lowercase\")]\n\npub enum PayloadType {\n\n /// regular text SMS, encoding defined by `PayloadEncoding`\n\n Sms,\n\n 
/// raw binary encoding of bytes, some providers are incapable of handling those with their base stations, be warned\n\n Binary,\n\n /// priority notification style SMS, there is no guarantee that this is stored on the phone\n\n Flash,\n\n}\n\n\n\nimpl FromStr for PayloadType {\n\n type Err = MessageBirdError;\n\n fn from_str(s: &str) -> Result<Self, Self::Err> {\n\n serde_plain::from_str::<Self>(s).map_err(|_e| MessageBirdError::ParseError)\n\n }\n\n}\n", "file_path": "src/sms/payload.rs", "rank": 31, "score": 18.637212514339826 }, { "content": " }\n\n}\n\n\n\nimpl FromStr for Status {\n\n type Err = MessageBirdError;\n\n fn from_str(s: &str) -> Result<Self, Self::Err> {\n\n serde_plain::from_str::<Self>(s).map_err(|_e| MessageBirdError::ParseError)\n\n }\n\n}\n\n\n\nimpl ToString for Status {\n\n fn to_string(&self) -> String {\n\n serde_plain::to_string(self).unwrap()\n\n }\n\n}\n\n\n\n/// Recipient\n\n///\n\n/// Definition of a recepient, used for querying the status of a SMS.\n\n/// Contains the deliver status of a message as well as the time of posting\n", "file_path": "src/sms/recipient.rs", "rank": 32, "score": 18.276810906394783 }, { "content": "\n\nimpl ServiceError {\n\n #[allow(unused)]\n\n pub fn new(code: ServiceErrorCode, description: String, parameter: Option<String>) -> Self {\n\n Self {\n\n code,\n\n description,\n\n parameter,\n\n }\n\n }\n\n}\n\n\n\nimpl FromStr for ServiceError {\n\n type Err = MessageBirdError;\n\n fn from_str(s: &str) -> Result<Self, Self::Err> {\n\n serde_plain::from_str::<Self>(s).map_err(|e| {\n\n debug!(\"ServiceError from_str: {:?}\", e);\n\n MessageBirdError::ParseError\n\n })\n\n }\n", "file_path": "src/serviceerror.rs", "rank": 33, "score": 17.9470378118956 }, { "content": "///\n\n/// Used for the sending direction.\n\n///\n\n/// `PayloadType` and `PayloadEncoding` are unrelated and used for querying.\n\n#[derive(Clone, Debug, Eq, PartialEq)]\n\npub enum Payload {\n\n Bytes(Vec<u8>),\n\n Text(String),\n\n}\n\n\n\nimpl 
Default for Payload {\n\n fn default() -> Self {\n\n Payload::Text(\"default\".to_string())\n\n }\n\n}\n\n\n\n// You can even choose to implement multiple traits, like Lower and UpperHex\n\nimpl fmt::LowerHex for Payload {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n match self {\n", "file_path": "src/sms/payload.rs", "rank": 34, "score": 17.929739304614216 }, { "content": " type Err = MessageBirdError;\n\n fn from_str(s: &str) -> Result<Self, Self::Err> {\n\n lazy_static! {\n\n static ref RE: Regex = Regex::new(r\"^\\+?[1-9][0-9]+$\").unwrap();\n\n }\n\n if RE.is_match(s) {\n\n Ok(TelephoneNumber(String::from(s)))\n\n } else {\n\n Err(MessageBirdError::FormatError {\n\n chunk: String::from(s),\n\n })\n\n }\n\n }\n\n}\n\n\n\nimpl From<u64> for TelephoneNumber {\n\n //type Err = MessageBirdError;\n\n fn from(raw: u64) -> Self {\n\n TelephoneNumber(raw.to_string())\n\n }\n", "file_path": "src/sms/originator.rs", "rank": 35, "score": 17.77208342640833 }, { "content": "use super::*;\n\n\n\nuse serde::de::{self, Deserialize, Deserializer, Unexpected, Visitor};\n\n\n\nuse std::fmt;\n\n\n\nuse regex::Regex;\n\n\n\n// TODO impl into() for Originator\n\n// requires manual Serialize/Deserialize impl\n\n#[derive(Debug, Clone, Serialize, Deserialize, Eq, PartialEq)]\n\npub struct TelephoneNumber(pub String);\n\n\n\nimpl TelephoneNumber {\n\n pub fn as_str(&self) -> &str {\n\n self.0.as_str()\n\n }\n\n}\n\n\n\nimpl FromStr for TelephoneNumber {\n", "file_path": "src/sms/originator.rs", "rank": 36, "score": 17.560207882435943 }, { "content": "\n\nimpl ToString for PayloadType {\n\n fn to_string(&self) -> String {\n\n serde_plain::to_string(self).unwrap()\n\n }\n\n}\n\n\n\nimpl PayloadType {\n\n pub fn as_str(&self) -> &str {\n\n match self {\n\n PayloadType::Sms => \"sms\",\n\n PayloadType::Binary => \"binary\",\n\n PayloadType::Flash => \"flash\",\n\n }\n\n }\n\n}\n\n\n\n/// Payload data\n\n///\n\n/// Enum representing both raw bytes/binary as well as text 
based sms messages.\n", "file_path": "src/sms/payload.rs", "rank": 37, "score": 17.548814276300124 }, { "content": " fn from_str(s: &str) -> Result<Self, Self::Err> {\n\n // XXX: taken from the example on the webpage\n\n const VALID_LENGTH: usize = 32;\n\n if s.len() != VALID_LENGTH {\n\n Err(MessageBirdError::TypeError {\n\n msg: format!(\n\n \"unexpected id length {}, expected {}\",\n\n s.len(),\n\n VALID_LENGTH\n\n ),\n\n })\n\n } else {\n\n Ok(Self::new(String::from(s)))\n\n }\n\n }\n\n}\n\n\n\nimpl Serialize for Identifier {\n\n fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>\n\n where\n\n S: Serializer,\n\n {\n\n serializer.serialize_str(self.as_str())\n\n }\n\n}\n\n\n", "file_path": "src/sms/identifier.rs", "rank": 38, "score": 17.39261749925556 }, { "content": "# messagebird-async\n\n\n\n[![Build Status](https://ci.spearow.io/api/v1/teams/main/pipelines/messagebird/jobs/master-validate/badge)](https://ci.spearow.io/teams/main/pipelines/messagebird) [![Crates.io](https://img.shields.io/crates/v/messagebird-async.svg)](https://crates.io/crates/messagebird-async) [![docs.rs](https://docs.rs/messagebird-async/badge.svg)](https://docs.rs/messagebird-async) [![License](https://img.shields.io/crates/l/messagebird-async.svg)](#license)\n\n\n\nMessageBird is a service for sending SMS at its core https://www.messagebird.com/\n\n\n\n## Example\n\n\n\n### Send a SMS Message\n\n\n\nSending a sms to a specified target is implemented in `examples/send.rs`:\n\n\n\n```sh\n\nexport MESSAGEBIRD_ACCESSKEY=abio8usad..dfahdk\n\nexport SMS_RECIPIENT=1234556\n\n```\n\n\n\nThe `SMS_RECIPIENT` should NOT contain leading zeros nor the `+`. 
The countrycode is still necessary.\n\n\n\n```sh\n\ncargo run --example send\n\n```\n\n\n\nor copy & paste:\n\n\n\n```rust\n\n#[macro_use]\n\nextern crate log;\n\nextern crate env_logger;\n\nextern crate futures;\n\nextern crate messagebird_async;\n\nextern crate tokio_core;\n\n\n\nuse futures::future::Future;\n\nuse messagebird_async::errors::*;\n\nuse messagebird_async::sms;\n\nuse messagebird_async::sms::*;\n\n\n\nfn main() -> Result<(), MessageBirdError> {\n\n env_logger::init();\n\n\n\n let msisdn_str = std::env::var(\"SMS_RECIPIENT\".to_string())\n\n .expect(\"SMS_RECIPIENT should contain the number without prefix\");\n\n let msisdn: Msisdn = Msisdn::from_str(msisdn_str.as_str())\n\n .expect(\"SMS_RECIPIENT did not contain a valid number\");\n\n\n\n info!(\"example: sending a message\");\n\n let sendable = sms::send::SendParameters::builder()\n\n .payload(\n\n PayloadType::Sms,\n\n Payload::Text(\"fun\".to_string()),\n\n PayloadEncoding::Auto,\n\n )\n\n .origin(AlphaNumeric(\"inbox\".to_string()).into())\n\n .add_recipient(msisdn.into())\n\n //.add_recipient(Recipient::new())\n\n .build();\n\n\n\n let accesskey = AccessKey::from_env()?;\n\n let fut = RequestSend::new(&sendable, &accesskey);\n\n let fut = fut.and_then(|sent_msg: Message| {\n\n info!(\"{:?}\", sent_msg);\n\n futures::future::ok(())\n\n });\n\n let mut core = tokio_core::reactor::Core::new().unwrap();\n\n core.run(fut.map(|_| ()))\n\n}\n\n```\n\n\n", "file_path": "README.md", "rank": 39, "score": 17.26689332135101 }, { "content": " Direction::ReceivedFromMobile => \"mo\",\n\n _ => \"invalid\",\n\n }\n\n }\n\n}\n\n\n\nimpl FromStr for Direction {\n\n type Err = MessageBirdError;\n\n fn from_str(s: &str) -> Result<Self, Self::Err> {\n\n serde_plain::from_str::<Self>(s).map_err(|_e| MessageBirdError::ParseError)\n\n }\n\n}\n\n\n\nimpl ToString for Direction {\n\n fn to_string(&self) -> String {\n\n serde_plain::to_string(self).unwrap()\n\n }\n\n}\n\n\n\n/// Determines the Gateway ID\n", 
"file_path": "src/sms/message.rs", "rank": 41, "score": 16.724069605710387 }, { "content": " .parse()\n\n .expect(\"Failed to parse send query object to hyper::Uri\");\n\n uri\n\n }\n\n fn method(&self) -> hyper::Method {\n\n hyper::Method::POST\n\n }\n\n}\n\n\n\npub struct Builder(SendParameters);\n\n\n\nimpl Builder {\n\n pub fn payload(\n\n mut self,\n\n payload_type: PayloadType,\n\n payload: Payload,\n\n payload_encoding: PayloadEncoding,\n\n ) -> Self {\n\n self.0.payload_type = Some(payload_type);\n\n self.0.payload_encoding = Some(payload_encoding);\n", "file_path": "src/sms/parameter/send.rs", "rank": 42, "score": 16.59001471487318 }, { "content": "}\n\n\n\nimpl From<u64> for Recipient {\n\n fn from(raw: u64) -> Self {\n\n Recipient::new(raw)\n\n }\n\n}\n\n\n\nimpl FromStr for Recipient {\n\n type Err = MessageBirdError;\n\n fn from_str(s: &str) -> Result<Self, Self::Err> {\n\n let s = s.replace('\"', \"\");\n\n s.parse::<u64>()\n\n .and_then(|x: u64| Ok(Recipient::from(x)))\n\n .map_err(|e| {\n\n debug!(\"{:?}\", e);\n\n MessageBirdError::ParseError\n\n })\n\n }\n\n}\n", "file_path": "src/sms/recipient.rs", "rank": 43, "score": 16.5213653931183 }, { "content": "\n\n#[derive(PartialEq, Eq, Clone, Debug, Serialize, Deserialize)]\n\npub struct NotificationQueryShort {\n\n mid: u64,\n\n shortcode: String,\n\n keyword: String,\n\n originator: Originator,\n\n operator: u64, //MCCMNC\n\n #[serde(rename = \"message\")]\n\n payload: Payload, // The body of the SMS message, including the (sub)keyword.\n\n receive_datetime: DateTime, // stamp format YmdHis\n\n}\n\n\n\nimpl FromStr for NotificationQueryShort {\n\n type Err = MessageBirdError;\n\n fn from_str(query: &str) -> Result<Self, Self::Err> {\n\n serde_qs::from_str(query).map_err(|e| {\n\n debug!(\"{:?}\", e);\n\n MessageBirdError::ParseError\n\n })\n", "file_path": "src/sms/notification.rs", "rank": 44, "score": 16.39414492909681 }, { "content": "use serde::de::{self, Deserialize, Deserializer, Unexpected, 
Visitor};\n\nuse serde::ser::{Serialize, Serializer};\n\n\n\nuse std::fmt;\n\nuse std::str::FromStr;\n\n\n\nuse chrono;\n\nuse chrono::offset::{FixedOffset, Local, Offset};\n\nuse std::ops::Deref;\n\n\n\nuse crate::errors::*;\n\n\n\n/// Timestamp\n\n///\n\n/// A timestamp with a fixed offset.\n\n#[derive(Debug, Eq, PartialEq, Hash, Clone)]\n\npub struct DateTime(chrono::DateTime<FixedOffset>);\n\n\n\nimpl Deref for DateTime {\n\n type Target = chrono::DateTime<FixedOffset>;\n", "file_path": "src/sms/datetime.rs", "rank": 45, "score": 16.347361940196947 }, { "content": "#[derive(PartialEq, Eq, Clone, Debug, Serialize, Deserialize)]\n\npub struct NotificationQueryVMN {\n\n id: String,\n\n recipient: Msisdn,\n\n originator: Originator,\n\n #[serde(rename = \"body\")]\n\n payload: Payload,\n\n #[serde(rename = \"createdDatetime\")]\n\n created_datetime: DateTime, // RFC3339 format (Y-m-d\\TH:i:sP)\n\n}\n\n\n\nimpl FromStr for NotificationQueryVMN {\n\n type Err = MessageBirdError;\n\n fn from_str(query: &str) -> Result<Self, Self::Err> {\n\n serde_qs::from_str(query).map_err(|e| {\n\n debug!(\"{:?}\", e);\n\n MessageBirdError::ParseError\n\n })\n\n }\n\n}\n", "file_path": "src/sms/notification.rs", "rank": 46, "score": 16.087108089717578 }, { "content": " pub fn builder() -> Builder {\n\n Builder(SendParameters::default())\n\n }\n\n}\n\n\n\nuse std::fmt;\n\nuse std::string::String;\n\n\n\nimpl fmt::Display for SendParameters {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n let base = String::from(\"https://rest.messagebird.com/messages\");\n\n let query = serde_url_params::to_string(self).unwrap();\n\n write!(f, \"{}?{}\", base, query)\n\n }\n\n}\n\n\n\nimpl Query for SendParameters {\n\n fn uri(&self) -> hyper::Uri {\n\n let uri: hyper::Uri = self\n\n .to_string()\n", "file_path": "src/sms/parameter/send.rs", "rank": 47, "score": 15.842306452212474 }, { "content": "use super::*;\n\n\n\nuse serde_plain;\n\n\n\npub use std::str::FromStr;\n\npub use 
std::string::ToString;\n\n\n\nuse std::ops::Deref;\n\nuse std::slice::Iter;\n\nuse std::time::Duration;\n\n\n\nmod datetime;\n\npub use self::datetime::*;\n\n\n\nmod callbackurl;\n\npub use self::callbackurl::*;\n\n\n\nmod identifier;\n\npub use self::identifier::*;\n\n\n", "file_path": "src/sms/mod.rs", "rank": 48, "score": 15.674269373231896 }, { "content": " }\n\n }\n\n\n\n /// add a new type detail to the set of type details\n\n pub fn add(&mut self, td: TypeDetail) {\n\n self.inner.insert(td);\n\n }\n\n\n\n /// iterate over all type details\n\n pub fn iter(&mut self) -> Iter<TypeDetail> {\n\n self.inner.iter()\n\n }\n\n}\n\n\n\nimpl Default for TypeDetails {\n\n fn default() -> Self {\n\n Self::new()\n\n }\n\n}\n\n\n", "file_path": "src/sms/typedetails.rs", "rank": 49, "score": 15.447633784897597 }, { "content": "impl Originator {\n\n pub fn as_str(&self) -> &str {\n\n match self {\n\n Originator::TelephoneNumber(ref telephonenumber) => telephonenumber.as_str(),\n\n Originator::Other(ref alphanumeric) => alphanumeric.as_str(),\n\n }\n\n }\n\n}\n\n\n\nimpl Default for Originator {\n\n fn default() -> Self {\n\n AlphaNumeric::from_str(\"inbox\").unwrap().into()\n\n }\n\n}\n\n\n\nimpl FromStr for Originator {\n\n type Err = MessageBirdError;\n\n fn from_str(s: &str) -> Result<Self, Self::Err> {\n\n TelephoneNumber::from_str(s)\n\n .and_then(|telephonenumber| Ok(Originator::TelephoneNumber(telephonenumber)))\n", "file_path": "src/sms/originator.rs", "rank": 50, "score": 15.102475532113155 }, { "content": " match self {\n\n TypeDetail::UserDataHeader(x) => (String::from(\"udh\"), x),\n\n // expiermental: _ => #[allow(unreachable_code)] unreachable!(\"tuple conversion\"),\n\n }\n\n }\n\n}\n\n\n\n/// HashSet of type details\n\n///\n\n///\n\n#[derive(Clone, Debug, Eq, PartialEq)]\n\npub struct TypeDetails {\n\n inner: HashSet<TypeDetail>,\n\n}\n\n\n\nimpl TypeDetails {\n\n pub fn new() -> Self {\n\n Self {\n\n // TODO make sure the keys are unique, possibly implement 
Hash trait on TypeDetail manually\n\n inner: HashSet::new(),\n", "file_path": "src/sms/typedetails.rs", "rank": 51, "score": 14.98990366227426 }, { "content": " fn deref(&self) -> &Self::Target {\n\n &self.0\n\n }\n\n}\n\n\n\nuse std::time;\n\nimpl Default for DateTime {\n\n fn default() -> Self {\n\n let systime = time::SystemTime::now();\n\n let datetime_local = chrono::DateTime::<Local>::from(systime);\n\n let tz = chrono::offset::Utc.fix();\n\n let datetime_with_tz = datetime_local.with_timezone(&tz);\n\n DateTime(datetime_with_tz)\n\n }\n\n}\n\n\n\nimpl DateTime {\n\n pub fn now() -> Self {\n\n Self::default()\n\n }\n", "file_path": "src/sms/datetime.rs", "rank": 52, "score": 14.847426043410003 }, { "content": " /// Filters for the payload type, either being `SMS` or `Binary`\n\n pub fn with_payload_type(mut self, payload_type: PayloadType) -> Self {\n\n self.0.payload_type = Some(payload_type);\n\n self\n\n }\n\n\n\n /// Filters for the direction\n\n pub fn with_direction(mut self, direction: Direction) -> Self {\n\n self.0.direction = Some(direction);\n\n self\n\n }\n\n\n\n /// Filters for the status of the messages being listed\n\n pub fn with_status(mut self, status: Status) -> Self {\n\n self.0.status = Some(status);\n\n self\n\n }\n\n\n\n /// Counterpart to `with_origin`\n\n pub fn with_destination<T>(mut self, msisdn: T) -> Self\n", "file_path": "src/sms/parameter/list.rs", "rank": 53, "score": 14.391744245445594 }, { "content": " Payload::Bytes(ref bytes) => {\n\n for byte in bytes {\n\n write!(f, \"{:x} \", byte)?;\n\n }\n\n }\n\n Payload::Text(ref s) => {\n\n for byte in s.as_bytes() {\n\n write!(f, \"{:x} \", byte)?;\n\n }\n\n }\n\n }\n\n Ok(())\n\n }\n\n}\n\n\n\nimpl FromStr for Payload {\n\n type Err = MessageBirdError;\n\n\n\n fn from_str(s: &str) -> Result<Self, Self::Err> {\n\n Ok(Payload::Text(String::from(s)))\n", "file_path": "src/sms/payload.rs", "rank": 54, "score": 14.341799101484824 }, { "content": " static RAW_TYPE_DETAILS_EMPTY: &str = 
r#\"\n\n{}\n\n\"#;\n\n static RAW_TYPE_DETAILS_WITH_UDH: &str = r#\"\n\n{\n\n \"udh\" : \"UserDataHeaderContent:)\"\n\n}\n\n\"#;\n\n\n\n lazy_static! {\n\n static ref DETAILS: TypeDetails = {\n\n let mut details = TypeDetails::new();\n\n details.add(TypeDetail::UserDataHeader(\"some\".to_string()));\n\n details\n\n };\n\n }\n\n\n\n serde_roundtrip!(serde_typedetails_empty, TypeDetails, TypeDetails::new());\n\n deser_roundtrip!(deser_typedetails_empty, TypeDetails, RAW_TYPE_DETAILS_EMPTY);\n\n\n", "file_path": "src/sms/typedetails.rs", "rank": 57, "score": 13.74023634915206 }, { "content": " /// Validate the message contained a fixed term\n\n pub fn contains_term(mut self, term: &str) -> Self {\n\n self.0.searchterms.push(term.to_string());\n\n self\n\n }\n\n\n\n /// Filters for messages that were sent in a certain range\n\n pub fn between(self, start: DateTime, stop: DateTime) -> Self {\n\n self.from(start).until(stop)\n\n }\n\n\n\n /// Defines the timspan since when the message was sent\n\n /// TODO rename, too amibiguous\n\n pub fn from(mut self, start: DateTime) -> Self {\n\n self.0.start = Some(start);\n\n self\n\n }\n\n\n\n /// Until what timestamp the messages we are interested in are filtered\n\n pub fn until(mut self, stop: DateTime) -> Self {\n", "file_path": "src/sms/parameter/list.rs", "rank": 58, "score": 13.647158099094034 }, { "content": "use super::*;\n\n\n\nuse std::fmt;\n\nuse std::string::ToString;\n\n\n\n/// QuerySend is an object that can be passed on to MessageBird API to trigger sending a SMS\n\n#[derive(Debug, Serialize, Eq, PartialEq)]\n\npub struct ViewParameters {\n\n #[serde(rename = \"id\")]\n\n identifier: Identifier,\n\n}\n\n\n\nimpl ViewParameters {\n\n pub fn new<T>(id: T) -> Self\n\n where\n\n T: Into<Identifier>,\n\n {\n\n Self {\n\n identifier: id.into(),\n\n }\n", "file_path": "src/sms/parameter/view.rs", "rank": 59, "score": 13.534140998012589 }, { "content": " Self { errors: vec![] }\n\n }\n\n}\n\n\n\nimpl ServiceErrors {\n\n 
fn new(errors: Vec<ServiceError>) -> Self {\n\n Self { errors }\n\n }\n\n}\n\n\n\n#[allow(dead_code)]\n\nimpl Deref for ServiceErrors {\n\n type Target = Vec<ServiceError>;\n\n fn deref(&self) -> &Self::Target {\n\n &self.errors\n\n }\n\n}\n\n\n\nimpl From<Vec<ServiceError>> for ServiceErrors {\n\n fn from(errors: Vec<ServiceError>) -> Self {\n", "file_path": "src/serviceerror.rs", "rank": 60, "score": 13.470258696453525 }, { "content": " write!(f, \"{}?{}\", base, query)\n\n }\n\n}\n\n\n\nimpl Query for ListParameters {\n\n fn uri(&self) -> hyper::Uri {\n\n let uri: hyper::Uri = self\n\n .to_string()\n\n .parse()\n\n .expect(\"Failed to parse list query object to hyper::Uri\");\n\n uri\n\n }\n\n}\n\n\n\nimpl ListParameters {\n\n /// Construct the `ListParameters` using the builder pattern\n\n pub fn builder() -> Builder {\n\n Builder::default()\n\n }\n\n}\n", "file_path": "src/sms/parameter/list.rs", "rank": 63, "score": 13.177386756616976 }, { "content": "extern crate env_logger;\n\nextern crate futures;\n\nextern crate hyper;\n\nextern crate messagebird_async as messagebird;\n\nextern crate tokio_core;\n\n\n\nuse futures::{future, Future};\n\n\n\nuse hyper::client::HttpConnector;\n\nuse hyper::service::service_fn;\n\nuse hyper::{Body, Client, Method, Request, Response, Server, StatusCode};\n\n\n\nuse std::sync::{Arc, RwLock};\n\n\n\nstatic NOTFOUND: &[u8] = b\"Not Found\";\n\n\n", "file_path": "examples/receive.rs", "rank": 64, "score": 13.025280905887136 }, { "content": "\n\npub struct Builder(ListParameters);\n\n\n\nimpl Default for Builder {\n\n fn default() -> Self {\n\n Builder(ListParameters::default())\n\n }\n\n}\n\n\n\nimpl Builder {\n\n /// Filters for the origin of the message\n\n ///\n\n /// May be either a phone number, contact (unimplemented as of now) or a string.\n\n /// Messages which are sent via the API and do not have an origin defined will have `\"inbox\"`\n\n /// be the default originator.\n\n pub fn with_origin(mut self, originator: 
Originator) -> Self {\n\n self.0.originator = Some(originator);\n\n self\n\n }\n\n\n", "file_path": "src/sms/parameter/list.rs", "rank": 65, "score": 12.810557348054076 }, { "content": " }\n\n pub fn iter(&mut self) -> Iter<Recipient> {\n\n self.items.iter()\n\n }\n\n pub fn add(&mut self, recipient: Recipient) {\n\n self.items.push(recipient)\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n static RAW: &str = r#\"{\n\n \"totalCount\":1,\n\n \"totalSentCount\":1,\n\n \"totalDeliveredCount\":0,\n\n \"totalDeliveryFailedCount\":0,\n\n \"items\":[\n\n {\n\n \"recipient\": 31612345678,\n", "file_path": "src/sms/recipients.rs", "rank": 66, "score": 12.796665662218844 }, { "content": "use super::*;\n\n\n\nuse serde::de::{self, Deserialize, Deserializer, Unexpected, Visitor};\n\nuse serde::ser::{Serialize, Serializer};\n\n\n\nuse std::fmt;\n\nuse std::string::ToString;\n\n\n\n/// Unique message identifier\n\n///\n\n/// Consists 32 alphanumeric characters (may change!)\n\n///\n\n/// Generate by the MessageBird backend on posting the message.\n\n#[derive(Debug, Eq, PartialEq, Hash)]\n\npub struct Identifier(String);\n\n\n\nimpl Default for Identifier {\n\n fn default() -> Self {\n\n Identifier(\"00000000000000000000000000000000\".to_string())\n\n }\n", "file_path": "src/sms/identifier.rs", "rank": 67, "score": 12.75478901636345 }, { "content": "}\n\n\n\nimpl FromStr for DateTime {\n\n type Err = MessageBirdError;\n\n fn from_str(s: &str) -> Result<Self, Self::Err> {\n\n // workaround for messy messagebird API\n\n // see almost_rfc3339 test case\n\n let s_plus_recovered: String = s.replace(' ', \"+\");\n\n debug!(\"fmt datetime {} -> {}\", s, s_plus_recovered);\n\n let s = s_plus_recovered.as_str();\n\n chrono::DateTime::parse_from_rfc3339(s)\n\n .or_else(|_err| {\n\n chrono::naive::NaiveDateTime::parse_from_str(s, \"%Y%m%d%H%M%S\")\n\n .and_then(|naive| Ok(chrono::DateTime::from_utc(naive, FixedOffset::west(0))))\n\n })\n\n .map(|datetime| 
DateTime(datetime))\n\n .map_err(|_e| MessageBirdError::FormatError {\n\n chunk: \"Unexpected or invalid time format\".to_string(),\n\n })\n\n }\n", "file_path": "src/sms/datetime.rs", "rank": 68, "score": 12.70263381501849 }, { "content": " Self { errors }\n\n }\n\n}\n\n\n\nimpl From<ServiceErrors> for Vec<ServiceError> {\n\n fn from(errors: ServiceErrors) -> Self {\n\n errors.errors\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n static RAW_ERRORS: &str = r#\"\n\n{\n\n \"errors\":[\n\n {\n\n \"code\": 2,\n\n \"description\": \"Request not allowed (incorrect access_key)\",\n", "file_path": "src/serviceerror.rs", "rank": 69, "score": 12.545626921884072 }, { "content": "use super::*;\n\nuse std::collections::{hash_set::Iter, HashSet};\n\nuse std::fmt;\n\n\n\nuse serde::de::{Deserialize, Deserializer, MapAccess, Visitor};\n\nuse serde::ser::{Serialize, SerializeMap, Serializer};\n\n\n\n/// Type Details\n\n///\n\n/// Additional message details for the SMS message when passing and querying the MessageBird system.\n\n#[derive(Clone, Debug, Serialize, Deserialize, Eq, PartialEq)]\n\npub enum TypeDetail {\n\n /// additional user data specific bytes\n\n /// FIXME should not be a string but a proper type include serde impl according to https://en.wikipedia.org/wiki/User_Data_Header but is better to be done in a separate crate\n\n #[serde(rename = \"udh\")]\n\n UserDataHeader(String),\n\n}\n\n\n\nuse std::hash::{Hash, Hasher};\n\n\n", "file_path": "src/sms/typedetails.rs", "rank": 70, "score": 12.5363492287208 }, { "content": "}\n\n\n\nimpl ToString for ServiceError {\n\n fn to_string(&self) -> String {\n\n serde_plain::to_string(self).unwrap()\n\n }\n\n}\n\n\n\n/// a collection of service errors\n\n///\n\n/// mostly needed for literal mapping to json\n\n/// TODO: use a hand written serialize/deserialize/visitor impl\n\n/// TODO: to to avoid the addition object\n\n#[derive(Debug, Serialize, Deserialize, PartialEq, Eq)]\n\npub struct ServiceErrors 
{\n\n errors: Vec<ServiceError>,\n\n}\n\n\n\nimpl Default for ServiceErrors {\n\n fn default() -> Self {\n", "file_path": "src/serviceerror.rs", "rank": 71, "score": 12.47320462033425 }, { "content": " type Err = MessageBirdError;\n\n fn from_str(s: &str) -> Result<Self, Self::Err> {\n\n s.parse::<u64>()\n\n .map_err(|e| {\n\n debug!(\"ServiceErrorCode {:?}\", e);\n\n MessageBirdError::ParseError\n\n })\n\n .and_then(|x| Self::from_u64(x).ok_or(MessageBirdError::ParseError))\n\n }\n\n}\n\n\n\nimpl ServiceErrorCode {\n\n #[allow(dead_code)]\n\n #[allow(unreachable_patterns)]\n\n pub fn as_str(&self) -> &str {\n\n match self {\n\n ServiceErrorCode::RequestNotAllowed => \"Request not allowed\",\n\n ServiceErrorCode::MissingParameters => \"Missing params\",\n\n ServiceErrorCode::InvalidParameters => \"Invalid params\",\n\n ServiceErrorCode::NotFound => \"Not found\",\n", "file_path": "src/serviceerror.rs", "rank": 72, "score": 12.441257529107126 }, { "content": " direction: None,\n\n limit: None,\n\n offset: None,\n\n searchterms: vec![],\n\n payload_type: None,\n\n contact_id: None,\n\n status: None,\n\n start: None,\n\n end: None,\n\n }\n\n }\n\n}\n\n\n\nuse std::fmt;\n\nuse std::string::String;\n\n\n\nimpl fmt::Display for ListParameters {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n let base = String::from(\"https://rest.messagebird.com/messages\");\n\n let query = serde_url_params::to_string(self).unwrap();\n", "file_path": "src/sms/parameter/list.rs", "rank": 74, "score": 12.274916790295714 }, { "content": " let accesskey = AccessKey::from_env()?;\n\n let fut = RequestMessageList::new(&q, &accesskey);\n\n let fut = fut\n\n .and_then(|msgs: MessageList| {\n\n info!(\"{:?}\", msgs);\n\n futures::future::ok(())\n\n })\n\n .map_err(|e| {\n\n debug!(\"err: {:?}\", e);\n\n e\n\n });\n\n let mut core = tokio_core::reactor::Core::new().unwrap();\n\n core.run(fut.map(|_| ()))\n\n}\n", "file_path": "examples/list.rs", "rank": 75, "score": 
12.219747050854458 }, { "content": "}\n\n\n\nimpl<'de> Deserialize<'de> for Payload {\n\n fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>\n\n where\n\n D: Deserializer<'de>,\n\n {\n\n deserializer.deserialize_str(PayloadVisitor)\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n static RAW: &str = r#\"\n\n\"16483910\"\n\n\"#;\n\n deser_roundtrip!(payload_deser, Payload, RAW);\n\n serde_roundtrip!(payload_serde, Payload, Payload::default());\n\n}\n", "file_path": "src/sms/payload.rs", "rank": 76, "score": 12.174143519107764 }, { "content": "use super::*;\n\n\n\n/// Determines if the direction of the message\n\n///\n\n/// Mostly useful for filtering messages with `ListParamters`/`RequestMessageList`\n\n#[derive(Clone, Debug, Serialize, Deserialize, Eq, PartialEq)]\n\n#[serde(rename = \"direction\")]\n\npub enum Direction {\n\n #[serde(rename = \"mt\")]\n\n SendToMobile,\n\n #[serde(rename = \"mo\")]\n\n ReceivedFromMobile,\n\n #[serde(rename = \"invalid\")]\n\n Invalid,\n\n}\n\n\n\nimpl Direction {\n\n pub fn as_str(&self) -> &str {\n\n match self {\n\n Direction::SendToMobile => \"mt\",\n", "file_path": "src/sms/message.rs", "rank": 77, "score": 12.122715224239451 }, { "content": "// pub type MessageBirdResult<T> = std::result::Result<T, MessageBirdError>;\n\nuse crate::serviceerror::*;\n\n\n\n#[derive(Debug, Fail)]\n\npub enum MessageBirdError {\n\n #[fail(display = \"invalid json format: {}\", chunk)]\n\n FormatError { chunk: String },\n\n\n\n #[fail(display = \"invalid paramter for type: {}\", msg)]\n\n TypeError { msg: String },\n\n\n\n #[fail(display = \"service return code\")]\n\n ServiceError(Vec<ServiceError>),\n\n\n\n #[fail(display = \"parsing failed\")]\n\n ParseError,\n\n\n\n #[fail(display = \"sending request failed\")]\n\n RequestError,\n\n\n\n #[fail(display = \"did not find a valid access key {}\", msg)]\n\n AccessKeyError { msg: String },\n\n}\n", "file_path": "src/errors.rs", "rank": 78, "score": 11.96214763288269 
}, { "content": " where\n\n T: Into<QueryRecipient>,\n\n {\n\n self.0.recipient = Some(msisdn.into());\n\n self\n\n }\n\n\n\n /// The number of messages to skip before listing.\n\n /// Allows it to be used as paginating.\n\n pub fn skip(mut self, skip: u32) -> Self {\n\n self.0.offset = Some(skip as usize);\n\n self\n\n }\n\n\n\n /// Limits the number of returned messages\n\n pub fn count(mut self, upper_limit: u32) -> Self {\n\n self.0.limit = Some(upper_limit as usize);\n\n self\n\n }\n\n\n", "file_path": "src/sms/parameter/list.rs", "rank": 79, "score": 11.452156575841096 }, { "content": " }\n\n\n\n fn id(&self) -> &Identifier {\n\n &self.identifier\n\n }\n\n}\n\n\n\nimpl fmt::Display for ViewParameters {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n let base = String::from(\"https://rest.messagebird.com/messages\");\n\n //let query = serde_url_params::to_string(self).unwrap();\n\n let query = self.id().to_string();\n\n write!(f, \"{}/{}\", base, query)\n\n }\n\n}\n\n\n\nimpl Query for ViewParameters {\n\n fn uri(&self) -> hyper::Uri {\n\n let uri: hyper::Uri = self.to_string().parse().unwrap();\n\n uri\n", "file_path": "src/sms/parameter/view.rs", "rank": 80, "score": 11.397900320303398 }, { "content": " serde_roundtrip!(serde_typedetails_with_udh, TypeDetails, DETAILS.clone());\n\n deser_roundtrip!(\n\n deser_typedetails_with_udh,\n\n TypeDetails,\n\n RAW_TYPE_DETAILS_WITH_UDH\n\n );\n\n\n\n serde_roundtrip!(\n\n serde_typedetail_udh,\n\n TypeDetail,\n\n TypeDetail::UserDataHeader(\"some\".to_string())\n\n );\n\n\n\n}\n", "file_path": "src/sms/typedetails.rs", "rank": 81, "score": 11.301412402842145 }, { "content": "use super::*;\n\n\n\nuse std::ops::Deref;\n\n\n\nuse num::{FromPrimitive, ToPrimitive};\n\nuse std::str::FromStr;\n\n\n\nuse serde::de::{self, Deserialize, Deserializer, Unexpected, Visitor};\n\nuse serde::ser::{Serialize, Serializer};\n\nuse std::fmt;\n\n\n\n/// error codes\n\n///\n\n/// Error codes as returned as part of a 
response from the service in the payload.\n\n/// These are NOT http status codes.\n\n#[derive(Primitive, Debug, PartialEq, Eq, Clone)]\n\npub enum ServiceErrorCode {\n\n RequestNotAllowed = 2,\n\n MissingParameters = 9,\n\n InvalidParameters = 10,\n\n NotFound = 20,\n\n BadRequest = 21,\n\n NotEnoughBalance = 25,\n\n EndpointNotFound = 98,\n\n InternalError = 99,\n\n}\n\n\n\n/// Visitor for parsing the ServiceErrorCode from integers\n", "file_path": "src/serviceerror.rs", "rank": 82, "score": 11.181090945837424 }, { "content": " self.0.payload = payload;\n\n self\n\n }\n\n pub fn report_url(mut self, report_url: CallbackUrl) -> Self {\n\n self.0.report_url = Some(report_url);\n\n self\n\n }\n\n pub fn origin(mut self, originator: Originator) -> Self {\n\n self.0.originator = originator;\n\n self\n\n }\n\n pub fn add_recipient(mut self, recipient: QueryRecipient) -> Self {\n\n self.0.recipients.push(recipient);\n\n self\n\n }\n\n pub fn build(self) -> SendParameters {\n\n self.0\n\n }\n\n}\n\n\n", "file_path": "src/sms/parameter/send.rs", "rank": 84, "score": 10.515364052108339 }, { "content": "// }\n\n\n\nimpl ToString for Originator {\n\n fn to_string(&self) -> String {\n\n serde_plain::to_string(self).unwrap()\n\n }\n\n}\n\n\n\nimpl From<TelephoneNumber> for Originator {\n\n fn from(telephonenumber: TelephoneNumber) -> Self {\n\n Originator::TelephoneNumber(telephonenumber)\n\n }\n\n}\n\n\n\nimpl From<AlphaNumeric> for Originator {\n\n fn from(alphanumeric: AlphaNumeric) -> Self {\n\n Originator::Other(alphanumeric)\n\n }\n\n}\n\n\n", "file_path": "src/sms/originator.rs", "rank": 85, "score": 10.194244369142407 }, { "content": "impl Default for SendParameters {\n\n fn default() -> Self {\n\n Self {\n\n payload_type: None,\n\n originator: Originator::default(),\n\n payload: Payload::Text(\"This is a default message\".to_string()),\n\n reference: None,\n\n report_url: None,\n\n validity: None,\n\n gateway: None,\n\n details: vec![],\n\n payload_encoding: None,\n\n 
class: None,\n\n scheduled_datetime: None,\n\n recipients: vec![],\n\n }\n\n }\n\n}\n\n\n\nimpl SendParameters {\n", "file_path": "src/sms/parameter/send.rs", "rank": 86, "score": 10.143886697454214 }, { "content": "impl Serialize for TypeDetails {\n\n fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>\n\n where\n\n S: Serializer,\n\n {\n\n let mut map = serializer.serialize_map(Some(self.inner.len()))?;\n\n for x in &self.inner {\n\n let (k, v) = x.clone().as_tuple();\n\n map.serialize_entry(&k, &v)?;\n\n }\n\n map.end()\n\n }\n\n}\n\n\n", "file_path": "src/sms/typedetails.rs", "rank": 87, "score": 10.136122347788667 }, { "content": "\n\n let url = Url::parse(RAW).expect(\"Expected a valid url\");\n\n // TODO the timestamp is modified due to the `+` sign which is replaced by a space character,\n\n // XXX as such parsing with rfc3339 fails\n\n let rh =\n\n serde_qs::from_str(url.query().unwrap()).expect(\"Failed to tokenize query string\");\n\n assert_eq!(lh, rh);\n\n }\n\n }\n\n mod short {\n\n use super::*;\n\n use crate::sms::DateTime;\n\n\n\n static RAW: &str = r#\"http://your-own.url/script?mid=123456789&shortcode=1008&keyword=MESSAGEBIRD&originator=31612345678&operator=20401&message=This+is+an+incoming+message&receive_datetime=20160503142657\"#;\n\n #[test]\n\n fn de() {\n\n let lh = NotificationQueryShort {\n\n mid: 123456789,\n\n shortcode: \"1008\".to_string(),\n\n keyword: \"MESSAGEBIRD\".to_string(),\n", "file_path": "src/sms/notification.rs", "rank": 88, "score": 10.130602452719735 }, { "content": " set.add(td);\n\n }\n\n\n\n Ok(set)\n\n }\n\n}\n\n\n\nimpl<'de> Deserialize<'de> for TypeDetails {\n\n fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>\n\n where\n\n D: Deserializer<'de>,\n\n {\n\n deserializer.deserialize_map(TypeDetailsVisitor)\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n", "file_path": "src/sms/typedetails.rs", "rank": 89, "score": 10.095373934120044 }, { "content": "//! 
Notification via a registered callback from message bird\n\n//!\n\n//! Message bird calls a callback url as specified with a certain\n\n//! set query parameters, virtual mobile number (VMN) and shortcode.\n\n//! Both are represented here as structs which can be easily deserialized.\n\n//!\n\n//! messagebird documentation at\n\n//! https://developers.messagebird.com/docs/sms-messaging#receive-a-message\n\n\n\n// trait CallbackSpecifier {}\n\n\n\n// pub struct ShortCode;\n\n// impl CallbackSpecifier for ShortCode {}\n\n\n\n// pub struct VirtualMobileNumber;\n\n// impl CallbackSpecifier for VirtualMobileNumber {}\n\n\n\nuse super::*;\n\nuse std::str::FromStr;\n\n\n", "file_path": "src/sms/notification.rs", "rank": 90, "score": 10.049607189192233 }, { "content": "## Features\n\n\n\nCurrently the planned features only include to send SMS and query SMS stati.\n\n\n\n\n\n## RoadMap\n\n\n\n- [x] serde impl for all relevant datatypes\n\n- [x] roundtrip decode encode tests\n\n- [x] use [`crate serde_plain`](https://docs.rs/serde_plain/0.3.0/serde_plain/) for `trait ToString` and `trait FromStr` for objects which are used in query filters and payload www urlencode\n\n- [x] future for SMS sending and retrieval based on [hyper.rs](https://hyper.rs)\n\n- [x] send SMS example using [tokio](https://tokio.rs) as executor\n\n- [x] future for listing SMS\n\n- [x] listing SMS examples using [tokio](https://tokio.rs) as executor\n\n- [x] write documentation\n\n- [x] future for notification callback on SMS reception\n\n- [x] callback example using [hyper](https://hyper.rs) (and thus imlicitly [tokio](https://tokio.rs)) as executor\n\n- [x] convert all service API return errors to typed errors\n\n- [ ] improve error information content\n\n- [ ] create sms callback notification test based on the receive example running on [clever cloud](clever-cloud.com) (the creators of [sozu](https://www.sozu.io/))\n\n\n\n## MessageBird APIs\n\n\n\n- [x] 
[`SMS`](https://rest.messagebird.com/messages)\n\n- [ ] [`Contacts`](https://rest.messagebird.com/contacts) (low prio)\n\n- [ ] [`MMS`](https://rest.messagebird.com/mms) (*)\n\n- [ ] [`Conversation`](https://developers.messagebird.com/docs/conversations) (*)\n\n- [ ] [`VoiceMessaging`](https://developers.messagebird.com/docs/voice-messaging) (*)\n\n- [ ] [`VoiceCalling`](https://developers.messagebird.com/docs/voice) (*)\n\n\n\n\\* = not planned, unless external contributors step up\n\n\n\n## License\n\n\n\nMIT or Apache-2.0\n\n\n\nIf neither of those work for you, feel free to request so via a github issue.\n\n\n\n## Donations\n\n\n\nSince I am not affiliated with MessageBird in any kind (other than being their customer)\n\nI'd be delighted if you'd leave a tipp to keep the automated send/receive routines going.\n\n\n\n[![LiberaPayButton](https://liberapay.com/assets/widgets/donate.svg)](https://liberapay.com/drahnr/donate)\n", "file_path": "README.md", "rank": 91, "score": 9.801616608237916 }, { "content": " }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n mod vmn {\n\n use super::*;\n\n\n\n static RAW: &str = r#\"http://your-own.url/script?id=e8077d803532c0b5937c639b60216938&recipient=31642500190&originator=31612345678&body=This+is+an+incoming+message&createdDatetime=2016-05-03T14:26:57+00:00\"#;\n\n #[test]\n\n fn de() {\n\n let lh = NotificationQueryVMN {\n\n id: \"e8077d803532c0b5937c639b60216938\".to_string(),\n\n recipient: Msisdn::try_from(31642500190).unwrap(),\n\n originator: 31612345678.into(),\n\n payload: Payload::from_str(\"This is an incoming message\").unwrap(),\n\n created_datetime: DateTime::from_str(\"2016-05-03T14:26:57+00:00\").unwrap(),\n\n };\n", "file_path": "src/sms/notification.rs", "rank": 92, "score": 9.773141694084819 }, { "content": " }\n\n}\n\n\n\nimpl ToString for Msisdn {\n\n fn to_string(&self) -> String {\n\n serde_plain::to_string(self).unwrap()\n\n }\n\n}\n\n\n\n/// Deliver Status of a SMS 
message\n\n#[derive(Copy, Clone, Debug, Serialize, Deserialize, Eq, PartialEq)]\n\n#[serde(rename_all = \"snake_case\")]\n\npub enum Status {\n\n /// not defined by the spec (you should never see this)\n\n Unknown,\n\n /// tracked in message birds system, but not delivered yet\n\n Scheduled,\n\n /// Sent, but not on the device just yet\n\n Sent,\n\n /// TODO\n", "file_path": "src/sms/recipient.rs", "rank": 93, "score": 9.66303549549734 }, { "content": "use super::*;\n\n\n\nuse std::fmt;\n\n\n\nuse serde::de::{self, Deserialize, Deserializer, Unexpected, Visitor};\n\nuse serde::ser::{Serialize, Serializer};\n\n\n\n/// Notification Url\n\n///\n\n/// An Url to be called on certain events directly from the MessageBird infrastructure\n\n/// with some json.\n\n#[derive(Debug, Eq, PartialEq)]\n\npub struct CallbackUrl(Url);\n\n\n\nimpl Serialize for CallbackUrl {\n\n fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>\n\n where\n\n S: Serializer,\n\n {\n\n serializer.serialize_str(self.0.as_str())\n\n }\n\n}\n\n\n", "file_path": "src/sms/callbackurl.rs", "rank": 94, "score": 9.615814385389102 }, { "content": " } else {\n\n Err(MessageBirdError::FormatError {\n\n chunk: String::from(s),\n\n })\n\n }\n\n }\n\n}\n\n\n\n/// Origin of a message\n\n///\n\n/// Defines the source of a message, which can either be an arbitrary\n\n/// alphanumeric string or a telephone number\n\n#[derive(Debug, Clone, Serialize, Eq, PartialEq)]\n\n#[serde(rename_all = \"camelCase\")]\n\n#[serde(untagged)]\n\npub enum Originator {\n\n TelephoneNumber(TelephoneNumber),\n\n Other(AlphaNumeric),\n\n}\n\n\n", "file_path": "src/sms/originator.rs", "rank": 95, "score": 9.587700859311923 }, { "content": "use super::*;\n\n\n\nuse serde::de::{self, Deserialize, Deserializer, Unexpected, Visitor};\n\nuse serde::ser::{Serialize, Serializer};\n\n\n\nuse std::fmt;\n\n\n\n/// SMS encoding enum\n\n///\n\n/// Defines how to interpret the message encoding for text messages.\n\n/// For binary SMS see 
`PayloadType`\n\n#[derive(Clone, Debug, Serialize, Deserialize, Eq, PartialEq)]\n\n#[serde(rename_all = \"camelCase\")]\n\n#[serde(rename = \"encoding\")]\n\npub enum PayloadEncoding {\n\n Plain,\n\n Unicode,\n\n Auto,\n\n}\n\n\n", "file_path": "src/sms/payload.rs", "rank": 96, "score": 9.496568088040894 }, { "content": " static RAW: &str = r#\"2016-05-03T14:26:57 00:00\"#;\n\n #[test]\n\n fn deserialize() {\n\n let datetime = DateTime::from_str(RAW).expect(\"Failed to parse funny format\");\n\n println!(\"Time parse from {} is {:?}\", RAW, datetime);\n\n }\n\n }\n\n mod custom1 {\n\n use super::*;\n\n static RAW: &str = r#\"20160503142657\"#;\n\n #[test]\n\n fn deserialize() {\n\n let datetime = DateTime::from_str(RAW).expect(\"Failed to parse funny format\");\n\n println!(\"Time parse from {} is {:?}\", RAW, datetime);\n\n }\n\n }\n\n}\n", "file_path": "src/sms/datetime.rs", "rank": 97, "score": 9.487366544117679 }, { "content": " {\n\n ServiceErrorCode::from_i64(value)\n\n .ok_or(de::Error::invalid_value(Unexpected::Signed(value), &self))\n\n }\n\n\n\n fn visit_str<E>(self, value: &str) -> Result<Self::Value, E>\n\n where\n\n E: de::Error,\n\n {\n\n ServiceErrorCode::from_str(value)\n\n .map_err(|_e| de::Error::invalid_value(Unexpected::Str(value), &self))\n\n }\n\n}\n\n\n\nimpl<'de> Deserialize<'de> for ServiceErrorCode {\n\n fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>\n\n where\n\n D: Deserializer<'de>,\n\n {\n\n deserializer.deserialize_u64(ErrorCodeVisitor)\n", "file_path": "src/serviceerror.rs", "rank": 98, "score": 9.465838855150684 }, { "content": " fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>\n\n where\n\n D: Deserializer<'de>,\n\n {\n\n deserializer.deserialize_u64(MessageClassVisitor)\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n static RAW: &str = r#\"\n\n1\n\n\"#;\n\n\n\n deser_roundtrip!(messageclass_deser, MessageClass, RAW);\n\n serde_roundtrip!(messageclass_serde, MessageClass, 
MessageClass::default());\n\n}\n", "file_path": "src/sms/messageclass.rs", "rank": 99, "score": 9.432980270089974 } ]
Rust
crate/object_play/src/system/object_acceleration_system.rs
Lighty0410/autexousious
99d142d8fdbf2076f3fd929f61b8140d47cf6b86
use amethyst::{ ecs::{Join, Read, ReadStorage, System, World, WriteStorage}, shred::{ResourceId, SystemData}, shrev::{EventChannel, ReaderId}, }; use derivative::Derivative; use derive_new::new; use game_input_model::play::ControllerInput; use kinematic_model::config::{ ObjectAcceleration, ObjectAccelerationKind, ObjectAccelerationValue, ObjectAccelerationValueExpr, ObjectAccelerationValueMultiplier, Velocity, }; use mirrored_model::play::Mirrored; use sequence_model::play::SequenceUpdateEvent; #[derive(Debug, Default, new)] pub struct ObjectAccelerationSystem { #[new(default)] sequence_update_event_rid: Option<ReaderId<SequenceUpdateEvent>>, } #[derive(Derivative, SystemData)] #[derivative(Debug)] pub struct ObjectAccelerationSystemData<'s> { #[derivative(Debug = "ignore")] pub sequence_update_ec: Read<'s, EventChannel<SequenceUpdateEvent>>, #[derivative(Debug = "ignore")] pub controller_inputs: ReadStorage<'s, ControllerInput>, #[derivative(Debug = "ignore")] pub mirroreds: ReadStorage<'s, Mirrored>, #[derivative(Debug = "ignore")] pub object_accelerations: ReadStorage<'s, ObjectAcceleration>, #[derivative(Debug = "ignore")] pub velocities: WriteStorage<'s, Velocity<f32>>, } impl ObjectAccelerationSystem { fn update_velocity( controller_input: Option<ControllerInput>, mirrored: Option<Mirrored>, object_acceleration: ObjectAcceleration, velocity: &mut Velocity<f32>, ) { let negate = mirrored.map(|mirrored| mirrored.0).unwrap_or(false); let acc_x = Self::acceleration_value(controller_input, object_acceleration.x); if negate { velocity[0] -= acc_x; } else { velocity[0] += acc_x; } velocity[1] += Self::acceleration_value(controller_input, object_acceleration.y); velocity[2] += Self::acceleration_value(controller_input, object_acceleration.z); } fn acceleration_value( controller_input: Option<ControllerInput>, object_acceleration_value: ObjectAccelerationValue, ) -> f32 { match object_acceleration_value { ObjectAccelerationValue::Const(value) => value, 
ObjectAccelerationValue::Expr(ObjectAccelerationValueExpr { multiplier, value }) => { match multiplier { ObjectAccelerationValueMultiplier::One => value, ObjectAccelerationValueMultiplier::XAxis => { let multiplier = controller_input .map(|controller_input| controller_input.x_axis_value.abs()) .unwrap_or(0.); multiplier * value } ObjectAccelerationValueMultiplier::ZAxis => { let multiplier = controller_input .map(|controller_input| controller_input.z_axis_value) .unwrap_or(0.); multiplier * value } } } } } } impl<'s> System<'s> for ObjectAccelerationSystem { type SystemData = ObjectAccelerationSystemData<'s>; fn run( &mut self, ObjectAccelerationSystemData { sequence_update_ec, controller_inputs, mirroreds, object_accelerations, mut velocities, }: Self::SystemData, ) { sequence_update_ec .read( self.sequence_update_event_rid .as_mut() .expect("Expected `sequence_update_event_rid` to exist."), ) .for_each(|ev| { if let SequenceUpdateEvent::SequenceBegin { entity, .. } | SequenceUpdateEvent::FrameBegin { entity, .. 
} = ev { let entity = *entity; let object_acceleration = object_accelerations.get(entity); let velocity = velocities.get_mut(entity); let controller_input = controller_inputs.get(entity).copied(); let mirrored = mirroreds.get(entity).copied(); if let (Some(object_acceleration), Some(velocity)) = (object_acceleration, velocity) { if object_acceleration.kind == ObjectAccelerationKind::Once { Self::update_velocity( controller_input, mirrored, *object_acceleration, velocity, ); } } } }); ( &object_accelerations, &mut velocities, controller_inputs.maybe(), mirroreds.maybe(), ) .join() .filter(|(object_acceleration, _, _, _)| { object_acceleration.kind == ObjectAccelerationKind::Continuous }) .for_each( |(object_acceleration, velocity, controller_input, mirrored)| { Self::update_velocity( controller_input.copied(), mirrored.copied(), *object_acceleration, velocity, ); }, ); } fn setup(&mut self, world: &mut World) { Self::SystemData::setup(world); self.sequence_update_event_rid = Some( world .fetch_mut::<EventChannel<SequenceUpdateEvent>>() .register_reader(), ); } }
use amethyst::{ ecs::{Join, Read, ReadStorage, System, World, WriteStorage}, shred::{ResourceId, SystemData}, shrev::{EventChannel, ReaderId}, }; use derivative::Derivative; use derive_new::new; use game_input_model::play::ControllerInput; use kinematic_model::config::{ ObjectAcceleration, ObjectAccelerationKind, ObjectAccelerationValue, ObjectAccelerationValueExpr, ObjectAccelerationValueMultiplier, Velocity, }; use mirrored_model::play::Mirrored; use sequence_model::play::SequenceUpdateEvent; #[derive(Debug, Default, new)] pub struct ObjectAccelerationSystem { #[new(default)] sequence_update_event_rid: Option<ReaderId<SequenceUpdateEvent>>, } #[derive(Derivative, SystemData)] #[derivative(Debug)] pub struct ObjectAccelerationSystemData<'s> { #[derivative(Debug = "ignore")] pub sequence_update_ec: Read<'s, EventChannel<SequenceUpdateEvent>>, #[derivative(Debug = "ignore")] pub controller_inputs: ReadStorage<'s, ControllerInput>, #[derivative(Debug = "ignore")] pub mirroreds: ReadStorage<'s, Mirrored>, #[derivative(Debug = "ignore")] pub object_accelerations: ReadStorage<'s, ObjectAcceleration>, #[derivative(Debug = "ignore")] pub velocities: WriteStorage<'s, Velocity<f32>>, } impl ObjectAccelerationSystem {
fn acceleration_value( controller_input: Option<ControllerInput>, object_acceleration_value: ObjectAccelerationValue, ) -> f32 { match object_acceleration_value { ObjectAccelerationValue::Const(value) => value, ObjectAccelerationValue::Expr(ObjectAccelerationValueExpr { multiplier, value }) => { match multiplier { ObjectAccelerationValueMultiplier::One => value, ObjectAccelerationValueMultiplier::XAxis => { let multiplier = controller_input .map(|controller_input| controller_input.x_axis_value.abs()) .unwrap_or(0.); multiplier * value } ObjectAccelerationValueMultiplier::ZAxis => { let multiplier = controller_input .map(|controller_input| controller_input.z_axis_value) .unwrap_or(0.); multiplier * value } } } } } } impl<'s> System<'s> for ObjectAccelerationSystem { type SystemData = ObjectAccelerationSystemData<'s>; fn run( &mut self, ObjectAccelerationSystemData { sequence_update_ec, controller_inputs, mirroreds, object_accelerations, mut velocities, }: Self::SystemData, ) { sequence_update_ec .read( self.sequence_update_event_rid .as_mut() .expect("Expected `sequence_update_event_rid` to exist."), ) .for_each(|ev| { if let SequenceUpdateEvent::SequenceBegin { entity, .. } | SequenceUpdateEvent::FrameBegin { entity, .. 
} = ev { let entity = *entity; let object_acceleration = object_accelerations.get(entity); let velocity = velocities.get_mut(entity); let controller_input = controller_inputs.get(entity).copied(); let mirrored = mirroreds.get(entity).copied(); if let (Some(object_acceleration), Some(velocity)) = (object_acceleration, velocity) { if object_acceleration.kind == ObjectAccelerationKind::Once { Self::update_velocity( controller_input, mirrored, *object_acceleration, velocity, ); } } } }); ( &object_accelerations, &mut velocities, controller_inputs.maybe(), mirroreds.maybe(), ) .join() .filter(|(object_acceleration, _, _, _)| { object_acceleration.kind == ObjectAccelerationKind::Continuous }) .for_each( |(object_acceleration, velocity, controller_input, mirrored)| { Self::update_velocity( controller_input.copied(), mirrored.copied(), *object_acceleration, velocity, ); }, ); } fn setup(&mut self, world: &mut World) { Self::SystemData::setup(world); self.sequence_update_event_rid = Some( world .fetch_mut::<EventChannel<SequenceUpdateEvent>>() .register_reader(), ); } }
fn update_velocity( controller_input: Option<ControllerInput>, mirrored: Option<Mirrored>, object_acceleration: ObjectAcceleration, velocity: &mut Velocity<f32>, ) { let negate = mirrored.map(|mirrored| mirrored.0).unwrap_or(false); let acc_x = Self::acceleration_value(controller_input, object_acceleration.x); if negate { velocity[0] -= acc_x; } else { velocity[0] += acc_x; } velocity[1] += Self::acceleration_value(controller_input, object_acceleration.y); velocity[2] += Self::acceleration_value(controller_input, object_acceleration.z); }
function_block-full_function
[ { "content": "#[derive(Clone, Copy, Debug, Default, Hash, PartialEq, Eq)]\n\nstruct SessionCodeId(pub u64);\n\n\n\n/// Mappings from `SessionCode` to `NetSessionDevices`, and `SocketAddr` to `SessionCode`.\n\n#[derive(Clone, Debug, Default, new)]\n\npub struct SessionDeviceMappings {\n\n /// Mappings from `SessionCode` to `SessionCodeId`.\n\n #[new(default)]\n\n session_code_to_id: BiMap<SessionCode, SessionCodeId>,\n\n /// Mappings from `SessionCodeId` to `NetSessionDevices`.\n\n #[new(default)]\n\n session_code_id_to_devices: HashMap<SessionCodeId, NetSessionDevices>,\n\n /// Mappings from `SocketAddr` to `SessionCodeId`.\n\n #[new(default)]\n\n socket_addr_to_session_code_id: HashMap<SocketAddr, SessionCodeId>,\n\n}\n\n\n\nimpl SessionDeviceMappings {\n\n /// Returns a `SessionDeviceMappings` with pre-allocated capacity.\n\n ///\n\n /// The mappings are guaranteed to hold `capacity` elements without re-allocating.\n", "file_path": "app/session_server/src/model/session_device_mappings.rs", "rank": 0, "score": 131432.6823382326 }, { "content": "#[derive(StructOpt, Debug)]\n\n#[structopt(name = \"Example 01: Read and Exit\")]\n\nstruct Opt {\n\n #[structopt(\n\n short = \"t\",\n\n long = \"timeout\",\n\n help = \"Timeout to automatically close the application\"\n\n )]\n\n timeout: Option<u64>,\n\n}\n\n\n\nimpl<'a, 'b> State<GameData<'a, 'b>, StateEvent> for EmptyState {\n\n fn on_start(&mut self, _data: StateData<'_, GameData<'_, '_>>) {\n\n println!(\"Reading from stdin. 
Type 'exit' to quit.\");\n\n }\n\n\n\n fn update(\n\n &mut self,\n\n data: StateData<'_, GameData<'_, '_>>,\n\n ) -> Trans<GameData<'a, 'b>, StateEvent> {\n\n data.data.update(&data.world);\n\n Trans::None\n\n }\n\n}\n\n\n", "file_path": "crate/stdio_input/examples/01_read_and_exit.rs", "rank": 1, "score": 128058.3864727256 }, { "content": "#[derive(Debug)]\n\nstruct SequenceUpdateParams<'p> {\n\n entity: Entity,\n\n wait_sequence_handle: &'p WaitSequenceHandle,\n\n frame_index_clock: &'p mut FrameIndexClock,\n\n frame_wait_clock: &'p mut FrameWaitClock,\n\n sequence_status: &'p mut SequenceStatus,\n\n}\n\n\n\nimpl SequenceUpdateSystem {\n\n fn start_sequence(\n\n wait_sequence_assets: &AssetStorage<WaitSequence>,\n\n SequenceUpdateParams {\n\n entity: _entity,\n\n wait_sequence_handle,\n\n frame_index_clock,\n\n frame_wait_clock,\n\n sequence_status,\n\n }: SequenceUpdateParams,\n\n ) {\n\n frame_index_clock.reset();\n", "file_path": "crate/sequence_play/src/system/sequence_update_system.rs", "rank": 2, "score": 126803.40794640625 }, { "content": "#[derive(Debug)]\n\nstruct EmptyState;\n\n\n", "file_path": "crate/stdio_input/examples/01_read_and_exit.rs", "rank": 3, "score": 125743.31487715026 }, { "content": "/// Generates the `SequenceComponentData` implementation.\n\npub fn sequence_component_data_impl(\n\n mut ast: DeriveInput,\n\n args: ComponentDataAttributeArgs,\n\n) -> TokenStream {\n\n let ComponentDataAttributeArgs {\n\n component_path,\n\n component_copy,\n\n to_owned_fn,\n\n } = args;\n\n\n\n ast.assert_fields_unit();\n\n derive_append(&mut ast);\n\n fields_append(&mut ast, &component_path);\n\n\n\n let type_name = &ast.ident;\n\n let (impl_generics, ty_generics, where_clause) = ast.generics.split_for_impl();\n\n let to_owned_fn_impl = to_owned_fn_impl(component_copy, to_owned_fn);\n\n\n\n let fn_new_doc = format!(\"Returns a new `{}`.\", type_name);\n\n\n", "file_path": "crate/sequence_model_derive/src/sequence_component_data_impl.rs", "rank": 4, 
"score": 123148.53060103007 }, { "content": "/// Generates the `FrameComponentData` implementation.\n\npub fn frame_component_data_impl(\n\n mut ast: DeriveInput,\n\n args: ComponentDataAttributeArgs,\n\n) -> TokenStream {\n\n let ComponentDataAttributeArgs {\n\n component_path,\n\n component_copy,\n\n to_owned_fn,\n\n } = args;\n\n\n\n ast.assert_fields_unit();\n\n derive_append(&mut ast);\n\n fields_append(&mut ast, &component_path);\n\n\n\n let type_name = &ast.ident;\n\n let to_owned_fn_impl = to_owned_fn_impl(component_copy, to_owned_fn);\n\n\n\n let fn_new_doc = format!(\"Returns a new `{}`.\", type_name);\n\n\n\n let token_stream_2 = quote! {\n", "file_path": "crate/sequence_model_derive/src/frame_component_data_impl.rs", "rank": 5, "score": 123148.53060103007 }, { "content": "/// Workaround for `SystemData` while GAT is not yet available.\n\npub trait MapperSystemData<'s> {\n\n /// `SystemData` to read from the world.\n\n type SystemData: SystemData<'s>;\n\n}\n\n\n\nimpl<'s, T> MapperSystemData<'s> for T\n\nwhere\n\n T: SystemData<'s>,\n\n{\n\n type SystemData = T;\n\n}\n", "file_path": "crate/stdio_spi/src/mapper_system_data.rs", "rank": 6, "score": 122995.32316447998 }, { "content": "/// Trait of different asset preview widget spawn behaviours.\n\npub trait PreviewSpawner<'s> {\n\n type SystemData: SystemData<'s>;\n\n const ASSET_TYPE: AssetType;\n\n\n\n fn spawn_preview_entities(\n\n apw_previews: &mut WriteStorage<'_, ApwPreview>,\n\n asset_selection_parents: &mut WriteStorage<'_, AssetSelectionParent>,\n\n preview_spawn_resources: &mut Self::SystemData,\n\n ash_entity: Entity,\n\n apw_main_entity: Option<Entity>,\n\n asset_selection: AssetSelection,\n\n );\n\n}\n\n\n\n/// System to spawn character previews.\n\npub type ApwPreviewSpawnSystemCharacter = ApwPreviewSpawnSystem<CharacterPreviewSpawn>;\n\n\n\n/// System to spawn map previews.\n\npub type ApwPreviewSpawnSystemMap = ApwPreviewSpawnSystem<MapPreviewSpawn>;\n\n\n", "file_path": 
"crate/asset_selection_ui_play/src/system/apw_preview_spawn_system.rs", "rank": 7, "score": 117997.26011036063 }, { "content": "/// Loads part of an asset.\n\n///\n\n/// This is a partial GAT hack, combined with\n\n///\n\n/// For GAT status, see:\n\n///\n\n/// * <https://users.rust-lang.org/t/17444>\n\n/// * <https://github.com/rust-lang/rust/issues/44265>\n\n///\n\n/// As of 2019-01-19, this workaround was posted:\n\n///\n\n/// * <https://gist.github.com/ExpHP/7a464c184c876eaf27056a83c41356ee>\n\npub trait AssetPartLoader<'s> {\n\n /// `LoadStage` that this ``AssetPartLoader` handles.\n\n const LOAD_STAGE: LoadStage;\n\n /// `SystemData` to read from the world.\n\n type SystemData: SystemData<'s>;\n\n\n\n /// Prepares collections for processing, such as setting capacities.\n\n fn preprocess(\n\n _asset_loading_resources: &mut AssetLoadingResources,\n\n _system_data: &mut Self::SystemData,\n\n ) {\n\n }\n\n\n\n /// Loads the asset part.\n\n fn process(\n\n asset_loading_resources: &mut AssetLoadingResources,\n\n system_data: &mut Self::SystemData,\n\n asset_id: AssetId,\n\n );\n\n\n\n /// Returns if the asset part is loaded.\n\n fn is_complete(\n\n asset_loading_resources: &AssetLoadingResources,\n\n system_data: &Self::SystemData,\n\n asset_id: AssetId,\n\n ) -> bool;\n\n}\n", "file_path": "crate/loading/src/system/asset_part_loader.rs", "rank": 8, "score": 110523.8381646563 }, { "content": "use amethyst::{\n\n core::{math::RealField, transform::Transform},\n\n ecs::{Join, ReadStorage, System, World, WriteStorage},\n\n shred::{ResourceId, SystemData},\n\n};\n\nuse derivative::Derivative;\n\nuse derive_new::new;\n\nuse mirrored_model::play::Mirrored;\n\n\n\n/// Rotates `Transform` (and hence, sprites) of `Object`s that are `Mirrored`.\n\n#[derive(Debug, Default, new)]\n\npub struct ObjectMirroringSystem;\n\n\n\n#[derive(Derivative, SystemData)]\n\n#[derivative(Debug)]\n\npub struct ObjectMirroringSystemData<'s> {\n\n /// `Mirrored` components.\n\n 
#[derivative(Debug = \"ignore\")]\n\n pub mirroreds: ReadStorage<'s, Mirrored>,\n\n /// `Transform` components.\n", "file_path": "crate/object_play/src/system/object_mirroring_system.rs", "rank": 9, "score": 108447.02195790688 }, { "content": "use amethyst::{\n\n ecs::{Join, ReadStorage, System, World, WriteStorage},\n\n renderer::camera::Camera,\n\n shred::{ResourceId, SystemData},\n\n};\n\nuse camera_model::play::CameraTargetCoordinates;\n\nuse derivative::Derivative;\n\nuse derive_new::new;\n\nuse kinematic_model::config::{Position, Velocity};\n\n\n\n/// How much to divide the target velocity by, to smoothen the acceleration.\n\nconst SMOOTHING_FACTOR_DEFAULT: f32 = 3.;\n\n\n\n/// Updates camera velocity to smoothen camera movement between its current and target position.\n\n#[derive(Debug, Derivative, new)]\n\n#[derivative(Default)]\n\npub struct CameraVelocitySystem {\n\n /// How much to divide the target velocity by, to smoothen the acceleration.\n\n #[derivative(Default(value = \"SMOOTHING_FACTOR_DEFAULT\"))]\n\n pub smoothing_factor: f32,\n", "file_path": "crate/camera_play/src/system/camera_velocity_system.rs", "rank": 10, "score": 108435.38745419683 }, { "content": "}\n\n\n\n#[derive(Derivative, SystemData)]\n\n#[derivative(Debug)]\n\npub struct CameraVelocitySystemData<'s> {\n\n /// `Camera` components.\n\n #[derivative(Debug = \"ignore\")]\n\n pub cameras: ReadStorage<'s, Camera>,\n\n /// `CameraTargetCoordinates` components.\n\n #[derivative(Debug = \"ignore\")]\n\n pub camera_target_coordinateses: ReadStorage<'s, CameraTargetCoordinates>,\n\n /// `Position<f32>` components.\n\n #[derivative(Debug = \"ignore\")]\n\n pub positions: ReadStorage<'s, Position<f32>>,\n\n /// `Velocity<f32>` components.\n\n #[derivative(Debug = \"ignore\")]\n\n pub velocities: WriteStorage<'s, Velocity<f32>>,\n\n}\n\n\n\nimpl<'s> System<'s> for CameraVelocitySystem {\n", "file_path": "crate/camera_play/src/system/camera_velocity_system.rs", "rank": 11, "score": 
108427.85328519586 }, { "content": " #[derivative(Debug = \"ignore\")]\n\n pub transforms: WriteStorage<'s, Transform>,\n\n}\n\n\n\nimpl<'s> System<'s> for ObjectMirroringSystem {\n\n type SystemData = ObjectMirroringSystemData<'s>;\n\n\n\n fn run(\n\n &mut self,\n\n ObjectMirroringSystemData {\n\n mirroreds,\n\n mut transforms,\n\n }: Self::SystemData,\n\n ) {\n\n (&mirroreds, &mut transforms)\n\n .join()\n\n .for_each(|(mirrored, transform)| {\n\n if mirrored.0 {\n\n transform.set_rotation_y_axis(f32::pi());\n\n } else {\n\n transform.set_rotation_y_axis(0.);\n\n };\n\n });\n\n }\n\n}\n", "file_path": "crate/object_play/src/system/object_mirroring_system.rs", "rank": 12, "score": 108425.41645724172 }, { "content": " type SystemData = CameraVelocitySystemData<'s>;\n\n\n\n fn run(\n\n &mut self,\n\n CameraVelocitySystemData {\n\n cameras,\n\n camera_target_coordinateses,\n\n positions,\n\n mut velocities,\n\n }: Self::SystemData,\n\n ) {\n\n (\n\n &cameras,\n\n &camera_target_coordinateses,\n\n &positions,\n\n &mut velocities,\n\n )\n\n .join()\n\n .for_each(|(_, camera_target_coordinates, position, velocity)| {\n\n **velocity = {\n", "file_path": "crate/camera_play/src/system/camera_velocity_system.rs", "rank": 13, "score": 108408.30220419721 }, { "content": " // 1. Get distance between current position and target position.\n\n // Divide that by 10, this is the max velocity we will reach.\n\n //\n\n // e.g. if we have to move 1000 pixels, at most we will move 100 per tick.\n\n //\n\n // 2. 
Calculate an average between the current velocity and the target velocity.\n\n //\n\n // If our current velocity is 0, then we will increase to 33.\n\n // Next frame will be 44: (33 + 100) / 3\n\n let velocity_limit = (**camera_target_coordinates - **position) / 10.;\n\n (**velocity + velocity_limit) / self.smoothing_factor\n\n };\n\n });\n\n }\n\n}\n", "file_path": "crate/camera_play/src/system/camera_velocity_system.rs", "rank": 14, "score": 108401.89998071655 }, { "content": "/// Returns an function implementation for the `ComponentData::to_owned` trait method.\n\npub fn to_owned_fn_impl(component_copy: bool, to_owned_fn: Option<Path>) -> TokenStream {\n\n if component_copy {\n\n quote! {\n\n fn to_owned(component: &Self::Component) -> Self::Component {\n\n *component\n\n }\n\n }\n\n } else {\n\n let to_owned_fn = to_owned_fn.unwrap_or_else(|| parse_quote!(std::clone::Clone::clone));\n\n\n\n quote! {\n\n fn to_owned(component: &Self::Component) -> Self::Component {\n\n #to_owned_fn(component)\n\n }\n\n }\n\n }\n\n}\n", "file_path": "crate/sequence_model_derive/src/to_owned_fn_impl.rs", "rank": 15, "score": 105272.10501924195 }, { "content": " let (mut mirroreds, mut transforms) = world\n\n .system_data::<(WriteStorage<'_, Mirrored>, WriteStorage<'_, Transform>)>();\n\n (&mut mirroreds, &mut transforms).join().for_each(setup_fn)\n\n })\n\n .with_system_single(\n\n ObjectMirroringSystem::new(),\n\n any::type_name::<ObjectMirroringSystem>(),\n\n &[],\n\n ) // kcov-ignore\n\n .with_assertion(move |world| {\n\n let (mirroreds, transforms) =\n\n world.system_data::<(ReadStorage<'_, Mirrored>, ReadStorage<'_, Transform>)>();\n\n (&mirroreds, &transforms).join().for_each(assertion_fn)\n\n })\n\n .run_isolated()\n\n }\n\n}\n", "file_path": "crate/workspace_tests/src/object_play/system/object_mirroring_system.rs", "rank": 16, "score": 104657.55515288019 }, { "content": "#[cfg(test)]\n\nmod tests {\n\n use std::any;\n\n\n\n use amethyst::{\n\n core::{math::RealField, 
transform::Transform},\n\n ecs::{Join, ReadStorage, WriteStorage},\n\n Error,\n\n };\n\n use application_test_support::AutexousiousApplication;\n\n use approx::assert_relative_eq;\n\n use mirrored_model::play::Mirrored;\n\n\n\n use object_play::ObjectMirroringSystem;\n\n\n\n #[test]\n\n fn rotates_mirrored_objects_around_y_axis() -> Result<(), Error> {\n\n run_test(\n\n |(mirrored, _transform)| **mirrored = true,\n\n |(_mirrored, transform)| assert_relative_eq!(f32::pi(), transform.rotation().angle()),\n", "file_path": "crate/workspace_tests/src/object_play/system/object_mirroring_system.rs", "rank": 17, "score": 104649.08855478383 }, { "content": "#[cfg(test)]\n\nmod tests {\n\n use std::any;\n\n\n\n use amethyst::{\n\n ecs::{Entity, WorldExt, WriteStorage},\n\n window::ScreenDimensions,\n\n Error,\n\n };\n\n use amethyst_test::{AmethystApplication, HIDPI, SCREEN_HEIGHT, SCREEN_WIDTH};\n\n use camera_model::play::CameraTargetCoordinates;\n\n use kinematic_model::config::{Position, Velocity};\n\n use pretty_assertions::assert_eq;\n\n\n\n use camera_play::{CameraCreator, CameraVelocitySystem};\n\n\n\n #[test]\n\n fn velocity_remains_zero_when_position_matches_target() -> Result<(), Error> {\n\n run_test(\n\n SetupParams {\n", "file_path": "crate/workspace_tests/src/camera_play/system/camera_velocity_system.rs", "rank": 18, "score": 104644.66907265261 }, { "content": " .insert(camera_entity, position)\n\n .expect(\"Failed to insert `Position<f32>` component.\");\n\n velocities\n\n .insert(camera_entity, velocity_setup)\n\n .expect(\"Failed to insert `Velocity<f32>` component.\");\n\n }\n\n\n\n world.insert(camera_entity);\n\n });\n\n\n\n amethyst_application = velocity_steps.into_iter().fold(\n\n amethyst_application,\n\n |amethyst_application, velocity_expected| {\n\n amethyst_application.with_assertion(move |world| {\n\n let entity = *world.read_resource::<Entity>();\n\n let velocities = world.read_storage::<Velocity<f32>>();\n\n let velocity_actual = 
velocities\n\n .get(entity)\n\n .copied()\n\n .expect(\"Expected entity to have `Transform` component.\");\n", "file_path": "crate/workspace_tests/src/camera_play/system/camera_velocity_system.rs", "rank": 19, "score": 104644.47024020384 }, { "content": " Velocity::new(5., 10., 15.),\n\n Velocity::new(7.5, 15., 22.5),\n\n Velocity::new(8.75, 17.5, 26.25),\n\n ],\n\n },\n\n )\n\n }\n\n\n\n fn run_test(\n\n SetupParams {\n\n camera_target_coordinates,\n\n position,\n\n velocity: velocity_setup,\n\n }: SetupParams,\n\n ExpectedParams { velocity_steps }: ExpectedParams,\n\n ) -> Result<(), Error> {\n\n let mut amethyst_application = AmethystApplication::blank()\n\n .with_resource(ScreenDimensions::new(SCREEN_WIDTH, SCREEN_HEIGHT, HIDPI))\n\n .with_system(\n\n CameraVelocitySystem {\n", "file_path": "crate/workspace_tests/src/camera_play/system/camera_velocity_system.rs", "rank": 20, "score": 104644.35727971456 }, { "content": " smoothing_factor: 2.,\n\n },\n\n any::type_name::<CameraVelocitySystem>(),\n\n &[],\n\n ) // kcov-ignore\n\n .with_effect(move |world| {\n\n let camera_entity = CameraCreator::create_in_world(world);\n\n\n\n {\n\n let (mut camera_target_coordinateses, mut positions, mut velocities) = world\n\n .system_data::<(\n\n WriteStorage<'_, CameraTargetCoordinates>,\n\n WriteStorage<'_, Position<f32>>,\n\n WriteStorage<'_, Velocity<f32>>,\n\n )>();\n\n\n\n camera_target_coordinateses\n\n .insert(camera_entity, camera_target_coordinates)\n\n .expect(\"Failed to insert `CameraTargetCoordinates` component.\");\n\n positions\n", "file_path": "crate/workspace_tests/src/camera_play/system/camera_velocity_system.rs", "rank": 21, "score": 104640.70349654628 }, { "content": " )\n\n }\n\n\n\n #[test]\n\n fn resets_non_mirrored_objects_y_axis_rotation() -> Result<(), Error> {\n\n run_test(\n\n |(mirrored, transform)| {\n\n **mirrored = false;\n\n transform.set_rotation_y_axis(f32::pi());\n\n },\n\n |(_mirrored, transform)| assert_relative_eq!(0., 
transform.rotation().angle()),\n\n )\n\n }\n\n\n\n fn run_test(\n\n setup_fn: fn((&mut Mirrored, &mut Transform)),\n\n assertion_fn: fn((&Mirrored, &Transform)),\n\n ) -> Result<(), Error> {\n\n AutexousiousApplication::game_base()\n\n .with_effect(move |world| {\n", "file_path": "crate/workspace_tests/src/object_play/system/object_mirroring_system.rs", "rank": 22, "score": 104636.6452548155 }, { "content": " camera_target_coordinates: CameraTargetCoordinates::new(100., 200., 300.),\n\n position: Position::new(100., 200., 300.),\n\n velocity: Velocity::new(0., 0., 0.),\n\n },\n\n ExpectedParams {\n\n velocity_steps: vec![Velocity::new(0., 0., 0.), Velocity::new(0., 0., 0.)],\n\n },\n\n )\n\n }\n\n\n\n #[test]\n\n fn velocity_increments_smoothly_when_target_is_far() -> Result<(), Error> {\n\n run_test(\n\n SetupParams {\n\n camera_target_coordinates: CameraTargetCoordinates::new(100., 200., 300.),\n\n position: Position::new(0., 0., 0.),\n\n velocity: Velocity::new(0., 0., 0.),\n\n },\n\n ExpectedParams {\n\n velocity_steps: vec![\n", "file_path": "crate/workspace_tests/src/camera_play/system/camera_velocity_system.rs", "rank": 23, "score": 104634.80314556454 }, { "content": "\n\n assert_eq!(velocity_expected, velocity_actual);\n\n })\n\n },\n\n );\n\n\n\n amethyst_application.run()\n\n }\n\n\n\n struct SetupParams {\n\n camera_target_coordinates: CameraTargetCoordinates,\n\n position: Position<f32>,\n\n velocity: Velocity<f32>,\n\n }\n\n\n\n struct ExpectedParams {\n\n velocity_steps: Vec<Velocity<f32>>,\n\n }\n\n}\n", "file_path": "crate/workspace_tests/src/camera_play/system/camera_velocity_system.rs", "rank": 24, "score": 104632.39645844199 }, { "content": "/// Asserts that the `Trans` objects, disregarding their `State`, are equal.\n\n///\n\n/// This `panic!`s with a readable error message when the assertion fails.\n\n///\n\n/// # Parameters\n\n///\n\n/// * `expected`: The `Trans` that is desired.\n\n/// * `actual`: The `Trans` that was acquired.\n\n///\n\n/// 
# Examples\n\n///\n\n/// Successful assertion:\n\n///\n\n/// ```rust\n\n/// # extern crate amethyst;\n\n/// # extern crate debug_util_amethyst;\n\n/// #\n\n/// # use amethyst::prelude::*;\n\n/// # use debug_util_amethyst::assert_eq_trans;\n\n/// #\n\n/// // ok\n\n/// assert_eq_trans::<(), ()>(&Trans::None, &Trans::None);\n\n/// ```\n\n///\n\n/// Failing assertion:\n\n///\n\n/// ```rust,should_panic\n\n/// # extern crate amethyst;\n\n/// # extern crate debug_util_amethyst;\n\n/// #\n\n/// # use amethyst::prelude::*;\n\n/// # use debug_util_amethyst::assert_eq_trans;\n\n/// #\n\n/// // panic: Expected `Trans::None` but got `Trans::Pop`.\n\n/// assert_eq_trans::<(), ()>(&Trans::None, &Trans::Pop);\n\n/// ```\n\n///\n\n/// # Panics\n\n///\n\n/// When the expected and actual `Trans` differ.\n\npub fn assert_eq_trans<T, E>(expected: &Trans<T, E>, actual: &Trans<T, E>) {\n\n assert_eq!(\n\n discriminant(expected),\n\n discriminant(actual),\n\n \"Expected `{:?}` but got `{:?}`.\",\n\n expected,\n\n actual\n\n );\n\n}\n\n\n", "file_path": "crate/debug_util_amethyst/src/lib.rs", "rank": 25, "score": 89740.16870210449 }, { "content": "/// Asserts that the `Trans` objects contained in the `Option`s, disregarding their `State`, are\n\n/// equal.\n\n///\n\n/// This `panic!`s with a readable error message when the assertion fails.\n\n///\n\n/// # Parameters\n\n///\n\n/// * `expected`: The `Option<Trans>` that is desired.\n\n/// * `actual`: The `Option<Trans>` that was acquired.\n\n///\n\n/// # Examples\n\n///\n\n/// Successful assertion:\n\n///\n\n/// ```rust\n\n/// # extern crate amethyst;\n\n/// # extern crate debug_util_amethyst;\n\n/// #\n\n/// # use amethyst::prelude::*;\n\n/// # use debug_util_amethyst::assert_eq_opt_trans;\n\n/// #\n\n/// assert_eq_opt_trans::<(), ()>(None, None);\n\n/// assert_eq_opt_trans::<(), ()>(Some(Trans::None).as_ref(), Some(Trans::None).as_ref());\n\n/// ```\n\n///\n\n/// Failing assertion:\n\n///\n\n/// ```rust,should_panic\n\n/// # extern crate 
amethyst;\n\n/// # extern crate debug_util_amethyst;\n\n/// #\n\n/// # use amethyst::prelude::*;\n\n/// # use debug_util_amethyst::assert_eq_opt_trans;\n\n/// #\n\n/// // panic: Expected `Some(Trans::None)` but got `Some(Trans::Pop)`.\n\n/// assert_eq_opt_trans::<(), ()>(Some(Trans::None).as_ref(), Some(Trans::Pop).as_ref());\n\n/// ```\n\n///\n\n/// # Panics\n\n///\n\n/// When the expected and actual `Trans` differ.\n\npub fn assert_eq_opt_trans<T, E>(expected: Option<&Trans<T, E>>, actual: Option<&Trans<T, E>>) {\n\n match (expected, actual) {\n\n (Some(expected_trans), Some(actual_trans)) => assert_eq!(\n\n discriminant(expected_trans),\n\n discriminant(actual_trans),\n\n \"Expected `{:?}` but got `{:?}`.\",\n\n expected,\n\n actual\n\n ),\n\n (Some(_), None) => panic!(\"Expected `{:?}` but got `None`.\", expected),\n\n (None, Some(_)) => panic!(\"Expected `None` but got `{:?}`.\", actual),\n\n (None, None) => {}\n\n };\n\n}\n", "file_path": "crate/debug_util_amethyst/src/lib.rs", "rank": 26, "score": 84775.34210509527 }, { "content": "#[derive(Debug)]\n\nstruct EofMatcher {\n\n eof: Vec<u8>,\n\n complete: bool,\n\n match_idx: usize,\n\n}\n\n\n\nimpl EofMatcher {\n\n fn new() -> Self {\n\n EofMatcher {\n\n eof: Vec::with_capacity(10),\n\n complete: false,\n\n match_idx: 0,\n\n }\n\n }\n\n\n\n #[inline]\n\n fn next(&mut self, c: u8) -> bool {\n\n if self.complete && self.eof.get(self.match_idx) == Some(&c) {\n\n self.match_idx += 1;\n\n } else if self.complete {\n", "file_path": "crate/stdio_input/src/ion/quotes.rs", "rank": 27, "score": 76030.22314612332 }, { "content": "struct HitLimitVisitor;\n\n\n\nmacro_rules! 
impl_visit_numeric {\n\n ($visit_name:ident, $ty:ident) => {\n\n fn $visit_name<E>(self, value: $ty) -> Result<Self::Value, E>\n\n where\n\n E: Error,\n\n {\n\n if value >= $ty::from(std::u32::MIN) && value <= $ty::from(std::u32::MAX) {\n\n Ok(HitLimit::Limit(value as u32))\n\n } else {\n\n Err(E::custom(format!(\"u32 out of range: {}\", value)))\n\n }\n\n }\n\n };\n\n}\n\n\n\nimpl<'de> Visitor<'de> for HitLimitVisitor {\n\n type Value = HitLimit;\n\n\n", "file_path": "crate/collision_model/src/config/hit_limit.rs", "rank": 28, "score": 73876.19517890384 }, { "content": "/// Maps tokens from stdin to a state specific event.\n\npub trait StdinMapper {\n\n /// Resource needed by the mapper to construct the state specific event.\n\n ///\n\n /// TODO: Pending <https://gitlab.com/azriel91/autexousious/issues/76>\n\n ///\n\n /// Ideally we can have this be the `SystemData` of an ECS system. However, we cannot add\n\n /// a `Resources: for<'res> SystemData<'res>` trait bound as generic associated types (GATs)\n\n /// are not yet implemented. 
See:\n\n ///\n\n /// * <https://users.rust-lang.org/t/17444>\n\n /// * <https://github.com/rust-lang/rust/issues/44265>\n\n ///\n\n /// As of 2019-01-19, this workaround was posted:\n\n ///\n\n /// * <https://gist.github.com/ExpHP/7a464c184c876eaf27056a83c41356ee>\n\n type SystemData: for<'s> MapperSystemData<'s>;\n\n /// State specific event type that this maps tokens to.\n\n type Event: Send + Sync + 'static;\n\n /// Data structure representing the arguments.\n\n type Args: StructOpt;\n", "file_path": "crate/stdio_spi/src/stdin_mapper.rs", "rank": 29, "score": 70561.25635975736 }, { "content": "/// Sequence transition behaviour calculation.\n\npub trait CharacterSequenceHandler {\n\n /// Returns the status update for a character based on current input or lack thereof.\n\n ///\n\n /// Returns `Some(..)` when there is an update, `None` otherwise.\n\n ///\n\n /// # Parameters\n\n ///\n\n /// * `components`: Components used to compute character sequence updates.\n\n fn update(_components: CharacterSequenceUpdateComponents<'_>) -> Option<CharacterSequenceName> {\n\n None\n\n }\n\n}\n", "file_path": "crate/character_play/src/sequence_handler.rs", "rank": 30, "score": 69508.71800961229 }, { "content": "/// Marker trait for everywhere that uses sequence names.\n\n///\n\n/// Sequence names may be well-known (enum variant), or arbitrary (fallback enum variant that holds\n\n/// the string).\n\n///\n\n/// TODO: RFC 1733 will allow us to define an alias instead of a new trait. 
See:\n\n///\n\n/// * <https://github.com/rust-lang/rfcs/blob/master/text/1733-trait-alias.md>\n\n/// * <https://github.com/rust-lang/rust/issues/41517>\n\npub trait SequenceName:\n\n Copy + Debug + Default + Display + Eq + FromStr + Into<&'static str> + Hash + Send + Sync + 'static\n\n{\n\n}\n", "file_path": "crate/sequence_model_core/src/config/sequence_name.rs", "rank": 31, "score": 68508.12219469306 }, { "content": "/// Components common to object types' sequences, associated with a sequence ID.\n\npub trait GameObjectSequence {\n\n /// Sequence ID that this `GameObjectSequence` uses.\n\n type SequenceName: SequenceName;\n\n /// Type of the sequence frame.\n\n type GameObjectFrame: AsRef<Wait> + Default + GameObjectFrame;\n\n\n\n /// Returns the `ObjectSequence` for this `GameObjectSequence`.\n\n fn object_sequence(&self) -> &ObjectSequence<Self::SequenceName, Self::GameObjectFrame>;\n\n}\n", "file_path": "crate/object_model/src/config/game_object_sequence.rs", "rank": 32, "score": 67534.776239658 }, { "content": "/// Fields common to object types' frames.\n\npub trait GameObjectFrame {\n\n /// Returns the `ObjectFrame` for this `GameObjectFrame`.\n\n fn object_frame(&self) -> &ObjectFrame;\n\n}\n\n\n\nimpl GameObjectFrame for ObjectFrame {\n\n fn object_frame(&self) -> &ObjectFrame {\n\n self\n\n }\n\n}\n", "file_path": "crate/object_model/src/config/game_object_frame.rs", "rank": 33, "score": 67534.776239658 }, { "content": "/// Allows constraint clause query the component type.\n\npub trait ComponentDataExt {\n\n /// The ECS component type of this data.\n\n type Component: Component;\n\n\n\n /// Returns a new instance of this type.\n\n fn new(sequence: Vec<Self::Component>) -> Self;\n\n\n\n /// Returns an owned version of the component.\n\n fn to_owned(component: &Self::Component) -> Self::Component;\n\n}\n", "file_path": "crate/sequence_model_spi/src/loaded/component_data_ext.rs", "rank": 34, "score": 66608.03760539195 }, { "content": "/// An independent 
part of an item that may be loaded from an asset, such as `WaitSequenceHandles`.\n\npub trait ItemComponent<'s>: Component {\n\n /// `SystemData` needed when augmenting an entity with `Component`s.\n\n type SystemData: SystemData<'s>;\n\n\n\n /// Augments an entity with `Component`s given it has this `ItemComponent`.\n\n ///\n\n /// For example, given an entity has `WaitSequenceHandles`, it should be augmented with:\n\n ///\n\n /// * `FrameIndexClock`\n\n /// * `FrameWaitClock`\n\n fn augment(&self, _system_data: &mut Self::SystemData, _entity: Entity) {}\n\n}\n", "file_path": "crate/asset_model/src/item_component.rs", "rank": 35, "score": 66159.10360884378 }, { "content": "/// Additional functions for working with `EventChannel`s.\n\npub trait EventChannelExt<E> {\n\n /// Returns the last event from the event channel if any.\n\n ///\n\n /// # Parameters\n\n ///\n\n /// * `event_rid`: `ReaderId` registered for the event channel.\n\n fn last_event(&self, event_rid: &mut ReaderId<E>) -> Option<&E>;\n\n}\n\n\n\nimpl<E> EventChannelExt<E> for EventChannel<E>\n\nwhere\n\n E: Event,\n\n{\n\n fn last_event(&self, event_rid: &mut ReaderId<E>) -> Option<&E> {\n\n let mut events_iterator = self.read(event_rid);\n\n let event_count = events_iterator.len();\n\n\n\n if event_count > 1 {\n\n warn!(\n\n \"{} events received, only processing the last event.\",\n\n event_count\n\n );\n\n }\n\n\n\n events_iterator.nth(event_count.saturating_sub(1))\n\n }\n\n}\n", "file_path": "crate/shrev_support/src/event_channel_ext.rs", "rank": 36, "score": 65721.81389228335 }, { "content": "/// Loads frame component data, typically a sequence of a particular frame component.\n\npub trait FrameComponentDataLoader {\n\n /// The component that changes per frame.\n\n type Component: Component;\n\n /// The data type that holds the frame components.\n\n type ComponentData: Asset\n\n + ComponentDataExt<Component = Self::Component>\n\n + Send\n\n + Sync\n\n + 'static;\n\n\n\n /// Loads frame component 
`Sequence` as an asset and returns its handle.\n\n fn load<SequenceIterator, FrameRef, FnFrameToComponent>(\n\n loader: &Loader,\n\n frame_component_data_assets: &AssetStorage<Self::ComponentData>,\n\n fn_frame_to_component: FnFrameToComponent,\n\n sequence_iterator: SequenceIterator,\n\n ) -> Handle<Self::ComponentData>\n\n where\n\n SequenceIterator: Iterator<Item = FrameRef>,\n\n FnFrameToComponent: Fn(FrameRef) -> Self::Component,\n", "file_path": "crate/sequence_loading_spi/src/frame_component_data_loader.rs", "rank": 37, "score": 65718.25155400833 }, { "content": "/// Loads sequence component data, typically a collection of a particular sequence component.\n\n///\n\n/// This is distinct from `FrameComponentDataLoader` as the sequence component is not necessarily\n\n/// a `Handle<FrameComponentData>`.\n\npub trait SequenceComponentDataLoader {\n\n /// The component that changes per sequence.\n\n type Component: Component;\n\n /// The data type that holds the sequence components.\n\n type ComponentData: ComponentDataExt<Component = Self::Component> + Send + Sync + 'static;\n\n\n\n /// Loads and returns sequence component data.\n\n fn load<SequencesIterator, SequenceRef, FnSequenceToComponent>(\n\n fn_sequence_to_component: FnSequenceToComponent,\n\n sequence_iterator: SequencesIterator,\n\n ) -> Self::ComponentData\n\n where\n\n SequencesIterator: Iterator<Item = SequenceRef>,\n\n FnSequenceToComponent: Fn(SequenceRef) -> Self::Component,\n\n {\n\n <Self::ComponentData as ComponentDataExt>::new(\n\n sequence_iterator\n\n .map(fn_sequence_to_component)\n\n .collect::<Vec<Self::Component>>(),\n\n )\n\n }\n\n}\n", "file_path": "crate/sequence_loading_spi/src/sequence_component_data_loader.rs", "rank": 38, "score": 65718.25155400833 }, { "content": "// Ignore default implementation coverage.\n\n// kcov-ignore-start\n\n/// Trait for types that intercept and manipulate application behaviour.\n\n///\n\n/// Types that implement this trait are invoked at the beginning 
of each [`State`][state] function\n\n/// so they may record application state or override the behaviour of the state.\n\n///\n\n/// [state]: https://docs.rs/amethyst/0.6.0/amethyst/trait.State.html\n\npub trait Intercept<T, E>: Debug\n\nwhere\n\n E: Send + Sync + 'static,\n\n{\n\n /// Invoked before the delegate state's `on_start(..)` invocation.\n\n fn on_start_begin(&mut self, _data: &mut StateData<'_, T>) {}\n\n /// Invoked after the delegate state's `on_start(..)` invocation.\n\n fn on_start_end(&mut self) {}\n\n /// Invoked before the delegate state's `on_stop(..)` invocation.\n\n fn on_stop_begin(&mut self, _data: &mut StateData<'_, T>) {}\n\n /// Invoked after the delegate state's `on_stop(..)` invocation.\n\n fn on_stop_end(&mut self) {}\n\n /// Invoked before the delegate state's `on_pause(..)` invocation.\n\n fn on_pause_begin(&mut self, _data: &mut StateData<'_, T>) {}\n\n /// Invoked after the delegate state's `on_pause(..)` invocation.\n\n fn on_pause_end(&mut self) {}\n\n /// Invoked before the delegate state's `on_resume(..)` invocation.\n\n fn on_resume_begin(&mut self, _data: &mut StateData<'_, T>) {}\n\n /// Invoked after the delegate state's `on_resume(..)` invocation.\n\n fn on_resume_end(&mut self) {}\n", "file_path": "crate/application_robot/src/intercept.rs", "rank": 39, "score": 64855.34140178162 }, { "content": "/// Conditions to be met for an input reaction to happen.\n\npub trait InputReactionRequirement<'s> {\n\n /// `SystemData` to query to check if requirement is met.\n\n type SystemData: SystemData<'s>;\n\n\n\n /// Whether the requirement is met.\n\n fn requirement_met(&self, system_data: &mut Self::SystemData, entity: Entity) -> bool;\n\n}\n\n\n\nimpl<'s> InputReactionRequirement<'s> for () {\n\n type SystemData = ();\n\n\n\n fn requirement_met(&self, _: &mut Self::SystemData, _: Entity) -> bool {\n\n true\n\n }\n\n}\n", "file_path": "crate/input_reaction_model/src/config/input_reaction_requirement.rs", "rank": 40, "score": 
64795.07525801731 }, { "content": "#[derivative(Debug)]\n\npub struct MirroredSystemData<'s> {\n\n /// `Mirrored` components.\n\n #[derivative(Debug = \"ignore\")]\n\n pub mirroreds: WriteStorage<'s, Mirrored>,\n\n}\n\n\n\nimpl<'s> ItemComponent<'s> for Mirrored {\n\n type SystemData = MirroredSystemData<'s>;\n\n\n\n fn augment(&self, system_data: &mut Self::SystemData, entity: Entity) {\n\n let MirroredSystemData { mirroreds } = system_data;\n\n\n\n if mirroreds.get(entity).is_none() {\n\n mirroreds\n\n .insert(entity, *self)\n\n .expect(\"Failed to insert `Mirrored` component.\");\n\n }\n\n }\n\n}\n", "file_path": "crate/mirrored_model/src/play/mirrored.rs", "rank": 41, "score": 64166.876718788015 }, { "content": " BitXorAssign,\n\n Clone,\n\n Component,\n\n Copy,\n\n Debug,\n\n Deref,\n\n DerefMut,\n\n Default,\n\n Display,\n\n From,\n\n PartialEq,\n\n Eq,\n\n Not,\n\n new,\n\n)]\n\n#[storage(VecStorage)]\n\npub struct Mirrored(pub bool);\n\n\n\n/// `MirroredSystemData`.\n\n#[derive(Derivative, SystemData)]\n", "file_path": "crate/mirrored_model/src/play/mirrored.rs", "rank": 42, "score": 64166.21853452535 }, { "content": "use amethyst::ecs::{\n\n shred::{ResourceId, SystemData},\n\n storage::VecStorage,\n\n Component, Entity, World, WriteStorage,\n\n};\n\nuse asset_model::ItemComponent;\n\nuse derivative::Derivative;\n\nuse derive_deref::{Deref, DerefMut};\n\nuse derive_more::{\n\n BitAnd, BitAndAssign, BitOr, BitOrAssign, BitXor, BitXorAssign, Display, From, Not,\n\n};\n\nuse derive_new::new;\n\n\n\n/// Whether the object is mirrored.\n\n#[derive(\n\n BitAnd,\n\n BitAndAssign,\n\n BitOr,\n\n BitOrAssign,\n\n BitXor,\n", "file_path": "crate/mirrored_model/src/play/mirrored.rs", "rank": 43, "score": 64165.4014669996 }, { "content": "#[proc_macro_derive(Asset, attributes(asset_data))]\n\npub fn derive_asset(item: TokenStream) -> TokenStream {\n\n let ast = parse_macro_input!(item as DeriveInput);\n\n\n\n let type_name = &ast.ident;\n\n let asset_data = ast\n\n 
.attrs\n\n .iter()\n\n .find(|attr| attr.path.is_ident(ASSET_DATA_ATTRIBUTE))\n\n .map(|attr| {\n\n attr.parse_meta().unwrap_or_else(|e| {\n\n panic!(\n\n \"Failed to parse `{}` attribute as meta. Error: {}\",\n\n ASSET_DATA_ATTRIBUTE, e\n\n )\n\n })\n\n })\n\n .map(|meta| {\n\n if let Meta::Path(path) = meta {\n\n path\n\n } else {\n", "file_path": "crate/asset_derive/src/lib.rs", "rank": 44, "score": 61939.38632911902 }, { "content": "type MapperSystemSystemData<'s, E, SD> = (\n\n Read<'s, EventChannel<VariantAndTokens>>,\n\n Write<'s, EventChannel<E>>,\n\n <SD as MapperSystemData<'s>>::SystemData,\n\n);\n\n\n\nimpl<'s, M> System<'s> for MapperSystem<M>\n\nwhere\n\n M: StdinMapper,\n\n{\n\n type SystemData = MapperSystemSystemData<'s, M::Event, M::SystemData>;\n\n\n\n fn run(\n\n &mut self,\n\n (variant_channel, mut app_event_channel, mapper_system_data): Self::SystemData,\n\n ) {\n\n let mut events = variant_channel\n\n .read(self.reader_id.as_mut().unwrap())\n\n .filter_map(|&(variant, ref tokens)| {\n\n if variant == self.variant {\n", "file_path": "crate/stdio_spi/src/mapper_system.rs", "rank": 45, "score": 61579.99298452909 }, { "content": "type HitDetectionSystemData<'s> = (\n\n Read<'s, EventChannel<ContactEvent>>,\n\n ReadStorage<'s, HitRepeatTrackers>,\n\n WriteStorage<'s, HitObjectCount>,\n\n Write<'s, EventChannel<HitEvent>>,\n\n);\n\n\n\nimpl HitDetectionSystem {\n\n fn update_hit_object_count(\n\n hit_object_counts: &mut WriteStorage<'_, HitObjectCount>,\n\n entity_hitter: Entity,\n\n ) -> HitObjectCount {\n\n if let Some(hit_object_count) = hit_object_counts.get_mut(entity_hitter) {\n\n *hit_object_count += 1;\n\n *hit_object_count\n\n } else {\n\n let hit_object_count = HitObjectCount::new(1);\n\n hit_object_counts\n\n .insert(entity_hitter, hit_object_count)\n\n .expect(\"Failed to insert `HitObjectCount` component.\");\n", "file_path": "crate/collision_play/src/system/hit_detection_system.rs", "rank": 46, "score": 61579.99298452909 }, { 
"content": "type HitSfxSystemData<'s> = (\n\n Read<'s, EventChannel<HitEvent>>,\n\n Read<'s, CollisionSfxMap>,\n\n Read<'s, AssetStorage<Source>>,\n\n Option<Read<'s, Output>>,\n\n);\n\n\n\nimpl<'s> System<'s> for HitSfxSystem {\n\n type SystemData = HitSfxSystemData<'s>;\n\n\n\n fn run(&mut self, (hit_ec, collision_sfx_map, source_assets, output): Self::SystemData) {\n\n // Make sure we empty the event channel, even if we don't have an output device.\n\n let events_iterator = hit_ec.read(\n\n self.hit_event_rid\n\n .as_mut()\n\n .expect(\"Expected reader ID to exist for HitSfxSystem.\"),\n\n );\n\n\n\n if let Some(output) = output {\n\n events_iterator.for_each(|ev| {\n", "file_path": "crate/collision_audio_play/src/system/hit_sfx_system.rs", "rank": 47, "score": 60974.542800699055 }, { "content": "#[derive(new)]\n\nstruct SequenceEndTransitionVisitor<SeqName>(PhantomData<SeqName>)\n\nwhere\n\n SeqName: SequenceName;\n\n\n\nimpl<'de, SeqName> Visitor<'de> for SequenceEndTransitionVisitor<SeqName>\n\nwhere\n\n SeqName: SequenceName,\n\n{\n\n type Value = SequenceEndTransition<SeqName>;\n\n\n\n fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {\n\n formatter.write_str(\"one of `none`, `repeat`, `delete`, or a sequence ID\")\n\n }\n\n\n\n fn visit_str<E>(self, value: &str) -> Result<Self::Value, E>\n\n where\n\n E: Error,\n\n {\n\n SequenceEndTransition::from_str(value)\n\n .or_else(|_| {\n\n SequenceNameString::from_str(value).map(SequenceEndTransition::SequenceName)\n\n })\n\n .map_err(|_| E::invalid_value(Unexpected::Str(value), &self))\n\n }\n\n}\n", "file_path": "crate/sequence_model/src/config/sequence_end_transition.rs", "rank": 48, "score": 60840.4928682935 }, { "content": "type ControllerInputUpdateSystemData<'s> = (\n\n Read<'s, EventChannel<ControlInputEvent>>,\n\n WriteStorage<'s, ControllerInput>,\n\n);\n\n\n\nimpl ControllerInputUpdateSystem {\n\n fn get_or_insert_mut<'s, C>(comp_storage: &'s mut WriteStorage<C>, entity: Entity) -> &'s 
mut C\n\n where\n\n C: Component + Default,\n\n {\n\n if let Ok(entry) = comp_storage.entry(entity) {\n\n entry.or_insert(C::default());\n\n }\n\n comp_storage\n\n .get_mut(entity)\n\n .expect(\"Unreachable: Component either previously existed, or was just inserted.\")\n\n }\n\n}\n\n\n\nimpl<'s> System<'s> for ControllerInputUpdateSystem {\n", "file_path": "crate/game_input/src/system/controller_input_update_system.rs", "rank": 49, "score": 60380.88219032618 }, { "content": "use std::path::PathBuf;\n\n\n\nuse amethyst::{\n\n ecs::{System, World, Write},\n\n shred::{ResourceId, SystemData},\n\n};\n\nuse asset_loading::AssetDiscovery;\n\nuse asset_model::{\n\n config::AssetIndex,\n\n loaded::{AssetId, AssetIdMappings, AssetTypeMappings},\n\n};\n\nuse derivative::Derivative;\n\nuse derive_new::new;\n\nuse loading_model::loaded::{AssetLoadStage, LoadStage};\n\nuse log::debug;\n\nuse slotmap::SecondaryMap;\n\n\n\n/// Discovers assets and writes to `Option<AssetIndex>`.\n\n#[derive(Debug, Default, new)]\n\npub struct AssetDiscoverySystem {\n", "file_path": "crate/loading/src/system/asset_discovery_system.rs", "rank": 50, "score": 59574.90270343563 }, { "content": "use std::{any, marker::PhantomData};\n\n\n\nuse amethyst::{\n\n ecs::{LazyUpdate, Read, ReadExpect, System, World, WriteExpect},\n\n shred::{ResourceId, SystemData},\n\n};\n\nuse derivative::Derivative;\n\nuse derive_new::new;\n\n\n\nuse crate::Prev;\n\n\n\n/// Tracks the value of a resource, and adds a `Prev<T>` resource with that value.\n\n///\n\n/// The order that systems should be dispatched is:\n\n///\n\n/// 1. System that updates `T`\n\n/// 2. System that reads `T` and `Prev<T>`\n\n/// 3. 
`PrevTrackerSystem<T>`\n\n///\n\n/// The order is important, otherwise the value that is stored in `Prev<T>` will be exactly the same\n", "file_path": "crate/tracker/src/system/prev_tracker_system.rs", "rank": 51, "score": 59574.69788999948 }, { "content": "use std::{any, marker::PhantomData};\n\n\n\nuse amethyst::{\n\n ecs::{Component, Entities, Join, ReadStorage, System, World, WriteStorage},\n\n shred::{ResourceId, SystemData},\n\n};\n\nuse derivative::Derivative;\n\nuse derive_new::new;\n\n\n\nuse crate::Last;\n\n\n\n/// Tracks the value of an entity's component, and adds a `Last<T>` component with that value.\n\n///\n\n/// The order that systems should be dispatched is:\n\n///\n\n/// 1. System that updates `T`\n\n/// 2. System that reads `T` and `Last<T>`\n\n/// 3. `LastTrackerSystem<T>`\n\n///\n\n/// The order is important, otherwise the value that is stored in `Last<T>` will be exactly the same\n", "file_path": "crate/tracker/src/system/last_tracker_system.rs", "rank": 52, "score": 59573.80135946733 }, { "content": "/// Rendering system.\n\n#[derive(Debug)]\n\npub struct StdinSystem {\n\n /// Channel receiver for output/input messages for this system.\n\n rx: Receiver<String>,\n\n}\n\n\n\nimpl StdinSystem {\n\n /// Returns a new `StdinSystem` that listens to stdin on a separate thread.\n\n // kcov-ignore-start\n\n pub fn new() -> Self {\n\n // kcov-ignore-end\n\n Self::default()\n\n }\n\n\n\n /// Returns a new `StdinSystem`. 
Visible for testing.\n\n ///\n\n /// Allows tests to retain control over the channel sender.\n\n pub fn internal_new<F>(rx: Receiver<String>, reader_spawn_fn: F) -> Self\n\n where\n", "file_path": "crate/stdio_input/src/system/stdin_system.rs", "rank": 53, "score": 59571.37212940523 }, { "content": " T: Clone + Send + Sync + 'static,\n\n{\n\n /// `T` resource.\n\n #[derivative(Debug = \"ignore\")]\n\n pub resource: Option<ReadExpect<'s, T>>,\n\n /// `Prev<T>` resource.\n\n #[derivative(Debug = \"ignore\")]\n\n pub resource_prev: Option<WriteExpect<'s, Prev<T>>>,\n\n /// `LazyUpdate` resource.\n\n #[derivative(Debug = \"ignore\")]\n\n pub lazy_update: Read<'s, LazyUpdate>,\n\n}\n\n\n\nimpl<'s, T> PrevTrackerSystem<T>\n\nwhere\n\n T: Clone + Send + Sync + 'static,\n\n{\n\n /// Returns a String representing this system's name.\n\n pub fn system_name(&self) -> String {\n\n format!(\"{}<{}>\", any::type_name::<Self>(), self.resource_name)\n", "file_path": "crate/tracker/src/system/prev_tracker_system.rs", "rank": 54, "score": 59569.86689111174 }, { "content": " T: Clone + Component + Send + Sync,\n\n{\n\n /// `Entities`.\n\n #[derivative(Debug = \"ignore\")]\n\n pub entities: Entities<'s>,\n\n /// `T` components.\n\n #[derivative(Debug = \"ignore\")]\n\n pub components: ReadStorage<'s, T>,\n\n /// `Last<T>` components.\n\n #[derivative(Debug = \"ignore\")]\n\n pub components_lasts: WriteStorage<'s, Last<T>>,\n\n}\n\n\n\nimpl<'s, T> LastTrackerSystem<T>\n\nwhere\n\n T: Component + Clone + Send + Sync,\n\n{\n\n /// Returns a String representing this system's name.\n\n pub fn system_name(&self) -> String {\n\n format!(\"{}<{}>\", any::type_name::<Self>(), self.component_name)\n", "file_path": "crate/tracker/src/system/last_tracker_system.rs", "rank": 55, "score": 59569.2524117091 }, { "content": " F: FnOnce(),\n\n {\n\n reader_spawn_fn();\n\n StdinSystem { rx }\n\n }\n\n}\n\n\n\nimpl Default for StdinSystem {\n\n fn default() -> Self {\n\n let (tx, rx) = 
mpsc::channel();\n\n let reader_spawn_fn = || {\n\n thread::Builder::new()\n\n .name(reader::NAME.to_string())\n\n .spawn(|| StdinReader::new(tx).start())\n\n // TODO: replace new() with build() and return Result<..>\n\n .expect(\"Failed to spawn StdinReader thread.\");\n\n };\n\n Self::internal_new(rx, reader_spawn_fn)\n\n } // kcov-ignore\n\n}\n", "file_path": "crate/stdio_input/src/system/stdin_system.rs", "rank": 56, "score": 59567.101106893795 }, { "content": "/// as `T`, providing no trackable detection.\n\n///\n\n/// This should be used conservatively if the tracked type is `Clone` and not `Copy`, as the memory\n\n/// allocations can be a performance hit.\n\n#[derive(Clone, Debug, Default, new)]\n\npub struct LastTrackerSystem<T>\n\nwhere\n\n T: Component + Clone + Send + Sync,\n\n{\n\n /// Stringified name of the `Component` tracked by this system.\n\n component_name: &'static str,\n\n /// Component tracked by this system.\n\n component: PhantomData<T>,\n\n}\n\n\n\n/// `LastTrackerSystemData`.\n\n#[derive(Derivative, SystemData)]\n\n#[derivative(Debug)]\n\npub struct LastTrackerSystemData<'s, T>\n\nwhere\n", "file_path": "crate/tracker/src/system/last_tracker_system.rs", "rank": 57, "score": 59566.57059070413 }, { "content": "/// as `T`, providing no trackable detection.\n\n///\n\n/// This should be used conservatively if the tracked type is `Clone` and not `Copy`, as the memory\n\n/// allocations can be a performance hit.\n\n#[derive(Clone, Debug, Default, new)]\n\npub struct PrevTrackerSystem<T>\n\nwhere\n\n T: Clone + Send + Sync + 'static,\n\n{\n\n /// Stringified name of the `Component` tracked by this system.\n\n resource_name: &'static str,\n\n /// Component tracked by this system.\n\n resource: PhantomData<T>,\n\n}\n\n\n\n/// `PrevTrackerSystemData`.\n\n#[derive(Derivative, SystemData)]\n\n#[derivative(Debug)]\n\npub struct PrevTrackerSystemData<'s, T>\n\nwhere\n", "file_path": "crate/tracker/src/system/prev_tracker_system.rs", "rank": 58, "score": 
59566.57059070413 }, { "content": "use std::{\n\n sync::mpsc::{self, Receiver, TryRecvError},\n\n thread,\n\n};\n\n\n\nuse amethyst::{\n\n ecs::{ReadExpect, System, World, Write},\n\n shred::{ResourceId, SystemData},\n\n shrev::EventChannel,\n\n};\n\nuse application_input::ApplicationEvent;\n\nuse derivative::Derivative;\n\nuse log::{debug, error, info, trace, warn};\n\nuse state_registry::StateId;\n\nuse stdio_command_model::StdinCommandBarrier;\n\nuse stdio_spi::VariantAndTokens;\n\n\n\nuse crate::{\n\n reader::{self, StdinReader},\n\n IoAppEventUtils, StatementSplitter, StatementVariant,\n", "file_path": "crate/stdio_input/src/system/stdin_system.rs", "rank": 59, "score": 59566.180947653505 }, { "content": "};\n\n\n\n/// `StdinSystemData`.\n\n#[derive(Derivative, SystemData)]\n\n#[derivative(Debug)]\n\npub struct StdinSystemData<'s> {\n\n /// `StateId` resource.\n\n #[derivative(Debug = \"ignore\")]\n\n pub state_id: Option<ReadExpect<'s, StateId>>,\n\n /// `StdinCommandBarrier` resource.\n\n #[derivative(Debug = \"ignore\")]\n\n pub stdin_command_barrier: Write<'s, StdinCommandBarrier>,\n\n /// `ApplicationEvent` channel.\n\n #[derivative(Debug = \"ignore\")]\n\n pub application_ec: Write<'s, EventChannel<ApplicationEvent>>,\n\n /// `VariantAndTokens` channel.\n\n #[derivative(Debug = \"ignore\")]\n\n pub variant_and_tokens_ec: Write<'s, EventChannel<VariantAndTokens>>,\n\n}\n\n\n", "file_path": "crate/stdio_input/src/system/stdin_system.rs", "rank": 60, "score": 59565.673199814984 }, { "content": " }\n\n}\n\n\n\nimpl<'s, T> System<'s> for PrevTrackerSystem<T>\n\nwhere\n\n T: Clone + Send + Sync + 'static,\n\n{\n\n type SystemData = PrevTrackerSystemData<'s, T>;\n\n\n\n fn run(\n\n &mut self,\n\n PrevTrackerSystemData {\n\n resource,\n\n resource_prev,\n\n lazy_update,\n\n }: Self::SystemData,\n\n ) {\n\n if let Some(resource) = resource.as_ref() {\n\n let resource_prev_next = Prev::new((*resource).clone());\n\n if let Some(mut resource_prev) = resource_prev 
{\n\n *resource_prev = resource_prev_next;\n\n } else {\n\n lazy_update.exec_mut(move |world| world.insert(resource_prev_next));\n\n }\n\n }\n\n }\n\n}\n", "file_path": "crate/tracker/src/system/prev_tracker_system.rs", "rank": 61, "score": 59563.06086993884 }, { "content": " /// Path to the assets directory.\n\n assets_dir: PathBuf,\n\n}\n\n\n\n/// `AssetDiscoverySystemData`.\n\n#[derive(Derivative, SystemData)]\n\n#[derivative(Debug)]\n\npub struct AssetDiscoverySystemData<'s> {\n\n /// `Option<AssetIndex>` resource.\n\n #[derivative(Debug = \"ignore\")]\n\n pub asset_index: Write<'s, Option<AssetIndex>>,\n\n /// `AssetIdMappings` resource.\n\n #[derivative(Debug = \"ignore\")]\n\n pub asset_id_mappings: Write<'s, AssetIdMappings>,\n\n /// `AssetTypeMappings` resource.\n\n #[derivative(Debug = \"ignore\")]\n\n pub asset_type_mappings: Write<'s, AssetTypeMappings>,\n\n /// `AssetLoadStage` resource.\n\n #[derivative(Debug = \"ignore\")]\n\n pub asset_load_stage: Write<'s, AssetLoadStage>,\n", "file_path": "crate/loading/src/system/asset_discovery_system.rs", "rank": 62, "score": 59561.940018589485 }, { "content": " /// `SecondaryMap<AssetId, PathBuf>` resource.\n\n #[derivative(Debug = \"ignore\")]\n\n pub asset_id_to_path: Write<'s, SecondaryMap<AssetId, PathBuf>>,\n\n}\n\n\n\nimpl<'s> System<'s> for AssetDiscoverySystem {\n\n type SystemData = AssetDiscoverySystemData<'s>;\n\n\n\n fn run(\n\n &mut self,\n\n AssetDiscoverySystemData {\n\n mut asset_index,\n\n mut asset_id_mappings,\n\n mut asset_type_mappings,\n\n mut asset_load_stage,\n\n mut asset_id_to_path,\n\n }: Self::SystemData,\n\n ) {\n\n // TODO: Do a diff between existing index and directory based on a file watch / notify.\n\n // TODO: See <https://github.com/polachok/derive-diff>\n", "file_path": "crate/loading/src/system/asset_discovery_system.rs", "rank": 63, "score": 59560.6654236875 }, { "content": "\n\nimpl<'s> System<'s> for StdinSystem {\n\n type SystemData = StdinSystemData<'s>;\n\n\n\n fn 
run(\n\n &mut self,\n\n StdinSystemData {\n\n state_id,\n\n mut stdin_command_barrier,\n\n mut application_ec,\n\n mut variant_and_tokens_ec,\n\n }: Self::SystemData,\n\n ) {\n\n // Get an `Option<StateId>` from `Option<Read<StateId>>`.\n\n let state_id = state_id.as_deref().copied();\n\n if let Some(state_id) = state_id {\n\n let state_id = state_id;\n\n if let Some(state_id_barrier) = (*stdin_command_barrier).state_id {\n\n if state_id == state_id_barrier {\n\n debug!(\"State `{:?}` running, removing `StateIdBarrier`.\", state_id);\n", "file_path": "crate/stdio_input/src/system/stdin_system.rs", "rank": 64, "score": 59557.84222513028 }, { "content": " }\n\n}\n\n\n\nimpl<'s, T> System<'s> for LastTrackerSystem<T>\n\nwhere\n\n T: Component + Clone + Send + Sync,\n\n{\n\n type SystemData = LastTrackerSystemData<'s, T>;\n\n\n\n fn run(\n\n &mut self,\n\n LastTrackerSystemData {\n\n entities,\n\n components,\n\n mut components_lasts,\n\n }: Self::SystemData,\n\n ) {\n\n (&*entities, &components)\n\n .join()\n\n .for_each(|(entity, component)| {\n", "file_path": "crate/tracker/src/system/last_tracker_system.rs", "rank": 65, "score": 59556.298595854685 }, { "content": "\n\n if command_chain == StdinReader::EXIT_PHRASE {\n\n application_ec.single_write(ApplicationEvent::Exit);\n\n return;\n\n }\n\n\n\n // TODO: Proper command for this.\n\n if command_chain == \"current_state\" {\n\n if let Some(state_id) = state_id {\n\n info!(\"StateId: {}\", state_id);\n\n return;\n\n }\n\n }\n\n\n\n let statements = StatementSplitter::new(&command_chain).collect::<Vec<_>>();\n\n statements\n\n .into_iter()\n\n .filter_map(|statement| match statement {\n\n Ok(StatementVariant::Default(command))\n\n | Ok(StatementVariant::And(command))\n", "file_path": "crate/stdio_input/src/system/stdin_system.rs", "rank": 66, "score": 59552.4787205012 }, { "content": " components_lasts\n\n .insert(entity, Last(component.clone()))\n\n .unwrap_or_else(|_| {\n\n // kcov-ignore-start\n\n panic!(\n\n 
\"Failed to insert `{}<{}>` component.\",\n\n any::type_name::<Last::<T>>(),\n\n self.component_name\n\n )\n\n // kcov-ignore-end\n\n }); // kcov-ignore\n\n });\n\n }\n\n}\n", "file_path": "crate/tracker/src/system/last_tracker_system.rs", "rank": 67, "score": 59551.82970905532 }, { "content": " debug!(\n\n \"Asset ID ({:?}): slug: `{}`, type: `{:?}`\",\n\n asset_id, &asset_record.asset_slug, asset_type\n\n );\n\n\n\n asset_type_mappings.insert(asset_id, asset_type);\n\n asset_load_stage.insert(asset_id, LoadStage::New);\n\n asset_id_to_path.insert(asset_id, asset_record.path.clone());\n\n });\n\n\n\n *asset_index = Some(asset_index_discovered);\n\n }\n\n }\n\n}\n", "file_path": "crate/loading/src/system/asset_discovery_system.rs", "rank": 68, "score": 59549.077137265806 }, { "content": " | Ok(StatementVariant::Or(command)) => Some(command),\n\n Err(statement_error) => {\n\n error!(\"{}\", statement_error);\n\n None\n\n }\n\n })\n\n .for_each(|command| {\n\n match IoAppEventUtils::input_to_variant_and_tokens(&command) {\n\n Ok(variant_and_tokens) => {\n\n if let Some(variant_and_tokens) = variant_and_tokens {\n\n variant_and_tokens_ec.single_write(variant_and_tokens);\n\n }\n\n }\n\n Err(e) => error!(\"Failed to parse command. 
Error: `{}`.\", e),\n\n }\n\n });\n\n }\n\n Err(TryRecvError::Empty) => {\n\n // do nothing\n\n trace!(\"No message from StdinReader\");\n\n }\n\n Err(TryRecvError::Disconnected) => {\n\n warn!(\"Channel receiver to `StdinReader` disconnected.\");\n\n }\n\n };\n\n }\n\n}\n", "file_path": "crate/stdio_input/src/system/stdin_system.rs", "rank": 69, "score": 59545.27391707201 }, { "content": " if asset_index.is_none() {\n\n let asset_index_discovered = AssetDiscovery::asset_index(&self.assets_dir);\n\n debug!(\"Indexed assets: {:?}\", &asset_index_discovered);\n\n\n\n let capacity = asset_index_discovered\n\n .values()\n\n .fold(0, |acc, asset_records| acc + asset_records.len());\n\n asset_id_mappings.reserve(capacity);\n\n\n\n asset_index_discovered\n\n .iter()\n\n .flat_map(|(asset_type, asset_records)| {\n\n let asset_type = *asset_type;\n\n asset_records\n\n .iter()\n\n .map(move |asset_record| (asset_type, asset_record))\n\n })\n\n .for_each(|(asset_type, asset_record)| {\n\n let asset_id = asset_id_mappings.insert(asset_record.asset_slug.clone());\n\n\n", "file_path": "crate/loading/src/system/asset_discovery_system.rs", "rank": 70, "score": 59545.27391707201 }, { "content": "\n\n // Reset to `None` because we have reached this barrier.\n\n (*stdin_command_barrier).state_id = None;\n\n } else {\n\n debug!(\n\n \"Current state: `{:?}`, waiting for `{:?}`.\",\n\n state_id, state_id_barrier\n\n );\n\n\n\n // Skip sending events.\n\n return;\n\n }\n\n };\n\n } else {\n\n warn!(\"`StateId` resource is not set.\");\n\n }\n\n\n\n match self.rx.try_recv() {\n\n Ok(command_chain) => {\n\n debug!(\"`command_chain` from StdinReader: `{:?}`.\", &command_chain);\n", "file_path": "crate/stdio_input/src/system/stdin_system.rs", "rank": 71, "score": 59545.27391707201 }, { "content": "type SharedControllerInputUpdateSystemData<'s> = (\n\n ReadStorage<'s, InputControlled>,\n\n WriteStorage<'s, ControllerInput>,\n\n ReadStorage<'s, SharedInputControlled>,\n\n 
Entities<'s>,\n\n);\n\n\n\nimpl<'s> System<'s> for SharedControllerInputUpdateSystem {\n\n type SystemData = SharedControllerInputUpdateSystemData<'s>;\n\n\n\n fn run(\n\n &mut self,\n\n (input_controlleds, mut controller_inputs, shared_input_controlleds, entities): Self::SystemData,\n\n ) {\n\n let mut merged_input = (&controller_inputs, &input_controlleds).join().fold(\n\n ControllerInput::default(),\n\n |mut merged, (controller_input, _)| {\n\n merged.x_axis_value += controller_input.x_axis_value;\n\n merged.z_axis_value += controller_input.z_axis_value;\n\n merged.defend |= controller_input.defend;\n", "file_path": "crate/game_input/src/system/shared_controller_input_update_system.rs", "rank": 72, "score": 59227.57857112984 }, { "content": "type HitRepeatTrackersAugmentSystemData<'s> = (\n\n Read<'s, EventChannel<HitEvent>>,\n\n WriteStorage<'s, HitRepeatTrackers>,\n\n);\n\n\n\nimpl HitRepeatTrackersAugmentSystem {\n\n fn hit_repeat_tracker(entity_to: Entity, repeat_delay: HitRepeatDelay) -> HitRepeatTracker {\n\n let hit_repeat_clock = HitRepeatClock::new(*repeat_delay as usize);\n\n HitRepeatTracker::new(entity_to, hit_repeat_clock)\n\n }\n\n}\n\n\n\nimpl<'s> System<'s> for HitRepeatTrackersAugmentSystem {\n\n type SystemData = HitRepeatTrackersAugmentSystemData<'s>;\n\n\n\n fn run(&mut self, (collision_ec, mut hit_repeat_trackerses): Self::SystemData) {\n\n // Read from channel\n\n collision_ec\n\n .read(\n\n self.hit_event_rid\n", "file_path": "crate/collision_play/src/system/hit_repeat_trackers_augment_system.rs", "rank": 73, "score": 59227.57857112984 }, { "content": "use amethyst::{\n\n ecs::{Join, ReadStorage, System, World, WriteStorage},\n\n shred::{ResourceId, SystemData},\n\n};\n\nuse derivative::Derivative;\n\nuse derive_new::new;\n\nuse kinematic_model::config::Velocity;\n\nuse object_model::play::Grounding;\n\n\n\n/// Updates `Velocity<f32>` based on grounding.\n\n#[derive(Debug, Default, new)]\n\npub struct GroundingFrictionSystem;\n\n\n\n/// 
`GroundingFrictionSystemData`.\n\n#[derive(Derivative, SystemData)]\n\n#[derivative(Debug)]\n\npub struct GroundingFrictionSystemData<'s> {\n\n /// `Grounding` components.\n\n #[derivative(Debug = \"ignore\")]\n\n pub groundings: ReadStorage<'s, Grounding>,\n", "file_path": "crate/game_play/src/system/grounding_friction_system.rs", "rank": 74, "score": 58745.139947271186 }, { "content": "use amethyst::{\n\n ecs::{Join, ReadStorage, System, World, WriteStorage},\n\n shred::{ResourceId, SystemData},\n\n};\n\nuse derivative::Derivative;\n\nuse derive_new::new;\n\nuse kinematic_model::config::Velocity;\n\nuse object_model::{config::Mass, play::Grounding};\n\n\n\n/// Increases velocity of `Object`s that have `Mass` and are `Airborne`.\n\n#[derive(Debug, Default, new)]\n\npub struct ObjectGravitySystem;\n\n\n\n#[derive(Derivative, SystemData)]\n\n#[derivative(Debug)]\n\npub struct ObjectGravitySystemData<'s> {\n\n /// `Grounding` components.\n\n #[derivative(Debug = \"ignore\")]\n\n pub groundings: ReadStorage<'s, Grounding>,\n\n /// `Mass` components.\n", "file_path": "crate/object_play/src/system/object_gravity_system.rs", "rank": 75, "score": 58744.8929112407 }, { "content": "use amethyst::{\n\n ecs::{Join, ReadStorage, System, World, WriteStorage},\n\n shred::{ResourceId, SystemData},\n\n};\n\nuse charge_model::play::{ChargeRetention, ChargeStatus, ChargeTrackerClock};\n\nuse derivative::Derivative;\n\nuse derive_new::new;\n\n\n\n/// Reduces charge when not charging.\n\n#[derive(Debug, Default, new)]\n\npub struct ChargeRetentionSystem;\n\n\n\n#[derive(Derivative, SystemData)]\n\n#[derivative(Debug)]\n\npub struct ChargeRetentionSystemData<'s> {\n\n /// `ChargeStatus` components.\n\n #[derivative(Debug = \"ignore\")]\n\n pub charge_statuses: ReadStorage<'s, ChargeStatus>,\n\n /// `ChargeRetention` components.\n\n #[derivative(Debug = \"ignore\")]\n", "file_path": "crate/charge_play/src/system/charge_retention_system.rs", "rank": 76, "score": 58740.318783512776 }, { 
"content": "use amethyst::{\n\n ecs::{Join, ReadStorage, System, World, WriteStorage},\n\n shred::{ResourceId, SystemData},\n\n};\n\nuse charge_model::play::{ChargeDelayClock, ChargeStatus, ChargeTrackerClock};\n\nuse derivative::Derivative;\n\nuse derive_new::new;\n\n\n\n/// Ticks `ChargeTrackerClock` while `Charging`.\n\n#[derive(Debug, Default, new)]\n\npub struct ChargeIncrementSystem;\n\n\n\n#[derive(Derivative, SystemData)]\n\n#[derivative(Debug)]\n\npub struct ChargeIncrementSystemData<'s> {\n\n /// `ChargeStatus` components.\n\n #[derivative(Debug = \"ignore\")]\n\n pub charge_statuses: ReadStorage<'s, ChargeStatus>,\n\n /// `ChargeDelayClock` components.\n\n #[derivative(Debug = \"ignore\")]\n", "file_path": "crate/charge_play/src/system/charge_increment_system.rs", "rank": 78, "score": 58739.60718686788 }, { "content": "use amethyst::{\n\n ecs::{Join, Read, ReadStorage, System, World, WriteStorage},\n\n shred::{ResourceId, SystemData},\n\n};\n\nuse derivative::Derivative;\n\nuse derive_new::new;\n\nuse kinematic_model::config::Position;\n\nuse map_model::loaded::AssetMargins;\n\nuse map_selection_model::MapSelection;\n\nuse object_model::play::Grounding;\n\n\n\n/// Updates `Grounding` to `Airborne` for objects above the map bottom boundary.\n\n#[derive(Debug, Default, new)]\n\npub struct ObjectGroundingSystem;\n\n\n\n#[derive(Derivative, SystemData)]\n\n#[derivative(Debug)]\n\npub struct ObjectGroundingSystemData<'s> {\n\n /// `MapSelection` resource.\n\n #[derivative(Debug = \"ignore\")]\n", "file_path": "crate/object_play/src/system/object_grounding_system.rs", "rank": 79, "score": 58738.767326603374 }, { "content": "use amethyst::{\n\n core::math::Vector3,\n\n ecs::{Join, Read, ReadStorage, System, World, WriteStorage},\n\n renderer::camera::Camera,\n\n shred::{ResourceId, SystemData},\n\n};\n\nuse camera_model::play::{CameraTargetCoordinates, CameraTracked, CameraZoomDimensions};\n\nuse derivative::Derivative;\n\nuse derive_new::new;\n\nuse 
kinematic_model::config::Position;\n\nuse map_model::{\n\n config::MapBounds,\n\n loaded::{AssetMapBounds, AssetMargins, Margins},\n\n};\n\nuse map_selection_model::MapSelection;\n\nuse mirrored_model::play::Mirrored;\n\n\n\n/// Focuses the camera at the average position of tracked entities.\n\n#[derive(Debug, Default, new)]\n\npub struct CameraTrackingSystem;\n", "file_path": "crate/camera_play/src/system/camera_tracking_system.rs", "rank": 80, "score": 58736.632046251274 }, { "content": "use amethyst::{\n\n assets::{AssetStorage, Handle},\n\n core::{math::Vector3, transform::Transform},\n\n ecs::{Entities, Join, Read, ReadStorage, System, World, Write},\n\n renderer::{SpriteRender, SpriteSheet},\n\n shred::{ResourceId, SystemData},\n\n shrev::EventChannel,\n\n};\n\nuse collision_model::{\n\n config::{Body, Interaction, Interactions},\n\n play::CollisionEvent,\n\n};\n\nuse derivative::Derivative;\n\nuse derive_new::new;\n\nuse log::debug;\n\nuse mirrored_model::play::Mirrored;\n\nuse shape_model::Volume;\n\n\n\n/// Detects collisions for all objects.\n\n#[derive(Debug, Default, new)]\n", "file_path": "crate/collision_play/src/system/collision_detection_system.rs", "rank": 81, "score": 58735.38328750497 }, { "content": "use amethyst::{\n\n ecs::{Read, ReadStorage, System, World, WriteStorage},\n\n shred::{ResourceId, SystemData},\n\n shrev::{EventChannel, ReaderId},\n\n};\n\nuse collision_model::{loaded::HitTransition, play::HitEvent};\n\nuse derivative::Derivative;\n\nuse derive_new::new;\n\nuse sequence_model::loaded::SequenceId;\n\n\n\n/// Determines the next sequence for entities when they are hit.\n\n#[derive(Debug, Default, new)]\n\npub struct HitEffectSystem {\n\n /// Reader ID for the `HitEvent` event channel.\n\n #[new(default)]\n\n hit_event_rid: Option<ReaderId<HitEvent>>,\n\n}\n\n\n\n#[derive(Derivative, SystemData)]\n\n#[derivative(Debug)]\n", "file_path": "crate/collision_play/src/system/hit_effect_system.rs", "rank": 82, "score": 58733.68111453092 }, 
{ "content": "use amethyst::{\n\n ecs::{Read, ReadStorage, System, World, WriteStorage},\n\n shred::{ResourceId, SystemData},\n\n shrev::{EventChannel, ReaderId},\n\n};\n\nuse collision_model::{loaded::HittingTransition, play::HitEvent};\n\nuse derivative::Derivative;\n\nuse derive_new::new;\n\nuse sequence_model::loaded::SequenceId;\n\n\n\n/// Determines the next sequence for entities when they hit another object.\n\n#[derive(Debug, Default, new)]\n\npub struct HittingEffectSystem {\n\n /// Reader ID for the `HitEvent` event channel.\n\n #[new(default)]\n\n hit_event_rid: Option<ReaderId<HitEvent>>,\n\n}\n\n\n\n#[derive(Derivative, SystemData)]\n\n#[derivative(Debug)]\n", "file_path": "crate/collision_play/src/system/hitting_effect_system.rs", "rank": 83, "score": 58733.52869649195 }, { "content": "use amethyst::{\n\n ecs::{ReadStorage, System, World, Write, WriteStorage},\n\n shred::{ResourceId, SystemData},\n\n shrev::{EventChannel, ReaderId},\n\n};\n\nuse charge_model::{\n\n config::ChargeUseMode,\n\n play::{ChargeTrackerClock, ChargeUseEvent},\n\n};\n\nuse derivative::Derivative;\n\nuse derive_new::new;\n\nuse log::warn;\n\n\n\n/// Subtracts `ChargeTrackerClock` when used.\n\n#[derive(Debug, Default, new)]\n\npub struct ChargeUsageSystem {\n\n /// Reader ID for the `ChargeUseEvent` channel.\n\n #[new(default)]\n\n charge_event_rid: Option<ReaderId<ChargeUseEvent>>,\n\n}\n", "file_path": "crate/charge_play/src/system/charge_usage_system.rs", "rank": 84, "score": 58733.08611023488 }, { "content": "use amethyst::{\n\n ecs::{Read, ReadStorage, System, World, Write},\n\n shred::{ResourceId, SystemData},\n\n shrev::{EventChannel, ReaderId},\n\n};\n\nuse collision_model::play::{CollisionEvent, ContactEvent};\n\nuse derivative::Derivative;\n\nuse derive_new::new;\n\nuse spawn_model::play::SpawnParent;\n\nuse team_model::play::Team;\n\n\n\n/// Detects whether a `ContactEvent` occurs when a `CollisionEvent` happens.\n\n///\n\n/// This system determines if contact 
happens or not -- e.g. objects on the same team may or may not\n\n/// contact each other depending on the type of `Interaction`.\n\n#[derive(Debug, Default, new)]\n\npub struct ContactDetectionSystem {\n\n /// Reader ID for the `CollisionEvent` event channel.\n\n #[new(default)]\n\n collision_event_rid: Option<ReaderId<CollisionEvent>>,\n", "file_path": "crate/collision_play/src/system/contact_detection_system.rs", "rank": 85, "score": 58732.495278047434 }, { "content": "use amethyst::{\n\n ecs::{Entity, Read, ReadStorage, System, SystemData, World, Write, WriteStorage},\n\n shrev::{EventChannel, ReaderId},\n\n};\n\nuse collision_model::{\n\n config::{Hit, HitLimit, Interaction, InteractionKind},\n\n play::{ContactEvent, HitEvent, HitObjectCount, HitRepeatTrackers},\n\n};\n\nuse derive_new::new;\n\n\n\n/// Detects whether a `HitEvent` occurs there is contact between an `Interaction` and a `Volume`.\n\n#[derive(Debug, Default, new)]\n\npub struct HitDetectionSystem {\n\n /// Reader ID for the `ContactEvent` event channel.\n\n #[new(default)]\n\n contact_event_rid: Option<ReaderId<ContactEvent>>,\n\n}\n\n\n", "file_path": "crate/collision_play/src/system/hit_detection_system.rs", "rank": 86, "score": 58732.49222699501 }, { "content": "use amethyst::{\n\n ecs::{Read, System, World, Write, WriteExpect},\n\n shred::{ResourceId, SystemData},\n\n shrev::{EventChannel, ReaderId},\n\n};\n\nuse asset_model::{config::AssetType, loaded::AssetTypeMappings};\n\nuse asset_selection_model::play::{AssetSelection, AssetSelectionEvent};\n\nuse derivative::Derivative;\n\nuse derive_new::new;\n\nuse log::warn;\n\nuse map_selection_model::MapSelection;\n\n\n\nuse crate::MapSelectionStatus;\n\n\n\n/// Updates the `MapSelection` resource based on user selection.\n\n#[derive(Debug, Default, new)]\n\npub struct MapSelectionSystem {\n\n /// ID for reading map selection events.\n\n #[new(default)]\n\n asset_selection_event_rid: Option<ReaderId<AssetSelectionEvent>>,\n", "file_path": 
"crate/map_selection/src/system/map_selection_system.rs", "rank": 87, "score": 58731.68616931299 }, { "content": " #[derivative(Debug = \"ignore\")]\n\n pub masses: ReadStorage<'s, Mass>,\n\n /// `Velocity<f32>` components.\n\n #[derivative(Debug = \"ignore\")]\n\n pub velocities: WriteStorage<'s, Velocity<f32>>,\n\n}\n\n\n\nimpl<'s> System<'s> for ObjectGravitySystem {\n\n type SystemData = ObjectGravitySystemData<'s>;\n\n\n\n fn run(\n\n &mut self,\n\n ObjectGravitySystemData {\n\n groundings,\n\n masses,\n\n mut velocities,\n\n }: Self::SystemData,\n\n ) {\n\n (&groundings, &masses, &mut velocities)\n\n .join()\n", "file_path": "crate/object_play/src/system/object_gravity_system.rs", "rank": 88, "score": 58731.00852908418 }, { "content": "use amethyst::{\n\n ecs::{Read, System, World, Write},\n\n shred::{ResourceId, SystemData},\n\n shrev::{EventChannel, ReaderId},\n\n};\n\nuse asset_model::{config::AssetType, loaded::AssetTypeMappings};\n\nuse asset_selection_model::play::{AssetSelection, AssetSelectionEvent};\n\nuse character_selection_model::CharacterSelections;\n\nuse derivative::Derivative;\n\nuse derive_new::new;\n\nuse log::warn;\n\nuse object_type::ObjectType;\n\n\n\n/// Populates the `CharacterSelections` based on user input.\n\n#[derive(Debug, Default, new)]\n\npub struct CharacterSelectionSystem {\n\n /// Reader ID for the `AssetSelectionEvent` event channel.\n\n #[new(default)]\n\n asset_selection_event_rid: Option<ReaderId<AssetSelectionEvent>>,\n\n}\n", "file_path": "crate/character_selection/src/system/character_selection_system.rs", "rank": 89, "score": 58730.57691287775 }, { "content": "use std::marker::PhantomData;\n\n\n\nuse amethyst::{\n\n ecs::{Read, System, World, Write},\n\n shred::{ResourceId, SystemData},\n\n};\n\nuse derivative::Derivative;\n\nuse derive_new::new;\n\nuse loading_model::loaded::{AssetLoadStage, AssetLoadStatus, LoadStatus};\n\n\n\nuse crate::{AssetLoadingResources, AssetPartLoader};\n\n\n\n/// Loads part of a collective 
asset.\n\n#[derive(Derivative, new)]\n\n#[derivative(Debug)]\n\npub struct AssetPartLoadingSystem<R>\n\nwhere\n\n R: for<'s> AssetPartLoader<'s>,\n\n{\n\n /// Marker.\n", "file_path": "crate/loading/src/system/asset_part_loading_system.rs", "rank": 90, "score": 58730.331933350295 }, { "content": "\n\n#[derive(Derivative, SystemData)]\n\n#[derivative(Debug)]\n\npub struct ChargeUsageSystemData<'s> {\n\n /// `ChargeUseEvent` channel.\n\n #[derivative(Debug = \"ignore\")]\n\n pub charge_ec: Write<'s, EventChannel<ChargeUseEvent>>,\n\n /// `ChargeUseMode` components.\n\n #[derivative(Debug = \"ignore\")]\n\n pub charge_use_modes: ReadStorage<'s, ChargeUseMode>,\n\n /// `ChargeTrackerClock` components.\n\n #[derivative(Debug = \"ignore\")]\n\n pub charge_tracker_clocks: WriteStorage<'s, ChargeTrackerClock>,\n\n}\n\n\n\nimpl<'s> System<'s> for ChargeUsageSystem {\n\n type SystemData = ChargeUsageSystemData<'s>;\n\n\n\n fn run(\n\n &mut self,\n", "file_path": "crate/charge_play/src/system/charge_usage_system.rs", "rank": 92, "score": 58729.641873348424 }, { "content": " /// `Body` assets.\n\n #[derivative(Debug = \"ignore\")]\n\n pub body_assets: Read<'s, AssetStorage<Body>>,\n\n /// `SpriteRender` components.\n\n #[derivative(Debug = \"ignore\")]\n\n pub sprite_renders: ReadStorage<'s, SpriteRender>,\n\n /// `Mirrored` components.\n\n #[derivative(Debug = \"ignore\")]\n\n pub mirroreds: ReadStorage<'s, Mirrored>,\n\n /// `SpriteSheet` assets.\n\n #[derivative(Debug = \"ignore\")]\n\n pub sprite_sheet_assets: Read<'s, AssetStorage<SpriteSheet>>,\n\n /// `CollisionEvent` channel.\n\n #[derivative(Debug = \"ignore\")]\n\n pub collision_ec: Write<'s, EventChannel<CollisionEvent>>,\n\n}\n\n\n\nimpl CollisionDetectionSystem {\n\n fn intersects(\n\n relative_pos: &Vector3<f32>,\n", "file_path": "crate/collision_play/src/system/collision_detection_system.rs", "rank": 93, "score": 58729.59446676459 }, { "content": " #[derivative(Debug = \"ignore\")]\n\n pub positions: 
ReadStorage<'s, Position<f32>>,\n\n /// `Mirrored` components.\n\n #[derivative(Debug = \"ignore\")]\n\n pub mirroreds: ReadStorage<'s, Mirrored>,\n\n /// `Camera` components.\n\n #[derivative(Debug = \"ignore\")]\n\n pub cameras: ReadStorage<'s, Camera>,\n\n /// `CameraTargetCoordinates` components.\n\n #[derivative(Debug = \"ignore\")]\n\n pub camera_target_coordinateses: WriteStorage<'s, CameraTargetCoordinates>,\n\n}\n\n\n\nimpl CameraTrackingSystem {\n\n /// Returns the mean position of `CameraTracked` entities.\n\n fn position_average(\n\n camera_trackeds: &ReadStorage<'_, CameraTracked>,\n\n positions: &ReadStorage<'_, Position<f32>>,\n\n ) -> Vector3<f32> {\n\n let positions = (camera_trackeds, positions)\n", "file_path": "crate/camera_play/src/system/camera_tracking_system.rs", "rank": 94, "score": 58729.135282998985 }, { "content": " pub map_selection: Read<'s, MapSelection>,\n\n /// `AssetMargins` resource.\n\n #[derivative(Debug = \"ignore\")]\n\n pub asset_margins: Read<'s, AssetMargins>,\n\n /// `Position<f32>` components.\n\n #[derivative(Debug = \"ignore\")]\n\n pub positions: ReadStorage<'s, Position<f32>>,\n\n /// `Grounding` components.\n\n #[derivative(Debug = \"ignore\")]\n\n pub groundings: WriteStorage<'s, Grounding>,\n\n}\n\n\n\nimpl<'s> System<'s> for ObjectGroundingSystem {\n\n type SystemData = ObjectGroundingSystemData<'s>;\n\n\n\n fn run(\n\n &mut self,\n\n ObjectGroundingSystemData {\n\n map_selection,\n\n asset_margins,\n", "file_path": "crate/object_play/src/system/object_grounding_system.rs", "rank": 95, "score": 58726.67686274451 }, { "content": "/// * `FrameWaitClock`\n\n/// * `FrameIndexClock`\n\n///\n\n/// This system **must** be run before all systems that update the frame components that are\n\n/// attached to entities, as the `SequenceUpdateEvent`s include the new frame index, which is only\n\n/// guaranteed to be valid for the current dispatcher run.\n\n#[derive(Debug, Default, new)]\n\npub struct 
SequenceUpdateSystem;\n\n\n\n/// `SequenceUpdateSystemData`.\n\n#[derive(Derivative, SystemData)]\n\n#[derivative(Debug)]\n\npub struct SequenceUpdateSystemData<'s> {\n\n /// `Entities`.\n\n #[derivative(Debug = \"ignore\")]\n\n pub entities: Entities<'s>,\n\n /// `WaitSequenceHandle` component storage.\n\n #[derivative(Debug = \"ignore\")]\n\n pub wait_sequence_handles: ReadStorage<'s, WaitSequenceHandle>,\n\n /// `WaitSequence` assets.\n", "file_path": "crate/sequence_play/src/system/sequence_update_system.rs", "rank": 96, "score": 58726.66884817107 }, { "content": "}\n\n\n\n#[derive(Derivative, SystemData)]\n\n#[derivative(Debug)]\n\npub struct ContactDetectionSystemData<'s> {\n\n /// `CollisionEvent` channel.\n\n #[derivative(Debug = \"ignore\")]\n\n pub collision_ec: Read<'s, EventChannel<CollisionEvent>>,\n\n /// `SpawnParent` components.\n\n #[derivative(Debug = \"ignore\")]\n\n pub spawn_parents: ReadStorage<'s, SpawnParent>,\n\n /// `Team` components.\n\n #[derivative(Debug = \"ignore\")]\n\n pub teams: ReadStorage<'s, Team>,\n\n /// `ContactEvent` channel.\n\n #[derivative(Debug = \"ignore\")]\n\n pub contact_ec: Write<'s, EventChannel<ContactEvent>>,\n\n}\n\n\n\nimpl<'s> System<'s> for ContactDetectionSystem {\n", "file_path": "crate/collision_play/src/system/contact_detection_system.rs", "rank": 97, "score": 58726.540678802165 }, { "content": "pub struct HittingEffectSystemData<'s> {\n\n /// `HitEvent` channel.\n\n #[derivative(Debug = \"ignore\")]\n\n pub hit_ec: Read<'s, EventChannel<HitEvent>>,\n\n /// `HittingTransition` components.\n\n #[derivative(Debug = \"ignore\")]\n\n pub hitting_transitions: ReadStorage<'s, HittingTransition>,\n\n /// `SequenceId` components.\n\n #[derivative(Debug = \"ignore\")]\n\n pub sequence_ids: WriteStorage<'s, SequenceId>,\n\n}\n\n\n\nimpl<'s> System<'s> for HittingEffectSystem {\n\n type SystemData = HittingEffectSystemData<'s>;\n\n\n\n fn run(\n\n &mut self,\n\n HittingEffectSystemData {\n\n hit_ec,\n\n 
hitting_transitions,\n", "file_path": "crate/collision_play/src/system/hitting_effect_system.rs", "rank": 98, "score": 58725.62027352879 }, { "content": "pub struct HitEffectSystemData<'s> {\n\n /// `HitEvent` channel.\n\n #[derivative(Debug = \"ignore\")]\n\n pub hit_ec: Read<'s, EventChannel<HitEvent>>,\n\n /// `HitTransition` components.\n\n #[derivative(Debug = \"ignore\")]\n\n pub hit_transitions: ReadStorage<'s, HitTransition>,\n\n /// `SequenceId` components.\n\n #[derivative(Debug = \"ignore\")]\n\n pub sequence_ids: WriteStorage<'s, SequenceId>,\n\n}\n\n\n\nimpl<'s> System<'s> for HitEffectSystem {\n\n type SystemData = HitEffectSystemData<'s>;\n\n\n\n fn run(\n\n &mut self,\n\n HitEffectSystemData {\n\n hit_ec,\n\n hit_transitions,\n", "file_path": "crate/collision_play/src/system/hit_effect_system.rs", "rank": 99, "score": 58725.62027352879 } ]
Rust
src/server.rs
dethoter/tokio_test
c0185eb40b9e5bc994cf3b2b4916c924de3affa0
#![cfg_attr(feature="clippy", feature(plugin))] #![cfg_attr(feature="clippy", plugin(clippy))] #![deny(deprecated)] extern crate tokio_proto; extern crate tokio_io; extern crate tokio_service; extern crate tokio_timer; extern crate futures; extern crate futures_cpupool; extern crate bytes; extern crate bincode; extern crate structopt; #[macro_use] extern crate structopt_derive; extern crate serde; #[macro_use] extern crate serde_derive; mod protocol; use std::io; use std::net::SocketAddr; use std::time::{Duration, Instant}; use futures::{future, Future}; use futures_cpupool::CpuPool; use tokio_io::{AsyncRead, AsyncWrite}; use tokio_io::codec::{Framed, Encoder, Decoder}; use tokio_timer::Timer; use tokio_proto::TcpServer; use tokio_proto::multiplex::{RequestId, ServerProto}; use tokio_service::Service; use structopt::StructOpt; use bytes::{BytesMut, Buf, BufMut, BigEndian}; use protocol::{CountRequest, CountResponse}; #[derive(StructOpt, Debug)] #[structopt(name = "serv", about = "Server that counts")] struct Args { #[structopt(short = "p", long = "port", help = "Server's port", default_value = "5234")] port: u16, #[structopt(short = "t", long = "timeout", help = "Timeout per 1 task", default_value = "5")] timeout: usize, #[structopt(short = "j", long = "threads", help = "Number of threads", default_value = "0")] threads: usize, } struct ServerCodec; impl Decoder for ServerCodec { type Item = (RequestId, CountRequest); type Error = io::Error; fn decode(&mut self, buf: &mut BytesMut) -> Result<Option<Self::Item>, io::Error> { if buf.len() >= 4 { let length = io::Cursor::new(&buf.split_to(4)).get_u32::<BigEndian>() as usize; if buf.len() >= length { return Ok(bincode::deserialize(&buf.split_to(length)).ok()); } } Ok(None) } } impl Encoder for ServerCodec { type Item = (RequestId, CountResponse); type Error = io::Error; fn encode(&mut self, item: Self::Item, buf: &mut BytesMut) -> io::Result<()> { let bytes = bincode::serialize(&item, bincode::Infinite).map_err(|_| { 
io::ErrorKind::InvalidData })?; let length = bytes.len(); buf.reserve(4 + length); buf.put_u32::<BigEndian>(length as u32); buf.put_slice(&bytes); Ok(()) } } struct ServerProtocol; impl<T: AsyncRead + AsyncWrite + 'static> ServerProto<T> for ServerProtocol { type Request = CountRequest; type Response = CountResponse; type Transport = Framed<T, ServerCodec>; type BindTransport = Result<Self::Transport, io::Error>; fn bind_transport(&self, io: T) -> Self::BindTransport { Ok(io.framed(ServerCodec)) } } struct Counter { thread_pool: CpuPool, timeout: Duration, } impl Service for Counter { type Request = CountRequest; type Response = CountResponse; type Error = io::Error; type Future = Box<Future<Item = Self::Response, Error = Self::Error>>; fn call(&self, req: Self::Request) -> Self::Future { let CountRequest(num) = req; println!("Request: {:?}", num); let timeout = Timer::default().sleep(self.timeout).then(|_| Err(())); let task = self.thread_pool .spawn_fn(move || Ok(count(num))) .select(timeout) .map(|(r, _)| r); Box::new( task.and_then(move |d| { println!("Response: Duration({:?},{:?})", num, &d); future::ok(CountResponse::Duration(num, d)) }).or_else(move |_| { println!("Response: Timeout({:?})", num); future::ok(CountResponse::Timeout(num)) }), ) } } fn count(max: u64) -> Duration { let now = Instant::now(); let mut i = 0; while i < max { i += 1; } now.elapsed() } fn serve(address: SocketAddr, pool: CpuPool, duration: Duration) { TcpServer::new(ServerProtocol, address).serve(move || { Ok(Counter { thread_pool: pool.clone(), timeout: duration, }) }); } fn main() { let args = Args::from_args(); let pool = if args.threads == 0 { CpuPool::new_num_cpus() } else { CpuPool::new(args.threads) }; let duration = Duration::from_secs(args.timeout as u64); let address = format!("0.0.0.0:{}", args.port).parse().unwrap(); println!( "Address: {:?}\nTimeout: {:?}\nThreads: {:?}", &address, args.timeout, if args.threads != 0 { args.threads.to_string() } else { "native".into() } ); 
serve(address, pool, duration); }
#![cfg_attr(feature="clippy", feature(plugin))] #![cfg_attr(feature="clippy", plugin(clippy))] #![deny(deprecated)] extern crate tokio_proto; extern crate tokio_io; extern crate tokio_service; extern crate tokio_timer; extern crate futures; extern crate futures_cpupool; extern crate bytes; extern crate bincode; extern crate structopt; #[macro_use] extern crate structopt_derive; extern crate serde; #[macro_use] extern crate serde_derive; mod protocol; use std::io; use std::net::SocketAddr; use std::time::{Duration, Instant}; use futures::{future, Future}; use futures_cpupool::CpuPool; use tokio_io::{AsyncRead, AsyncWrite}; use tokio_io::codec::{Framed, Encoder, Decoder}; use tokio_timer::Timer; use tokio_proto::TcpServer; use tokio_proto::multiplex::{RequestId, ServerProto}; use tokio_service::Service; use structopt::StructOpt; use bytes::{BytesMut, Buf, BufMut, BigEndian}; use protocol::{CountRequest, CountResponse}; #[derive(StructOpt, Debug)] #[structopt(name = "serv", about = "Server that counts")] struct Args { #[structopt(short = "p", long = "port", help = "Server's port", default_value = "5234")] port: u16, #[structopt(short = "t", long = "timeout", help = "Timeout per 1 task", default_value = "5")] timeout: usize, #[structopt(short = "j", long = "threads", help = "Number of threads", default_value = "0")] threads: usize, } struct ServerCodec; impl Decoder for ServerCodec { type Item = (RequestId, CountRequest); type Error = io::Error; fn decode(&mut self, buf: &mut BytesMut) -> Result<Option<Self::Item>, io::Error> { if buf.len() >= 4 { let length = io::Cursor::new(&buf.split_to(4)).get_u32::<BigEndian>() as usize; if buf.len() >= length { return Ok(bincode::deserialize(&buf.split_to(length)).ok()); } } Ok(None) } } impl Encoder for ServerCodec { type Item = (RequestId, CountResponse); type Error = io::Error; fn encode(&mut self, item: Self::Item, buf: &mut BytesMut) -> io::Result<()> { let bytes = bincode::serialize(&item, bincode::Infinite).map_err(|_| { 
io::ErrorKind::InvalidData })?; let length = bytes.len(); buf.reserve(4 + length); buf.put_u32::<BigEndian>(length as u32); buf.put_slice(&bytes); Ok(()) } } struct ServerProtocol; impl<T: AsyncRead + AsyncWrite + 'static> ServerProto<T> for ServerProtocol { type Request = CountRequest; type Response = CountResponse; type Transport = Framed<T, ServerCodec>; type BindTransport = Result<Self::Transport, io::Error>; fn bind_transport(&self, io: T) -> Self::BindTransport { Ok(io.framed(ServerCodec)) } } struct Counter { thread_pool: CpuPool, timeout: Duration, } impl Service for Counter { type Request = CountRequest; type Response = CountResponse; type Error = io::Error; type Future = Box<Future<Item = Self::Response, Error = Self::Error>>; fn call(&self, req: Self::Request) -> Self::Future { let CountRequest(num) = req; println!("Request: {:?}", num); let timeout = Timer::default().sleep(self.timeout).then(|_| Err(())); let task = self.thread_pool .spawn_fn(move || Ok(cou
} fn count(max: u64) -> Duration { let now = Instant::now(); let mut i = 0; while i < max { i += 1; } now.elapsed() } fn serve(address: SocketAddr, pool: CpuPool, duration: Duration) { TcpServer::new(ServerProtocol, address).serve(move || { Ok(Counter { thread_pool: pool.clone(), timeout: duration, }) }); } fn main() { let args = Args::from_args(); let pool = if args.threads == 0 { CpuPool::new_num_cpus() } else { CpuPool::new(args.threads) }; let duration = Duration::from_secs(args.timeout as u64); let address = format!("0.0.0.0:{}", args.port).parse().unwrap(); println!( "Address: {:?}\nTimeout: {:?}\nThreads: {:?}", &address, args.timeout, if args.threads != 0 { args.threads.to_string() } else { "native".into() } ); serve(address, pool, duration); }
nt(num))) .select(timeout) .map(|(r, _)| r); Box::new( task.and_then(move |d| { println!("Response: Duration({:?},{:?})", num, &d); future::ok(CountResponse::Duration(num, d)) }).or_else(move |_| { println!("Response: Timeout({:?})", num); future::ok(CountResponse::Timeout(num)) }), ) }
function_block-function_prefixed
[ { "content": "fn request_count(address: SocketAddr, range_max: usize, requests: usize) {\n\n let mut core = Core::new().unwrap();\n\n let handle = core.handle();\n\n\n\n let promises = (0..requests)\n\n .map(move |_| {\n\n TcpClient::new(ClientProtocol)\n\n .connect(&address, &handle)\n\n .and_then(move |client| {\n\n let request = thread_rng().gen_range(0, range_max as u64);\n\n println!(\"Request: {:?}\", request);\n\n client.call(CountRequest(request))\n\n })\n\n })\n\n .collect::<Vec<_>>();\n\n let mut work = futures_unordered(promises).into_future();\n\n loop {\n\n match core.run(work) {\n\n Ok((None, _)) => {\n\n break;\n", "file_path": "src/client.rs", "rank": 4, "score": 59471.20406122072 }, { "content": "#[derive(StructOpt, Debug)]\n\n#[structopt(name = \"cli\", about = \"Console client for counter server\")]\n\nstruct Args {\n\n #[structopt(short = \"a\", long = \"address\", help = \"Server address\",\n\n default_value = \"127.0.0.1:5234\")]\n\n address: String,\n\n\n\n #[structopt(short = \"r\", long = \"range_max\",\n\n help = \"Max value for randomization range used for requests\",\n\n default_value = \"9999999\")]\n\n range_max: usize,\n\n\n\n #[structopt(short = \"n\", long = \"requests\", help = \"Number of sending requests\",\n\n default_value = \"5\")]\n\n requests: usize,\n\n}\n\n\n", "file_path": "src/client.rs", "rank": 7, "score": 42509.1373719902 }, { "content": "struct ClientProtocol;\n\nimpl<T: AsyncRead + AsyncWrite + 'static> ClientProto<T> for ClientProtocol {\n\n type Request = CountRequest;\n\n type Response = CountResponse;\n\n type Transport = Framed<T, ClientCodec>;\n\n type BindTransport = Result<Self::Transport, io::Error>;\n\n fn bind_transport(&self, io: T) -> Self::BindTransport {\n\n Ok(io.framed(ClientCodec))\n\n }\n\n}\n\n\n", "file_path": "src/client.rs", "rank": 9, "score": 40417.68263931759 }, { "content": "fn main() {\n\n let args = Args::from_args();\n\n\n\n let address = args.address.parse().unwrap();\n\n 
println!(\n\n \"Address: {:?}\\nRange: [0..{:?}]\\nRequests: {:?}\",\n\n address,\n\n args.range_max,\n\n args.requests\n\n );\n\n\n\n request_count(address, args.range_max, args.requests);\n\n}\n", "file_path": "src/client.rs", "rank": 10, "score": 28065.304102628055 }, { "content": "struct ClientCodec;\n\nimpl Decoder for ClientCodec {\n\n type Item = (RequestId, CountResponse);\n\n type Error = io::Error;\n\n\n\n fn decode(&mut self, buf: &mut BytesMut) -> Result<Option<Self::Item>, io::Error> {\n\n if buf.len() >= 4 {\n\n let length = io::Cursor::new(&buf.split_to(4)).get_u32::<BigEndian>() as usize;\n\n if buf.len() >= length {\n\n return Ok(bincode::deserialize(&buf.split_to(length)).ok());\n\n }\n\n }\n\n\n\n Ok(None)\n\n }\n\n}\n\n\n\nimpl Encoder for ClientCodec {\n\n type Item = (RequestId, CountRequest);\n\n type Error = io::Error;\n", "file_path": "src/client.rs", "rank": 11, "score": 25174.09031435406 }, { "content": "use std::time::Duration;\n\n\n\n#[derive(Serialize, Deserialize, Debug)]\n\npub enum CountResponse {\n\n Duration(u64, Duration),\n\n Timeout(u64),\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Debug)]\n\npub struct CountRequest(pub u64);\n", "file_path": "src/protocol.rs", "rank": 12, "score": 17268.651419583504 }, { "content": "\n\n fn encode(&mut self, item: Self::Item, buf: &mut BytesMut) -> io::Result<()> {\n\n let bytes = bincode::serialize(&item, bincode::Infinite).map_err(|_| {\n\n io::ErrorKind::InvalidData\n\n })?;\n\n let length = bytes.len();\n\n buf.reserve(4 + length);\n\n buf.put_u32::<BigEndian>(length as u32);\n\n buf.put_slice(&bytes);\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "src/client.rs", "rank": 17, "score": 16.68613934454246 }, { "content": "\n\nuse std::io;\n\nuse std::net::SocketAddr;\n\n\n\nuse futures::{Future, Stream};\n\nuse futures::stream::futures_unordered;\n\nuse tokio_io::{AsyncRead, AsyncWrite};\n\nuse tokio_io::codec::{Framed, Encoder, Decoder};\n\nuse tokio_core::reactor::Core;\n\nuse 
tokio_proto::TcpClient;\n\nuse tokio_proto::multiplex::{RequestId, ClientProto};\n\nuse tokio_service::Service;\n\nuse structopt::StructOpt;\n\nuse rand::{thread_rng, Rng};\n\nuse bytes::{BytesMut, Buf, BufMut, BigEndian};\n\nuse protocol::{CountRequest, CountResponse};\n\n\n\n#[derive(StructOpt, Debug)]\n\n#[structopt(name = \"cli\", about = \"Console client for counter server\")]\n", "file_path": "src/client.rs", "rank": 18, "score": 14.936385458971165 }, { "content": "#![cfg_attr(feature=\"clippy\", feature(plugin))]\n\n#![cfg_attr(feature=\"clippy\", plugin(clippy))]\n\n#![deny(deprecated)]\n\nextern crate tokio_proto;\n\nextern crate tokio_io;\n\nextern crate tokio_core;\n\nextern crate tokio_service;\n\nextern crate tokio_timer;\n\nextern crate futures;\n\nextern crate futures_cpupool;\n\nextern crate bytes;\n\nextern crate bincode;\n\nextern crate rand;\n\nextern crate structopt;\n\n#[macro_use]\n\nextern crate structopt_derive;\n\nextern crate serde;\n\n#[macro_use]\n\nextern crate serde_derive;\n\nmod protocol;\n", "file_path": "src/client.rs", "rank": 19, "score": 12.212973180881827 }, { "content": " }\n\n Ok((Some(response), next_requests)) => {\n\n println!(\"Response: {:?}\", response);\n\n work = next_requests.into_future();\n\n }\n\n Err((e, next_requests)) => {\n\n println!(\"Error: {:?}\", e);\n\n work = next_requests.into_future();\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/client.rs", "rank": 20, "score": 7.1321683865916325 }, { "content": "Run `clippy`:\n\n\n\n```sh\n\n$ rustup run nightly cargo build --features \"clippy\"\n\n```\n\n\n\nRun server:\n\n\n\n```sh\n\n$ cargo run --bin serv\n\n```\n\n\n\nRun client:\n\n\n\n```sh\n\n$ cargo run --bin cli\n\n```\n", "file_path": "README.md", "rank": 21, "score": 2.308748921481641 } ]
Rust
d3-core/src/scheduler/sched.rs
BruceBrown/d3
688ee218a994f3aab2fddc75feac308c58174333
use self::traits::*; use super::*; use crossbeam::channel::RecvTimeoutError; type MachineMap = super_slab::SuperSlab<ShareableMachine>; #[allow(dead_code)] #[allow(non_upper_case_globals)] pub static machine_count_estimate: AtomicCell<usize> = AtomicCell::new(5000); #[allow(dead_code)] pub fn get_machine_count_estimate() -> usize { machine_count_estimate.load() } #[allow(dead_code)] pub fn set_machine_count_estimate(new: usize) { machine_count_estimate.store(new); } #[allow(dead_code, non_upper_case_globals)] #[deprecated(since = "0.1.2", note = "select is no longer used by the scheduler")] pub static selector_maintenance_duration: AtomicCell<Duration> = AtomicCell::new(Duration::from_millis(500)); #[allow(dead_code, non_upper_case_globals, deprecated)] #[deprecated(since = "0.1.2", note = "select is no longer used by the scheduler")] pub fn get_selector_maintenance_duration() -> Duration { selector_maintenance_duration.load() } #[allow(dead_code, non_upper_case_globals, deprecated)] #[deprecated(since = "0.1.2", note = "select is no longer used by the scheduler")] pub fn set_selector_maintenance_duration(new: Duration) { selector_maintenance_duration.store(new); } #[allow(dead_code, non_upper_case_globals)] pub static live_machine_count: AtomicUsize = AtomicUsize::new(0); #[allow(dead_code, non_upper_case_globals)] pub fn get_machine_count() -> usize { live_machine_count.load(Ordering::SeqCst) } #[derive(Debug, Default, Copy, Clone)] pub struct SchedStats { pub maint_time: Duration, pub add_time: Duration, pub remove_time: Duration, pub total_time: Duration, } #[allow(dead_code)] pub struct DefaultScheduler { sender: SchedSender, wait_queue: SchedTaskInjector, thread: Option<thread::JoinHandle<()>>, } impl DefaultScheduler { fn stop(&self) { log::info!("stopping scheduler"); self.sender.send(SchedCmd::Stop).unwrap(); } pub fn new( sender: SchedSender, receiver: SchedReceiver, monitor: MonitorSender, queues: (ExecutorInjector, SchedTaskInjector), ) -> Self { 
live_machine_count.store(0, Ordering::SeqCst); let wait_queue = Arc::clone(&queues.1); let thread = SchedulerThread::spawn(receiver, monitor, queues); sender.send(SchedCmd::Start).unwrap(); Self { wait_queue, sender, thread, } } } impl Scheduler for DefaultScheduler { fn assign_machine(&self, machine: ShareableMachine) { self.sender.send(SchedCmd::New(machine)).unwrap(); } fn request_stats(&self) { self.sender.send(SchedCmd::RequestStats).unwrap(); } fn request_machine_info(&self) { self.sender.send(SchedCmd::RequestMachineInfo).unwrap(); } fn stop(&self) { self.stop(); } } impl Drop for DefaultScheduler { fn drop(&mut self) { if let Some(thread) = self.thread.take() { if self.sender.send(SchedCmd::Terminate(false)).is_err() {} log::info!("synchronizing Scheduler shutdown"); if thread.join().is_err() { log::trace!("failed to join Scheduler thread"); } } log::info!("Scheduler shutdown complete"); } } const MAX_SELECT_HANDLES: usize = usize::MAX - 16; #[allow(dead_code)] struct SchedulerThread { receiver: SchedReceiver, monitor: MonitorSender, wait_queue: SchedTaskInjector, run_queue: ExecutorInjector, is_running: bool, is_started: bool, machines: MachineMap, } impl SchedulerThread { fn spawn( receiver: SchedReceiver, monitor: MonitorSender, queues: (ExecutorInjector, SchedTaskInjector), ) -> Option<thread::JoinHandle<()>> { log::info!("Starting scheduler"); let thread = std::thread::spawn(move || { let mut sched_thread = Self { receiver, monitor, run_queue: queues.0, wait_queue: queues.1, is_running: true, is_started: false, machines: MachineMap::with_capacity(get_machine_count_estimate()), }; sched_thread.run(); }); Some(thread) } fn run(&mut self) { log::info!("running schdeuler"); let mut stats_timer = SimpleEventTimer::default(); let start = Instant::now(); let mut stats = SchedStats::default(); while self.is_running { if stats_timer.check() && self.monitor.send(MonitorMessage::SchedStats(stats)).is_err() { log::debug!("failed to send sched stats to mointor"); } 
match self.receiver.recv_timeout(stats_timer.remaining()) { Ok(cmd) => self.maintenance(cmd, &mut stats), Err(RecvTimeoutError::Timeout) => (), Err(RecvTimeoutError::Disconnected) => self.is_running = false, } } stats.total_time = start.elapsed(); log::info!("machines remaining: {}", self.machines.len()); for (_, m) in self.machines.iter() { log::info!( "machine={} key={} state={:#?} q_len={} task_id={} disconnected={}", m.get_id(), m.get_key(), m.get_state(), m.channel_len(), m.get_task_id(), m.is_disconnected() ); } log::info!("{:#?}", stats); log::info!("completed running schdeuler"); } fn maintenance(&mut self, cmd: SchedCmd, stats: &mut SchedStats) { let t = Instant::now(); match cmd { SchedCmd::Start => (), SchedCmd::Stop => self.is_running = false, SchedCmd::Terminate(_key) => (), SchedCmd::New(machine) => self.insert_machine(machine, stats), SchedCmd::SendComplete(key) => self.schedule_sendblock_machine(key), SchedCmd::Remove(key) => self.remove_machine(key, stats), SchedCmd::RecvBlock(key) => self.schedule_recvblock_machine(key), SchedCmd::RequestStats => if self.monitor.send(MonitorMessage::SchedStats(*stats)).is_err() {}, SchedCmd::RequestMachineInfo => self.send_machine_info(), _ => (), }; stats.maint_time += t.elapsed(); } fn insert_machine(&mut self, machine: ShareableMachine, stats: &mut SchedStats) { let t = Instant::now(); let entry = self.machines.vacant_entry(); log::trace!("inserted machine {} key={}", machine.get_id(), entry.key()); machine.key.store(entry.key(), Ordering::SeqCst); entry.insert(Arc::clone(&machine)); if let Err(state) = machine.compare_and_exchange_state(MachineState::New, MachineState::Ready) { log::error!("insert_machine: expected state New, found state {:#?}", state); } live_machine_count.fetch_add(1, Ordering::SeqCst); schedule_machine(machine, &self.run_queue); stats.add_time += t.elapsed(); } fn remove_machine(&mut self, key: usize, stats: &mut SchedStats) { let t = Instant::now(); if let Some(machine) = 
self.machines.get(key) { log::trace!( "removed machine {} key={} task={}", machine.get_id(), machine.get_key(), machine.get_task_id() ); } else { log::warn!("machine key {} not in collective", key); stats.remove_time += t.elapsed(); return; } self.machines.remove(key); live_machine_count.fetch_sub(1, Ordering::SeqCst); stats.remove_time += t.elapsed(); } fn send_machine_info(&self) { for (_, m) in &self.machines { if self.monitor.send(MonitorMessage::MachineInfo(Arc::clone(m))).is_err() { log::debug!("unable to send machine info to monitor"); } } } fn run_task(&self, machine: ShareableMachine) { if let Err(state) = machine.compare_and_exchange_state(MachineState::RecvBlock, MachineState::Ready) { if state != MachineState::Ready { log::error!("sched run_task expected RecvBlock or Ready state{:#?}", state); } } schedule_machine(machine, &self.run_queue); } fn schedule_sendblock_machine(&self, key: usize) { let machine = self.machines.get(key).unwrap(); if let Err(state) = machine.compare_and_exchange_state(MachineState::SendBlock, MachineState::RecvBlock) { log::error!("sched: (SendBlock) expecting state SendBlock, found {:#?}", state); return; } if !machine.is_channel_empty() && machine .compare_and_exchange_state(MachineState::RecvBlock, MachineState::Ready) .is_ok() { schedule_machine(Arc::clone(machine), &self.run_queue); } } fn schedule_recvblock_machine(&self, key: usize) { let machine = self.machines.get(key).unwrap(); if machine .compare_and_exchange_state(MachineState::RecvBlock, MachineState::Ready) .is_ok() { schedule_machine(Arc::clone(machine), &self.run_queue); } } } #[cfg(test)] mod tests { use self::executor::SystemExecutorFactory; use self::machine::get_default_channel_capacity; use self::overwatch::SystemMonitorFactory; use self::sched_factory::create_sched_factory; use super::*; use crossbeam::deque; use d3_derive::*; use std::time::Duration; use self::channel::{receiver::Receiver, sender::Sender}; #[test] fn can_terminate() { let monitor_factory = 
SystemMonitorFactory::new(); let executor_factory = SystemExecutorFactory::new(); let scheduler_factory = create_sched_factory(); let scheduler = scheduler_factory.start(monitor_factory.get_sender(), executor_factory.get_queues()); thread::sleep(Duration::from_millis(100)); log::info!("stopping scheduler via control"); scheduler.stop(); thread::sleep(Duration::from_millis(100)); } #[derive(Debug, MachineImpl)] pub enum TestMessage { Test, } struct Alice {} impl Machine<TestMessage> for Alice { fn receive(&self, _message: TestMessage) {} } #[allow(clippy::type_complexity)] pub fn build_machine<T, P>( machine: T, ) -> ( Arc<T>, Sender<<<P as MachineImpl>::Adapter as MachineBuilder>::InstructionSet>, MachineAdapter, ) where T: 'static + Machine<P> + Machine<<<P as MachineImpl>::Adapter as MachineBuilder>::InstructionSet>, P: MachineImpl, <P as MachineImpl>::Adapter: MachineBuilder, { let channel_max = get_default_channel_capacity(); let (machine, sender, collective_adapter) = <<P as MachineImpl>::Adapter as MachineBuilder>::build_raw(machine, channel_max); (machine, sender, collective_adapter) } #[test] fn test_scheduler() { let (monitor_sender, _monitor_receiver) = crossbeam::channel::unbounded::<MonitorMessage>(); let (sched_sender, sched_receiver) = crossbeam::channel::unbounded::<SchedCmd>(); let run_queue = new_executor_injector(); let wait_queue = Arc::new(deque::Injector::<SchedTask>::new()); let thread = SchedulerThread::spawn(sched_receiver, monitor_sender, (run_queue, wait_queue)); std::thread::sleep(std::time::Duration::from_millis(10)); let mut senders: Vec<Sender<TestMessage>> = Vec::new(); let mut machines: Vec<Arc<Alice>> = Vec::new(); for _ in 1 ..= 5 { let alice = Alice {}; let (alice, mut sender, adapter) = build_machine(alice); let adapter = Arc::new(adapter); sender.bind(Arc::clone(&adapter)); senders.push(sender); machines.push(alice); sched_sender.send(SchedCmd::New(adapter)).unwrap(); } let s = &senders[2]; s.send(TestMessage::Test).unwrap(); 
std::thread::sleep(std::time::Duration::from_millis(500)); sched_sender.send(SchedCmd::Stop).unwrap(); if let Some(thread) = thread { thread.join().unwrap(); } } }
use self::traits::*; use super::*; use crossbeam::channel::RecvTimeoutError; type MachineMap = super_slab::SuperSlab<ShareableMachine>; #[allow(dead_code)] #[allow(non_upper_case_globals)] pub static machine_count_estimate: AtomicCell<usize> = AtomicCell::new(5000); #[allow(dead_code)] pub fn get_machine_count_estimate() -> usize { machine_count_estimate.load() } #[allow(dead_code)] pub fn set_machine_count_estimate(new: usize) { machine_count_estimate.store(new); } #[allow(dead_code, non_upper_case_globals)] #[deprecated(since = "0.1.2", note = "select is no longer used by the scheduler")] pub static selector_maintenance_duration: AtomicCell<Duration> = AtomicCell::new(Duration::from_millis(500)); #[allow(dead_code, non_upper_case_globals, deprecated)] #[deprecated(since = "0.1.2", note = "select is no longer used by the scheduler")] pub fn get_selector_maintenance_duration() -> Duration { selector_maintenance_duration.load() } #[allow(dead_code, non_upper_case_globals, deprecated)] #[deprecated(since = "0.1.2", note = "select is no longer used by the scheduler")] pub fn set_selector_maintenance_duration(new: Duration) { selector_maintenance_duration.store(new); } #[allow(dead_code, non_upper_case_globals)] pub static live_machine_count: AtomicUsize = AtomicUsize::new(0); #[allow(dead_code, non_upper_case_globals)] pub fn get_machine_count() -> usize { live_machine_count.load(Ordering::SeqCst) } #[derive(Debug, Default, Copy, Clone)] pub struct SchedStats { pub maint_time: Duration, pub add_time: Duration, pub remove_time: Duration, pub total_time: Duration, } #[allow(dead_code)] pub struct DefaultScheduler { sender: SchedSender, wait_queue: SchedTaskInjector, thread: Option<thread::JoinHandle<()>>, } impl DefaultScheduler { fn stop(&self) { log::info!("stopping scheduler"); self.sender.send(SchedCmd::Stop).unwrap(); } pub fn new( sender: SchedSender, receiver: SchedReceiver, monitor: MonitorSender, queues: (ExecutorInjector, SchedTaskInjector), ) -> Self { 
live_machine_count.store(0, Ordering::SeqCst); let wait_queue = Arc::clone(&queues.1); let thread = SchedulerThread::spawn(receiver, monitor, queues); sender.send(SchedCmd::Start).unwrap(); Self { wait_queue, sender, thread, } } } impl Scheduler for DefaultScheduler { fn assign_machine(&self, machine: ShareableMachine) { self.sender.send(SchedCmd::New(machine)).unwrap(); } fn request_stats(&self) { self.sender.send(SchedCmd::RequestStats).unwrap(); } fn request_machine_info(&self) { self.sender.send(SchedCmd::RequestMachineInfo).unwrap(); } fn stop(&self) { self.stop(); } } impl Drop for DefaultScheduler { fn drop(&mut self) { if let Some(thread) = self.thread.take() { if self.sender.send(SchedCmd::Terminate(false)).is_err() {} log::info!("synchronizing Scheduler shutdown"); if thread.join().is_err() { log::trace!("failed to join Scheduler thread"); } } log::info!("Scheduler shutdown complete"); } } const MAX_SELECT_HANDLES: usize = usize::MAX - 16; #[allow(dead_code)] struct SchedulerThread { receiver: SchedReceiver, monitor: MonitorSender, wait_queue: SchedTaskInjector, run_queue: ExecutorInjector, is_running: bool, is_started: bool, machines: MachineMap, } impl SchedulerThread { fn spawn( receiver: SchedReceiver, monitor: MonitorSender, queues: (ExecutorInjector, SchedTaskInjector), ) -> Option<thread::JoinHandle<()>> { log::info!("Starting scheduler"); let thread = std::thread::spawn(move || { let mut sched_thread = Self { receiver, monitor, run_queue: queues.0, wait_queue: queues.1, is_running: true, is_started: false, machines: MachineMap::with_capacity(get_machine_count_estimate()), }; sched_thread.run(); }); Some(thread) } fn run(&mut self) { log::info!("running schdeuler"); let mut stats_timer = SimpleEventTimer::default(); let start = Instant::now(); let mut stats = SchedStats::default(); while self.is_running { if stats_timer.check() && self.monitor.send(MonitorMessage::SchedStats(stats)).is_err() { log::debug!("failed to send sched stats to mointor"); } 
match self.receiver.recv_timeout(stats_timer.remaining()) { Ok(cmd) => self.maintenance(cmd, &mut stats), Err(RecvTimeoutError::Timeout) => (), Err(RecvTimeoutError::Disconnected) => self.is_running = false, } } stats.total_time = start.elapsed(); log::info!("machines remaining: {}", self.machines.len()); for (_, m) in self.machines.iter() { log::info!( "machine={} key={} state={:#?} q_len={} task_id={} disconnected={}", m.get_id(), m.get_key(), m.get_state(), m.channel_len(), m.get_task_id(), m.is_disconnected() ); } log::info!("{:#?}", stats); log::info!("completed running schdeuler"); } fn maintenance(&mut self, cmd: SchedCmd, stats: &mut SchedStats) { let t = Instant::now(); match cmd { SchedCmd::Start => (), SchedCmd::Stop => self.is_running = false, SchedCmd::Terminate(_key) => (), SchedCmd::New(machine) => self.insert_machine(machine, stats), SchedCmd::SendComplete(key) => self.schedule_sendblock_machine(key), SchedCmd::Remove(key) => self.remove_machine(key, stats), SchedCmd::RecvBlock(key) => self.schedule_recvblock_machine(key), SchedCmd::RequestStats => if self.monitor.send(MonitorMessage::SchedStats(*stats)).is_err() {}, SchedCmd::RequestMachineInfo => self.send_machine_info(), _ => (), }; stats.maint_time += t.elapsed(); } fn insert_machine(&mut self, machine: ShareableMachine, stats: &mut SchedStats) { let t = Instant::now(); let entry = self.machines.vacant_entry(); log::trace!("inserted machine {} key={}", machine.get_id(), entry.key()); machine.key.store(entry.key(), Ordering::SeqCst); entry.insert(Arc::clone(&machine)); if let Err(state) = machine.compare_and_exchange_state(MachineState::New, MachineState::Ready) { log::error!("insert_machine: expected state New, found state {:#?}", state); } live_machine_count.fetch_add(1, Ordering::SeqCst); schedule_machine(machine, &self.run_queue); stats.add_time += t.elapsed(); } fn remove_machine(&mut self, key: usize, stats: &mut SchedStats) { let t = Instant::now(); if let Some(machine) = 
self.machines.get(key) { log::trace!( "removed machine {} key={} task={}", machine.get_id(), machine.get_key(), machine.get_task_id() ); } else { log::warn!("machine key {} not in collective", key); stats.remove_time += t.elapsed(); return; } self.machines.remove(key); live_machine_count.fetch_sub(1, Ordering::SeqCst); stats.remove_time += t.elapsed(); } fn send_machine_info(&self) { for (_, m) in &self.machines { if self.monitor.send(MonitorMessage::MachineInfo(Arc::clone(m))).is_err() { log::debug!("unable to send machine info to monitor"); } } } fn run_task(&self, machine: ShareableMachine) { if let Err(state) = machine.compare_and_exchange_state(MachineState::RecvBlock, MachineState::Ready) { if state != MachineState::Ready { log::error!("sched run_task expected RecvBlock or Ready state{:#?}", state); } } schedule_machine(machine, &self.run_queue); } fn schedule_sendblock_machine(&self, key: usize) { let machine = self.machines.get(key).unwrap(); if let Err(state) = machine.compare_and_exchange_state(MachineState::SendBlock, MachineState::RecvBlock) { log::error!("sched: (SendBlock) expecting state SendBlock, found {:#?}", state); return; } if !machine.is_channel_empty() && machine .compare_and_exchange_state(MachineState::RecvBlock, MachineState::Ready) .is_ok() { schedule_machine(Arc::clone(machine), &self.run_queue); } } fn schedule_recvblock_machine(&self, key: usize) { let machine = self.machines.get(key).unwrap(); if machine .compare_and_exchange_state(MachineState::RecvBlock, MachineState::Ready) .is_ok() { schedule_machine(Arc::clone(machine), &self.run_queue); } } } #[cfg(test)] mod tests { use self::executor::SystemExecutorFactory; use self::machine::get_default_channel_capacity; use self::overwatch::SystemMonitorFactory; use self::sched_factory::create_sched_factory; use super::*; use crossbeam::deque; use d3_derive::*; use std::time::Duration; use self::channel::{receiver::Receiver, sender::Sender}; #[test] fn can_terminate() { let monitor_factory = 
SystemMonitorFactory::new(); let executor_factory = SystemExecutorFactory::new(); let scheduler_factory = create_sched_factory(); let scheduler = scheduler_factory.start(monitor_factory.get_sender(), executor_factory.get_queues()); thread::sleep(Duration::from_millis(100)); log::info!("stopping scheduler via control"); scheduler.stop(); thread::sleep(Duration::from_millis(100)); } #[derive(Debug, MachineImpl)] pub enum TestMessage { Test, } struct Alice {} impl Machine<TestMessage> for Alice { fn receive(&self, _message: TestMessage) {} } #[allow(clippy::type_complexity)] pub fn build_machine<T, P>( machine: T, ) -> ( Arc<T>, Sender<<<P as MachineImpl>::Adapter as MachineBuilder>::InstructionSet>, MachineAdapter, ) where T: 'static + Machine<P> + Machine<<<P as MachineImpl>::Adapter as MachineBuilder>::InstructionSet>, P: MachineImpl, <P as MachineImpl>::Adapter: MachineBuilder, { let channel_max = get_default_channel_capacity(); let (machine, sender, collective_adapter) = <<P as MachineImpl>::Adapter as MachineBuilder>::build_raw(machine, channel_max); (machine, sender, collective_adapter) } #[test] fn test_scheduler() { let (monitor_sender, _monitor_receiver) = crossbeam::channel::unbounded::<MonitorMessage>(); le
ter)); senders.push(sender); machines.push(alice); sched_sender.send(SchedCmd::New(adapter)).unwrap(); } let s = &senders[2]; s.send(TestMessage::Test).unwrap(); std::thread::sleep(std::time::Duration::from_millis(500)); sched_sender.send(SchedCmd::Stop).unwrap(); if let Some(thread) = thread { thread.join().unwrap(); } } }
t (sched_sender, sched_receiver) = crossbeam::channel::unbounded::<SchedCmd>(); let run_queue = new_executor_injector(); let wait_queue = Arc::new(deque::Injector::<SchedTask>::new()); let thread = SchedulerThread::spawn(sched_receiver, monitor_sender, (run_queue, wait_queue)); std::thread::sleep(std::time::Duration::from_millis(10)); let mut senders: Vec<Sender<TestMessage>> = Vec::new(); let mut machines: Vec<Arc<Alice>> = Vec::new(); for _ in 1 ..= 5 { let alice = Alice {}; let (alice, mut sender, adapter) = build_machine(alice); let adapter = Arc::new(adapter); sender.bind(Arc::clone(&adap
function_block-random_span
[ { "content": "type RecvCmdFn = Box<dyn Fn(&ShareableMachine, bool, Duration, &mut ExecutorStats) + Send + Sync + 'static>;\n\n\n\n#[derive(Debug)]\n\npub struct DefaultMachineDependentAdapter {}\n\nimpl MachineDependentAdapter for DefaultMachineDependentAdapter {\n\n fn receive_cmd(&self, _machine: &ShareableMachine, _once: bool, _time_slice: Duration, _stats: &mut ExecutorStats) {}\n\n // determine if channel is empty\n\n fn is_channel_empty(&self) -> bool { true }\n\n // get the number of elements in the channel\n\n fn channel_len(&self) -> usize { 0 }\n\n}\n\n\n\n// The MachineAdapter is the model for Machine in the Collective\n\n#[doc(hidden)]\n\n#[derive(SmartDefault)]\n\npub struct MachineAdapter {\n\n // The id is assigned on creation, and is intended for to be used in logging\n\n id: Uuid,\n\n\n\n // The once flag, used for signalling connected.\n", "file_path": "d3-core/src/tls/collective.rs", "rank": 0, "score": 415810.58594121767 }, { "content": "#[inline]\n\npub fn send_cmd<T>(sender: &Sender<T>, cmd: T)\n\nwhere\n\n T: MachineImpl + MachineImpl<InstructionSet = T> + std::fmt::Debug,\n\n{\n\n if let Err(e) = sender.send(cmd) {\n\n log::info!(\"failed to send instruction: {}\", e);\n\n }\n\n}\n", "file_path": "d3-core/src/collective/machine.rs", "rank": 1, "score": 328368.1360843981 }, { "content": "pub fn schedule_machine(machine: ShareableMachine, run_queue: &ExecutorInjector) { schedule_task(machine, run_queue) }\n", "file_path": "d3-core/src/tls/task.rs", "rank": 2, "score": 310859.3836794216 }, { "content": "/// The and_connect method adds an additional instruction set and communication channel to the machine.\n\n/// The communicate channel ib bound to a default size.\n\npub fn and_connect<T, P>(machine: &Arc<T>) -> Sender<<<P as MachineImpl>::Adapter as MachineBuilder>::InstructionSet>\n\nwhere\n\n T: 'static + Machine<P> + Machine<<<P as MachineImpl>::Adapter as MachineBuilder>::InstructionSet>,\n\n P: MachineImpl,\n\n <P as 
MachineImpl>::Adapter: MachineBuilder,\n\n{\n\n let channel_max = default_channel_max.load();\n\n let (mut sender, collective_adapter) = <<P as MachineImpl>::Adapter as MachineBuilder>::build_addition(machine, channel_max);\n\n bind_and_assign(collective_adapter, &mut sender);\n\n sender\n\n}\n\n\n", "file_path": "d3-core/src/scheduler/machine.rs", "rank": 3, "score": 303889.09209309553 }, { "content": "/// The and_connect_unbounded method adds an additional instruction set and sender to the machine.\n\n/// The communication channel is unbound.\n\npub fn and_connect_unbounded<T, P>(machine: &Arc<T>) -> Sender<<<P as MachineImpl>::Adapter as MachineBuilder>::InstructionSet>\n\nwhere\n\n T: 'static + Machine<P> + Machine<<<P as MachineImpl>::Adapter as MachineBuilder>::InstructionSet>,\n\n P: MachineImpl,\n\n <P as MachineImpl>::Adapter: MachineBuilder,\n\n{\n\n let (mut sender, collective_adapter) = <<P as MachineImpl>::Adapter as MachineBuilder>::build_addition_unbounded(machine);\n\n bind_and_assign(collective_adapter, &mut sender);\n\n sender\n\n}\n\n\n", "file_path": "d3-core/src/scheduler/machine.rs", "rank": 4, "score": 300520.99324916716 }, { "content": "type ChannelEmptyFn = Box<dyn Fn() -> bool + Send + Sync + 'static>;\n", "file_path": "d3-core/src/tls/collective.rs", "rank": 5, "score": 299762.1543376746 }, { "content": "#[allow(dead_code)]\n\npub fn set_default_channel_capacity(new: usize) { default_channel_max.store(new); }\n", "file_path": "d3-core/src/scheduler/machine.rs", "rank": 6, "score": 298980.3565295844 }, { "content": "/// The connect method creates a machine, implementing an instruction set.\n\n/// The machine has a bound communication channel of a default size receiving those instructions.\n\npub fn connect<T, P>(machine: T) -> (Arc<T>, Sender<<<P as MachineImpl>::Adapter as MachineBuilder>::InstructionSet>)\n\nwhere\n\n T: 'static + Machine<P> + Machine<<<P as MachineImpl>::Adapter as MachineBuilder>::InstructionSet>,\n\n P: 
MachineImpl,\n\n <P as MachineImpl>::Adapter: MachineBuilder,\n\n{\n\n let channel_max = default_channel_max.load();\n\n let (machine, mut sender, collective_adapter) = <<P as MachineImpl>::Adapter as MachineBuilder>::build_raw(machine, channel_max);\n\n bind_and_assign(collective_adapter, &mut sender);\n\n (machine, sender)\n\n}\n\n\n", "file_path": "d3-core/src/scheduler/machine.rs", "rank": 8, "score": 297743.6854007236 }, { "content": "/// The connect_unbounded method creates a machine with an unbounded queue. It can result\n\n/// in a panic if system resources become exhausted.\n\npub fn connect_unbounded<T, P>(machine: T) -> (Arc<T>, Sender<<<P as MachineImpl>::Adapter as MachineBuilder>::InstructionSet>)\n\nwhere\n\n T: 'static + Machine<P> + Machine<<<P as MachineImpl>::Adapter as MachineBuilder>::InstructionSet>,\n\n P: MachineImpl,\n\n <P as MachineImpl>::Adapter: MachineBuilder,\n\n{\n\n let (machine, mut sender, collective_adapter) = <<P as MachineImpl>::Adapter as MachineBuilder>::build_unbounded(machine);\n\n bind_and_assign(collective_adapter, &mut sender);\n\n (machine, sender)\n\n}\n\n\n", "file_path": "d3-core/src/scheduler/machine.rs", "rank": 9, "score": 294481.40117108927 }, { "content": "/// create the factory for creating and starting the scheduler\n\npub fn create_sched_factory() -> impl SchedulerFactory { Factory::new() }\n\n\n", "file_path": "d3-core/src/scheduler/sched_factory.rs", "rank": 10, "score": 292638.37834232557 }, { "content": "/// The remove_core_stats_sender function removes a sender from the list of senders receiving\n\n/// core statistic updates.\n\npub fn remove_core_stats_sender(sender: CoreStatsSender) { Server::remove_core_stats_sender(sender); }\n\n\n", "file_path": "d3-core/src/scheduler/setup_teardown.rs", "rank": 11, "score": 281517.68929430255 }, { "content": "type ChannelLenFn = Box<dyn Fn() -> usize + Send + Sync + 'static>;\n", "file_path": "d3-core/src/tls/collective.rs", "rank": 12, "score": 277598.7392309044 
}, { "content": "#[doc(hidden)]\n\npub trait MachineImpl: 'static + Send + Sync {\n\n type Adapter;\n\n type SenderAdapter;\n\n type InstructionSet: Send + Sync;\n\n\n\n // Park a sender. If the sender can't be parked, the instruction\n\n // is returned.\n\n fn park_sender(\n\n channel_id: usize, receiver_machine: Arc<self::tls::collective::MachineAdapter>,\n\n sender: crossbeam::channel::Sender<Self::InstructionSet>, instruction: Self::InstructionSet,\n\n ) -> Result<(), Self::InstructionSet>;\n\n}\n\n\n", "file_path": "d3-core/src/foundation/machine.rs", "rank": 13, "score": 275917.3165447551 }, { "content": "fn schedule_task(machine: ShareableMachine, run_queue: &ExecutorInjector) {\n\n RUN_QUEUE_LEN.fetch_add(1, Ordering::SeqCst);\n\n run_queue.push(machine);\n\n if EXECUTORS_SNOOZING.load(Ordering::SeqCst) != 0 {\n\n Server::wake_executor_threads();\n\n }\n\n}\n\n\n", "file_path": "d3-core/src/tls/task.rs", "rank": 15, "score": 254295.23639821843 }, { "content": "/// The add_core_stats_sender function adds a sender to the list of senders receiving\n\n/// core statistic updates.\n\npub fn add_core_stats_sender(sender: CoreStatsSender) { Server::add_core_stats_sender(sender); }\n\n\n", "file_path": "d3-core/src/scheduler/setup_teardown.rs", "rank": 17, "score": 243549.11884935305 }, { "content": "/// The connect_with_capacity method creates a machine with a bounded queue of the specified size.\n\npub fn connect_with_capacity<T, P>(\n\n machine: T, capacity: usize,\n\n) -> (Arc<T>, Sender<<<P as MachineImpl>::Adapter as MachineBuilder>::InstructionSet>)\n\nwhere\n\n T: 'static + Machine<P> + Machine<<<P as MachineImpl>::Adapter as MachineBuilder>::InstructionSet>,\n\n P: MachineImpl,\n\n <P as MachineImpl>::Adapter: MachineBuilder,\n\n{\n\n let (machine, mut sender, collective_adapter) = <<P as MachineImpl>::Adapter as MachineBuilder>::build_raw(machine, capacity);\n\n bind_and_assign(collective_adapter, &mut sender);\n\n (machine, sender)\n\n}\n\n\n", 
"file_path": "d3-core/src/scheduler/machine.rs", "rank": 18, "score": 242865.9645788856 }, { "content": "/// The and_connect_with_capacity method adds an additional instruction set and sender to the machine.\n\n/// The communication channel is bound to the specified size.\n\npub fn and_connect_with_capacity<T, P>(\n\n machine: &Arc<T>, capacity: usize,\n\n) -> Sender<<<P as MachineImpl>::Adapter as MachineBuilder>::InstructionSet>\n\nwhere\n\n T: 'static + Machine<P> + Machine<<<P as MachineImpl>::Adapter as MachineBuilder>::InstructionSet>,\n\n P: MachineImpl,\n\n <P as MachineImpl>::Adapter: MachineBuilder,\n\n{\n\n let (mut sender, collective_adapter) = <<P as MachineImpl>::Adapter as MachineBuilder>::build_addition(machine, capacity);\n\n bind_and_assign(collective_adapter, &mut sender);\n\n sender\n\n}\n\n\n", "file_path": "d3-core/src/scheduler/machine.rs", "rank": 19, "score": 242865.29628756724 }, { "content": "// bind the adapter to the sender and assign the adapter to the collective\n\nfn bind_and_assign<T>(adapter: MachineAdapter, sender: &mut Sender<T>)\n\nwhere\n\n T: MachineImpl,\n\n{\n\n let adapter = Arc::new(adapter);\n\n sender.bind(Arc::clone(&adapter));\n\n Server::assign_machine(adapter);\n\n}\n\n\n\n/// CHANNEL_MAX is the default size for bound communication channels.\n\npub const CHANNEL_MAX: usize = 250;\n\n\n\n#[allow(dead_code)]\n\n#[allow(non_upper_case_globals)]\n\n/// The default_channel_max static is the default used for creating bound channels.\n\npub static default_channel_max: AtomicCell<usize> = AtomicCell::new(CHANNEL_MAX);\n\n/// The get_default_channel_capacity function returns the default value.\n", "file_path": "d3-core/src/scheduler/machine.rs", "rank": 20, "score": 242746.64445372412 }, { "content": "// wait for a notification to arrive, indicating that a run iteration completed\n\npub fn wait_for_notification(receiver: &TestMessageReceiver, messages: usize, duration: Duration) -> Result<(), ()> {\n\n let start = 
Instant::now();\n\n if let Ok(m) = receiver.recv_timeout(duration) {\n\n assert_eq!(m, TestMessage::TestData(messages));\n\n Ok(())\n\n } else {\n\n log::error!(\"wait_for_notification failed, started at {:#?}\", start);\n\n log::error!(\"run queue len {}\", executor::get_run_queue_len());\n\n d3::core::executor::stats::request_stats_now();\n\n d3::core::executor::stats::request_machine_info();\n\n thread::sleep(Duration::from_millis(100));\n\n Err(())\n\n }\n\n}\n\n\n", "file_path": "d3-test-drivers/src/common.rs", "rank": 21, "score": 241954.1437993548 }, { "content": "#[allow(dead_code)]\n\npub fn get_default_channel_capacity() -> usize { default_channel_max.load() }\n\n/// The set_default_channel_capacity function sets a new default value.\n\n/// setting should be performed before starting the server.\n", "file_path": "d3-core/src/scheduler/machine.rs", "rank": 22, "score": 238692.72245062486 }, { "content": "/// Create a channel with a fixed capacity.\n\npub fn channel_with_capacity<T>(capacity: usize) -> (Sender<T>, Receiver<T>)\n\nwhere\n\n T: MachineImpl,\n\n{\n\n let (s, r) = crossbeam::channel::bounded::<T>(capacity);\n\n wrap(s, r)\n\n}\n\n\n", "file_path": "d3-core/src/channel/machine_channel.rs", "rank": 24, "score": 234978.23095634495 }, { "content": "pub fn get_run_queue_len() -> usize { RUN_QUEUE_LEN.load(Ordering::SeqCst) }\n\n/// The EXECUTORS_SNOOZING static is the current number of executors that are idle, it is considered read-only.\n\npub static EXECUTORS_SNOOZING: AtomicUsize = AtomicUsize::new(0);\n", "file_path": "d3-core/src/scheduler/executor.rs", "rank": 25, "score": 233826.30687519436 }, { "content": "#[allow(dead_code, non_upper_case_globals)]\n\npub fn set_executor_count(new: usize) { executor_count.store(new); }\n\n\n\n#[cfg(test)]\n\npub mod tests {\n\n use super::*;\n\n use simplelog::*;\n\n use std::panic;\n\n\n\n // common function for wrapping a test with setup/teardown logic\n\n pub fn run_test<T>(test: T)\n\n where\n\n T: 
FnOnce() + panic::UnwindSafe,\n\n {\n\n // install a simple logger\n\n if CombinedLogger::init(vec![TermLogger::new(LevelFilter::Error, Config::default(), TerminalMode::Mixed)]).is_err() {}\n\n setup();\n\n\n\n let result = panic::catch_unwind(|| test());\n\n\n\n teardown();\n", "file_path": "d3-core/src/scheduler/setup_teardown.rs", "rank": 27, "score": 232837.44001225207 }, { "content": "/// The set_time_slice function sets the current timeslice value. This should be\n\n/// performed before starting the server.\n\npub fn set_time_slice(new: std::time::Duration) { TIMESLICE_IN_MILLIS.store(new.as_millis() as usize) }\n\n\n\n/// The RUN_QUEUE_LEN static is the current length of the run queue, it is considered read-only..\n\npub static RUN_QUEUE_LEN: AtomicUsize = AtomicUsize::new(0);\n", "file_path": "d3-core/src/scheduler/executor.rs", "rank": 28, "score": 232320.70210634836 }, { "content": "pub fn new_executor_injector() -> ExecutorInjector { Arc::new(deque::Injector::<ExecutorTask>::new()) }\n", "file_path": "d3-core/src/tls/task.rs", "rank": 29, "score": 227663.84560790574 }, { "content": "/// Request machine_info will request the scheduler to send machine information\n\npub fn request_machine_info() { Server::request_machine_info(); }\n\n\n", "file_path": "d3-core/src/scheduler/setup_teardown.rs", "rank": 30, "score": 221693.48401226502 }, { "content": "/// Create a channel with an unlimited capacity. 
This should be\n\n/// used with caution, as it can cause a panic when sending.\n\npub fn channel<T>() -> (Sender<T>, Receiver<T>)\n\nwhere\n\n T: MachineImpl,\n\n{\n\n let (s, r) = crossbeam::channel::unbounded::<T>();\n\n wrap(s, r)\n\n}\n\n\n", "file_path": "d3-core/src/channel/machine_channel.rs", "rank": 31, "score": 220846.46721419733 }, { "content": "// The controller for the system monitor.\n\npub trait MonitorControl: Send + Sync {\n\n /// stop the system monitor\n\n fn stop(&self);\n\n /// add a stats sender to the system monitor\n\n fn add_sender(&self, sender: CoreStatsSender);\n\n /// remove a stats sender to the system monitor\n\n fn remove_sender(&self, sender: CoreStatsSender);\n\n}\n\npub type MonitorControlObj = Arc<dyn MonitorControl>;\n\n\n", "file_path": "d3-core/src/scheduler/traits.rs", "rank": 32, "score": 220443.35628085374 }, { "content": "// wait for the machine count to be increased to the provided count\n\npub fn wait_for_machine_setup(machine_count: usize) -> Result<(), ()> {\n\n let start = Instant::now();\n\n while start.elapsed() < Duration::from_secs(5) {\n\n thread::sleep(Duration::from_nanos(50));\n\n if executor::get_machine_count() >= machine_count {\n\n return Ok(());\n\n }\n\n }\n\n if log_enabled!(log::Level::Error) {\n\n log::error!(\n\n \"wait_for_machine_setup failed: count={}, expecting={}\",\n\n executor::get_machine_count(),\n\n machine_count\n\n );\n\n } else {\n\n println!(\n\n \"wait_for_machine_setup failed: count={}, expecting={}\",\n\n executor::get_machine_count(),\n\n machine_count\n\n );\n\n }\n\n Err(())\n\n}\n\n\n", "file_path": "d3-test-drivers/src/common.rs", "rank": 35, "score": 212083.20043395815 }, { "content": "pub fn wrap_sender<T>(sender: crossbeam::channel::Sender<T>, channel_id: usize, connection: ThreadSafeConnection) -> Sender<T>\n\nwhere\n\n T: MachineImpl,\n\n{\n\n log::trace!(\"creating sender {}\", channel_id);\n\n Sender::<T> {\n\n channel_id,\n\n clone_count: 
Arc::new(AtomicUsize::new(0)),\n\n connection,\n\n sender,\n\n receiver_machine: Arc::new(MachineAdapter::default()),\n\n }\n\n}\n", "file_path": "d3-core/src/channel/sender.rs", "rank": 36, "score": 211639.2258088545 }, { "content": "// wait for the machine count to be reduced to the provided count\n\npub fn wait_for_machine_teardown(test_name: &str, machine_count: usize) -> Result<(), ()> {\n\n let start = Instant::now();\n\n while start.elapsed() < Duration::from_secs(10) {\n\n thread::sleep(Duration::from_nanos(50));\n\n if executor::get_machine_count() <= machine_count {\n\n return Ok(());\n\n }\n\n }\n\n if log_enabled!(log::Level::Error) {\n\n log::error!(\n\n \"wait_for_machine_teardown failed: for {} count={}, expecting={}\",\n\n test_name,\n\n executor::get_machine_count(),\n\n machine_count\n\n );\n\n } else {\n\n println!(\n\n \"wait_for_machine_teardown failed: count={}, expecting={}\",\n\n executor::get_machine_count(),\n\n machine_count\n\n );\n\n }\n\n Err(())\n\n}\n", "file_path": "d3-test-drivers/src/common.rs", "rank": 37, "score": 208161.06319166682 }, { "content": "// attempt state transition\n\nfn wait_for_ownership(curr: ServerState, new: ServerState, duration: Duration) -> Result<(), ()> {\n\n let start = Instant::now();\n\n while start.elapsed() < duration {\n\n if curr == server_state.compare_and_swap(curr, new) {\n\n return Ok(());\n\n }\n\n thread::sleep(Duration::from_nanos(50));\n\n }\n\n Err(())\n\n}\n\n\n", "file_path": "d3-core/src/scheduler/setup_teardown.rs", "rank": 38, "score": 207173.49771221302 }, { "content": "/// Take the setting from the variant and turn them into a concrete stuct that we can\n\n/// pass around. 
Probably could do this with a trait...\n\npub fn run(settings: &settings::Settings) {\n\n log::info!(\"running forwarder\");\n\n // let things settle down before diving in...\n\n std::thread::sleep(std::time::Duration::from_millis(750));\n\n // pull the forwarder info out of the additoanl hash map\n\n //\n\n for a in &settings.additional {\n\n if let Some(v) = a.get(&settings::Additional::Forwarder) {\n\n // v is a variant in AdditionalVariant, need to extract things info the Forwarder\n\n let f = match v.clone() {\n\n settings::AdditionalVariant::Forwarder {\n\n run,\n\n default,\n\n daisy_chain,\n\n fanout_fanin,\n\n chaos_monkey,\n\n } => ForwarderSettings {\n\n run,\n\n default,\n\n daisy_chain,\n", "file_path": "examples/test-server/src/forwarder.rs", "rank": 39, "score": 205163.7013177912 }, { "content": "/// The start_server function starts the server, putting it in a state where it can create machines\n\n/// that are connected to the collective.\n\npub fn start_server() {\n\n log::info!(\"starting server\");\n\n // tests sometimes run in parallel, so we wait\n\n let res = wait_for_ownership(ServerState::Stopped, ServerState::Initializing, Duration::from_secs(5));\n\n if res.is_err() {\n\n log::error!(\"force stopping server, current state is {:#?}\", server_state.load());\n\n stop_server();\n\n }\n\n log::info!(\"aquired server\");\n\n reset_core();\n\n if get_executor_count() == 0 {\n\n let num = num_cpus::get();\n\n // Give them all to the executor, everything else is low-cost overhead\n\n set_executor_count(num);\n\n log::info!(\"setting executor count to {}\", num);\n\n }\n\n let monitor_factory = SystemMonitorFactory::new();\n\n let executor_factory = SystemExecutorFactory::new();\n\n let scheduler_factory = sched_factory::create_sched_factory();\n\n executor_factory.with_workers(get_executor_count());\n", "file_path": "d3-core/src/scheduler/setup_teardown.rs", "rank": 40, "score": 191409.6739038804 }, { "content": "pub fn configure(settings: 
&Settings, components: &[ComponentInfo]) -> Result<Option<ComponentSender>, ComponentError> {\n\n // ensure our service is configure in services\n\n if !settings.services.contains(\"MonitorService\") {\n\n log::debug!(\"monitor service is not configured\");\n\n return Ok(None);\n\n }\n\n // find ourself in the list\n\n for c in &settings.coordinator {\n\n if let Some(value) = c.get(\"MonitorCoordinator\") {\n\n let maybe_coordinator = match value {\n\n CoordinatorVariant::SimpleTcpConfig { tcp_address, .. } => Some(MonitorCoordinator {\n\n net_sender: get_network_sender(),\n\n bind_addr: tcp_address.to_string(),\n\n components: components.to_owned(),\n\n mutable: Arc::new(Mutex::new(MutableData::default())),\n\n }),\n\n #[allow(unreachable_patterns)] // in case it becomes reachable, we want to know\n\n _ => None,\n\n };\n\n if let Some(coordinator) = maybe_coordinator {\n", "file_path": "examples/monitor-service/src/monitor.rs", "rank": 41, "score": 187758.11702344616 }, { "content": "// The ugliest part of a coordinator is the configuration parsing.\n\npub fn configure(settings: &Settings, _components: &[ComponentInfo]) -> Result<Option<ComponentSender>, ComponentError> {\n\n // ensure our service is configure in services\n\n if !settings.services.contains(\"AliceService\") {\n\n log::debug!(\"alice service is not configured\");\n\n return Ok(None);\n\n }\n\n // find ourself in the list\n\n for c in &settings.coordinator {\n\n if let Some(value) = c.get(\"AliceCoordinator\") {\n\n let maybe_coordinator = match value {\n\n CoordinatorVariant::SimpleTcpConfig { tcp_address, .. 
} => Some(AliceCoordinator {\n\n net_sender: get_network_sender(),\n\n bind_addr: tcp_address.to_string(),\n\n }),\n\n #[allow(unreachable_patterns)] // in case it becomes reachable, we want to know\n\n _ => None,\n\n };\n\n\n\n if let Some(coordinator) = maybe_coordinator {\n\n if coordinator.bind_addr.parse::<SocketAddr>().is_err() {\n", "file_path": "examples/alice-service/src/alice.rs", "rank": 42, "score": 187709.99254086157 }, { "content": "pub trait ExecutorControl: Send + Sync {\n\n /// notifies the executor that an executor is parked\n\n fn parked_executor(&self, id: usize);\n\n /// Wake parked threads\n\n fn wake_parked_threads(&self);\n\n /// notifies the executor that an executor completed and can be joined\n\n fn joinable_executor(&self, id: usize);\n\n /// get run_queue\n\n fn get_run_queue(&self) -> ExecutorInjector;\n\n /// stop the executor\n\n fn stop(&self);\n\n /// request stats from the executors\n\n fn request_stats(&self);\n\n}\n\npub type ExecutorControlObj = Arc<dyn ExecutorControl>;\n\n\n\n/// The schdeuler and executor commands\n\n#[allow(dead_code)]\n\n#[derive(Debug)]\n\npub enum SchedCmd {\n", "file_path": "d3-core/src/scheduler/traits.rs", "rank": 43, "score": 186158.880559217 }, { "content": "// The controller for the network.\n\npub trait NetworkControl: Send + Sync {\n\n /// stop the network\n\n fn stop(&self);\n\n}\n\n/// The trait object for the network controller\n\npub type NetworkControlObj = Arc<dyn NetworkControl>;\n", "file_path": "d3-components/src/network_start_stop.rs", "rank": 44, "score": 184181.5837685204 }, { "content": "/// Request stats will request the subcomponents to send their stats now, rather than waiting\n\n/// for their periodic sending.\n\npub fn request_stats_now() { Server::request_stats(); }\n\n\n", "file_path": "d3-core/src/scheduler/setup_teardown.rs", "rank": 45, "score": 183085.62830989686 }, { "content": "pub fn bench(c: &mut Criterion) {\n\n setup();\n\n let mut group = 
c.benchmark_group(\"sched_exec_tests\");\n\n // try to limit the length of test runs\n\n // group.significance_level(0.1).sample_size(10).measurement_time(Duration::from_secs(30));\n\n // group.significance_level(0.1).sample_size(10);\n\n group.significance_level(0.1);\n\n\n\n log::info!(\"create_destroy_2000_machines: setup complete\");\n\n group.bench_function(\"create_destroy_2000_machines\", |b| b.iter(create_destroy_2000_machines));\n\n log::info!(\"create_destroy_2000_machines: tear-down complete\");\n\n\n\n let mut send_cmd = DaisyChainDriver::default();\n\n send_cmd.machine_count = 1;\n\n send_cmd.message_count = 200;\n\n send_cmd.duration = Duration::from_secs(30);\n\n send_cmd.setup();\n\n group.bench_function(\"send 200 cmds\", |b| b.iter(|| send_cmd.run()));\n\n DaisyChainDriver::teardown(send_cmd);\n\n\n", "file_path": "benches/forwarder.rs", "rank": 46, "score": 182508.3406921575 }, { "content": "pub fn wrap_receiver<T>(receiver: crossbeam::channel::Receiver<T>, channel_id: usize, connection: ThreadSafeConnection) -> Receiver<T>\n\nwhere\n\n T: MachineImpl,\n\n{\n\n Receiver::<T> {\n\n channel_id,\n\n connection,\n\n receiver,\n\n }\n\n}\n", "file_path": "d3-core/src/channel/receiver.rs", "rank": 47, "score": 181828.29906515748 }, { "content": "type TestMessageSender = Sender<TestMessage>;\n", "file_path": "d3-test-drivers/src/lib.rs", "rank": 48, "score": 181413.87478602838 }, { "content": "/// Obtain the network's sender. The returned sender is a clone,\n\n/// that you are free to use, further clone, or drop. 
Tip: cache\n\n/// the result and clone it when you need to send it.\n\npub fn get_network_sender() -> NetSender { Network::get_sender() }\n\n\n", "file_path": "d3-components/src/network_start_stop.rs", "rank": 49, "score": 181176.6465275828 }, { "content": "fn wrap<T>(sender: crossbeam::channel::Sender<T>, receiver: crossbeam::channel::Receiver<T>) -> (Sender<T>, Receiver<T>)\n\nwhere\n\n T: MachineImpl,\n\n{\n\n let channel_id = CHANNEL_ID.fetch_add(1, Ordering::SeqCst);\n\n let (sc, rc) = Connection::new();\n\n (\n\n wrap_sender::<T>(sender, channel_id, sc),\n\n wrap_receiver::<T>(receiver, channel_id, rc),\n\n )\n\n}\n", "file_path": "d3-core/src/channel/machine_channel.rs", "rank": 50, "score": 180837.69441439828 }, { "content": "#[proc_macro_derive(MachineImpl)]\n\npub fn derive_machine_impl_fn(input: TokenStream) -> TokenStream {\n\n let input = parse_macro_input!(input as DeriveInput);\n\n let name = &input.ident;\n\n let adapter_ident = format_ident!(\"MachineBuilder{}\", name.to_string());\n\n let sender_adapter_ident = format_ident!(\"SenderAdapter{}\", name.to_string());\n\n // let recv_wait_ident = format_ident!(\"RecvWait{}\", name.to_string());\n\n let expanded = quote! 
{\n\n impl MachineImpl for #name {\n\n type Adapter = #adapter_ident;\n\n type SenderAdapter = #sender_adapter_ident;\n\n type InstructionSet = #name;\n\n fn park_sender(\n\n channel_id: usize,\n\n receiver_machine: std::sync::Arc<MachineAdapter>,\n\n sender: crossbeam::channel::Sender<Self::InstructionSet>,\n\n instruction: Self::InstructionSet) -> Result<(),Self::InstructionSet> {\n\n //Err(instruction)\n\n tls_executor_data.with(|t|{\n\n let mut tls = t.borrow_mut();\n\n // if its the main thread, let it block.\n", "file_path": "d3-derive/src/lib.rs", "rank": 51, "score": 178270.5465028309 }, { "content": "pub fn new_executor_worker() -> ExecutorWorker { ExecutorWorker::new_fifo() }\n\n\n\npub static TASK_ID: AtomicUsize = AtomicUsize::new(1);\n\n\n", "file_path": "d3-core/src/tls/task.rs", "rank": 52, "score": 176252.93906981187 }, { "content": "type MonitorReceiver = crossbeam::channel::Receiver<MonitorMessage>;\n\n\n\n// The factory for creating the system monitor.\n\npub struct SystemMonitorFactory {\n\n sender: MonitorSender,\n\n receiver: MonitorReceiver,\n\n}\n\nimpl SystemMonitorFactory {\n\n #[allow(clippy::new_ret_no_self)]\n\n pub fn new() -> MonitorFactoryObj {\n\n let (sender, receiver) = crossbeam::channel::unbounded::<MonitorMessage>();\n\n Arc::new(Self { sender, receiver })\n\n }\n\n}\n\n\n\nimpl MonitorFactory for SystemMonitorFactory {\n\n fn get_sender(&self) -> MonitorSender { self.sender.clone() }\n\n fn start(&self, executor: ExecutorControlObj) -> MonitorControlObj {\n\n SystemMonitor::start(self.sender.clone(), self.receiver.clone(), executor)\n\n }\n", "file_path": "d3-core/src/scheduler/overwatch.rs", "rank": 53, "score": 175738.86692428653 }, { "content": "#[doc(hidden)]\n\npub trait MachineDependentAdapter: Send + Sync + fmt::Debug {\n\n // Deliver the instruction into the machine.\n\n fn receive_cmd(&self, machine: &ShareableMachine, once: bool, time_slice: Duration, stats: &mut ExecutorStats);\n\n // determine if channel is 
empty\n\n fn is_channel_empty(&self) -> bool;\n\n // get the number of elements in the channel\n\n fn channel_len(&self) -> usize;\n\n}\n", "file_path": "d3-core/src/tls/collective.rs", "rank": 54, "score": 172284.9627843539 }, { "content": "pub fn set_cells(cells: &[LifeSender], alive: &[(usize, usize)]) {\n\n for (row, col) in alive.iter().cloned() {\n\n let idx = row * GRID_SIZE + col;\n\n log::debug!(\"sending SetAlive({}) to id={} rc=({}, {})\", true, idx, row, col);\n\n cells[idx].send(LifeCmd::SetState(CellState::Alive)).unwrap();\n\n }\n\n sync();\n\n // publish changes to neighbors\n\n cells.iter().for_each(|s| s.send(LifeCmd::Tock).unwrap());\n\n sync();\n\n}\n\n\n", "file_path": "examples/d3-life/src/life_cell.rs", "rank": 55, "score": 171326.78507130133 }, { "content": "// The trait that allows the executor to perform notifications\n\npub trait ExecutorNotifier: Send + Sync + 'static {\n\n // Send a notificiation that the executor is parked\n\n fn notify_parked(&self, executor_id: usize);\n\n // Send a notification that a parked sender is no long parked, and can be scheduled\n\n fn notify_can_schedule_sender(&self, machine_key: usize);\n\n // Send a notification that a parked sender's receiver may need to be scheduled\n\n fn notify_can_schedule_receiver(&self, machine_key: usize);\n\n}\n\npub type ExecutorNotifierObj = std::sync::Arc<dyn ExecutorNotifier>;\n\n\n\nthread_local! 
{\n\n #[doc(hidden)]\n\n #[allow(non_upper_case_globals)]\n\n pub static tls_executor_data: RefCell<ExecutorData> = RefCell::new(ExecutorData::default());\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n #[allow(unused_imports)] use super::*;\n\n}\n", "file_path": "d3-core/src/tls/tls_executor.rs", "rank": 56, "score": 171207.06509918408 }, { "content": "#[allow(dead_code, non_upper_case_globals)]\n\npub fn get_executor_count() -> usize { executor_count.load() }\n\n\n\n/// The set_executor_count sets the number of executor threads.\n\n/// This should be performed prior to starting the server.\n", "file_path": "d3-core/src/scheduler/setup_teardown.rs", "rank": 57, "score": 169790.22038354885 }, { "content": "// initialize cells and start the server\n\npub fn init_cells() -> (GraphicsReceiver, Vec<LifeSender>) {\n\n // configure the core and get it running\n\n executor::set_default_channel_capacity(10);\n\n executor::set_machine_count_estimate(GRID_SIZE + 10);\n\n executor::start_server();\n\n\n\n let (graphics_sender, graphics_receiver) = machine_impl::channel::<GraphicsCmd>();\n\n let mut cells: Vec<LifeSender> = Vec::with_capacity(GRID_SIZE * GRID_SIZE);\n\n\n\n // create all of the machines\n\n for idx in 0 .. GRID_SIZE * GRID_SIZE {\n\n let row = (idx / GRID_SIZE) as u32;\n\n let col = (idx % GRID_SIZE) as u32;\n\n let rect = Rect::new(\n\n (BORDER_SIZE + (RECT_SIZE + 1) * col) as i32,\n\n (BORDER_SIZE + (RECT_SIZE + 1) * row) as i32,\n\n RECT_SIZE,\n\n RECT_SIZE,\n\n );\n\n let (_, s) = executor::connect_unbounded(LifeCell::new(idx, graphics_sender.clone(), rect));\n", "file_path": "examples/d3-life/src/life_cell.rs", "rank": 58, "score": 168072.02423568256 }, { "content": "/// Start the network. 
Starting does not perform any network bindings, it\n\n/// prepares the network to accept bindings.\n\npub fn start_network() {\n\n network_state.store(NetworkState::Initializing);\n\n let network_factory = new_network_factory();\n\n let network_sender = network_factory.get_sender();\n\n let network_control = network_factory.start();\n\n let mut s = network.borrow_mut();\n\n s.network_control = NetworkField::Network(network_control);\n\n s.network_sender = NetworkField::NetworkSender(network_sender);\n\n network_state.store(NetworkState::Running);\n\n}\n\n\n", "file_path": "d3-components/src/network_start_stop.rs", "rank": 59, "score": 167408.1163921854 }, { "content": "#[enum_dispatch(SchedulerEnum)]\n\npub trait Scheduler: Send + Sync {\n\n /// assigns a new machine, making it eligable for scheduling and running\n\n fn assign_machine(&self, machine: ShareableMachine);\n\n /// request stats from the scheduler\n\n fn request_stats(&self);\n\n /// request machine info\n\n fn request_machine_info(&self);\n\n /// stop the scheduler\n\n fn stop(&self);\n\n}\n\n\n\n#[enum_dispatch]\n\npub enum SchedulerEnum {\n\n DefaultScheduler,\n\n}\n", "file_path": "d3-core/src/scheduler/traits.rs", "rank": 60, "score": 163830.207828698 }, { "content": "/// The machine is the common trait all machines must implement\n\n/// and describes how instuctions are delivered to a machine, via\n\n/// the receive method.\n\npub trait Machine<T>: Send + Sync\n\nwhere\n\n T: 'static + Send + Sync,\n\n{\n\n /// The receive method receives instructions sent to it by itself or other machines.\n\n fn receive(&self, cmd: T);\n\n /// The disconnected method is called to notify the machine that its receiver has become\n\n /// disconnect; it will no longer receive instructions.\n\n /// This could be a result of server shutdown, or all senders dropping their senders.\n\n fn disconnected(&self) {}\n\n /// The connected method is called once, before receive messages. 
It provides a notification that the\n\n /// machine has become connected and may receive instructions. It includes a Uuid for the machine,\n\n /// which may be used in logging. A machine implementing several instruction sets will receive a differnt\n\n /// Uuid for each instruction set implemented.\n\n fn connected(&self, _uuid: Uuid) {}\n\n}\n\n\n\n// Adding the machine implementation to Mutex\n\n// makes it possible to hide the underlying wrapping.\n\nimpl<T, P> Machine<P> for Mutex<T>\n", "file_path": "d3-core/src/foundation/machine.rs", "rank": 61, "score": 159755.2476700701 }, { "content": "pub fn get_executors_snoozing() -> usize { EXECUTORS_SNOOZING.load(Ordering::SeqCst) }\n\n\n\n// Unlike most of the system, which uses u128 ids, the executor uses usize. If atomic u128 were\n\n// available, it would likely use u128 as well. The decision to use atomic is based upon this\n\n// being the place where threads are used, including outside threads, such as the system monitor.\n\n\n\n// The factory for the executor\n\npub struct SystemExecutorFactory {\n\n workers: RefCell<usize>,\n\n run_queue: ExecutorInjector,\n\n wait_queue: SchedTaskInjector,\n\n}\n\nimpl SystemExecutorFactory {\n\n // expose the factory as a trait object.\n\n #[allow(clippy::new_ret_no_self)]\n\n pub fn new() -> ExecutorFactoryObj {\n\n Arc::new(Self {\n\n workers: RefCell::new(4),\n\n run_queue: new_executor_injector(),\n\n wait_queue: Arc::new(deque::Injector::<SchedTask>::new()),\n", "file_path": "d3-core/src/scheduler/executor.rs", "rank": 62, "score": 159608.13450078265 }, { "content": "#[derive(Debug, Copy, Clone, Eq, PartialEq, SmartDefault)]\n\nenum AliceState {\n\n #[default]\n\n SendForm,\n\n Init,\n\n Start,\n\n Stop,\n\n Bye,\n\n}\n\n\n\n// The Alice structure is created each time there is a new connection.\n", "file_path": "examples/alice-service/src/alice.rs", "rank": 63, "score": 157929.77348565785 }, { "content": "/// Stop the network. 
Stopping will close all network connections and\n\n/// listeners and no further network activity is allowed.\n\npub fn stop_network() {\n\n network_state.store(NetworkState::Stopping);\n\n if let NetworkField::Network(network_control) = &(network.borrow()).network_control {\n\n network_control.stop()\n\n }\n\n\n\n let mut s = network.borrow_mut();\n\n s.network_control = NetworkField::Uninitialized;\n\n s.network_sender = NetworkField::Uninitialized;\n\n network_state.store(NetworkState::Stopped);\n\n // give machines a chance to react to network shutdown\n\n thread::sleep(Duration::from_millis(100));\n\n}\n\n\n", "file_path": "d3-components/src/network_start_stop.rs", "rank": 64, "score": 157507.24210107088 }, { "content": "/// The stop_server function stops the server, releasing all resources.\n\npub fn stop_server() {\n\n log::info!(\"stopping server\");\n\n let state = server_state.compare_and_swap(ServerState::Running, ServerState::Stopping);\n\n if state != ServerState::Running {\n\n return;\n\n }\n\n // borrow the server to stop the subsystems, drop it before the borrow_mut()\n\n let borrow = server.borrow();\n\n if let ServerField::Executor(executor) = &borrow.executor {\n\n executor.stop();\n\n // give the executor some time to stop threads.\n\n thread::sleep(Duration::from_millis(20));\n\n }\n\n if let ServerField::Scheduler(scheduler) = &borrow.scheduler {\n\n scheduler.stop()\n\n }\n\n if let ServerField::Monitor(monitor) = &borrow.monitor {\n\n monitor.stop()\n\n }\n\n drop(borrow);\n\n\n\n let mut s = server.borrow_mut();\n\n s.scheduler = ServerField::Uninitialized;\n\n s.executor = ServerField::Uninitialized;\n\n s.monitor = ServerField::Uninitialized;\n\n\n\n server_state.store(ServerState::Stopped);\n\n log::info!(\"server is now stopped\");\n\n}\n\n\n", "file_path": "d3-core/src/scheduler/setup_teardown.rs", "rank": 65, "score": 156250.65008186072 }, { "content": "pub fn configure(settings: &settings::Settings, components: &[ComponentInfo]) -> 
Result<Vec<CoordinatorInfo>, ComponentError> {\n\n let mut active_coordinators: Vec<CoordinatorInfo> = Vec::new();\n\n for c in &settings.coordinator {\n\n // c is the coordinator HashMap\n\n for k in c.keys() {\n\n let result = match k.as_str() {\n\n \"EchoCoordinator\" => echo_coordinator::configure(settings, components),\n\n \"ChatCoordinator\" => chat_coordinator::configure(settings, components),\n\n \"MonitorCoordinator\" => monitor::configure(settings, components),\n\n \"AliceCoordinator\" => alice::configure(settings, components),\n\n \"UdpEchoCoordinator\" => udp_echo_coordinator::configure(settings, components),\n\n _ => {\n\n log::warn!(\"unhandled {:#?} coordintor configuration\", k);\n\n Ok(None)\n\n },\n\n };\n\n if let Err(e) = result {\n\n return Err(e);\n\n }\n\n if let Ok(Some(sender)) = result {\n\n active_coordinators.push(CoordinatorInfo::new(k.clone(), sender))\n\n }\n\n }\n\n }\n\n Ok(active_coordinators)\n\n}\n", "file_path": "examples/test-server/src/coordinators.rs", "rank": 66, "score": 154818.37382806197 }, { "content": "/// configure enabled components and return their senders.\n\npub fn configure(settings: &settings::Settings) -> Result<Vec<ComponentInfo>, ComponentError> {\n\n let mut active_components: Vec<ComponentInfo> = Vec::new();\n\n for c in &settings.component {\n\n // c is the component HashMap\n\n for (k, v) in c {\n\n let config = SimpleConfig::from(v);\n\n if config.enabled {\n\n let result = match k.as_str() {\n\n \"EchoConsumer\" => echo_consumer::configure(config, settings),\n\n \"EchoProducer\" => echo_producer::configure(config, settings),\n\n \"ChatConsumer\" => chat_consumer::configure(config, settings),\n\n \"ChatProducer\" => chat_producer::configure(config, settings),\n\n #[allow(unreachable_patterns)]\n\n _ => {\n\n log::warn!(\"unhandled {:#?} component configuration\", k);\n\n Ok(None)\n\n },\n\n };\n\n if let Err(e) = result {\n\n return Err(e);\n\n }\n\n if let Ok(Some(sender)) = result {\n\n 
active_components.push(ComponentInfo::new(k.clone(), sender));\n\n }\n\n }\n\n }\n\n }\n\n Ok(active_components)\n\n}\n", "file_path": "examples/test-server/src/components.rs", "rank": 67, "score": 154166.92335001592 }, { "content": "type TestMessageReceiver = Receiver<TestMessage>;\n\n\n\nmod common;\n\npub use common::TestDriver;\n\nuse common::*;\n\n\n\npub mod chaos_monkey;\n\npub mod daisy_chain;\n\npub mod fanout_fanin;\n\npub mod forwarder;\n", "file_path": "d3-test-drivers/src/lib.rs", "rank": 68, "score": 153120.13935743264 }, { "content": "/// The get_time_slice function returns the current timeslice value.\n\npub fn get_time_slice() -> std::time::Duration { std::time::Duration::from_millis(TIMESLICE_IN_MILLIS.load() as u64) }\n", "file_path": "d3-core/src/scheduler/executor.rs", "rank": 69, "score": 151129.86523934314 }, { "content": "/// The network has some boilerplate which keeps the public exposure\n\n/// small. It also allows swapping out the Factory and Instance as\n\n/// they're exposes as trait objects.\n\n///\n\n/// This is the only public exposure, in here and it shouldn't leak out of the lib.\n\npub fn new_network_factory() -> NetworkFactoryObj {\n\n let (sender, receiver) = channel_with_capacity::<NetCmd>(250);\n\n Arc::new(SystemNetworkFactory { sender, receiver })\n\n}\n\n\n", "file_path": "d3-components/src/mio_network.rs", "rank": 70, "score": 146957.57917837807 }, { "content": "/// Not quite as simple as the echo service, which has a pathway of Coordinate -> Consumer -> Producer -> Coordinator\n\n///\n\n/// Normally, we'd add a new component to manage routing with multiple chat rooms. 
However, we're going to cheap\n\n/// out and just have a single room.\n\npub fn configure(settings: &Settings, components: &[ComponentInfo]) -> Result<Option<ComponentSender>, ComponentError> {\n\n // ensure our service is configure in services\n\n if !settings.services.contains(\"ChatService\") {\n\n log::debug!(\"chat service is not configured\");\n\n return Ok(None);\n\n }\n\n // find ourself in the list\n\n for c in &settings.coordinator {\n\n if let Some(v) = c.get(\"ChatCoordinator\") {\n\n let coordinator = match v {\n\n CoordinatorVariant::SimpleTcpConfig { tcp_address, kv } if kv.is_some() => {\n\n let mutable = Mutex::new(MutableCoordinatorData {\n\n kv: kv.as_ref().unwrap().clone(),\n\n ..MutableCoordinatorData::default()\n\n });\n\n Some(ChatCoordinator {\n\n net_sender: get_network_sender(),\n\n bind_addr: tcp_address.to_string(),\n\n components: components.to_owned(),\n\n mutable,\n", "file_path": "examples/chat-service/src/chat_coordinator.rs", "rank": 71, "score": 146110.2341510165 }, { "content": "#[allow(dead_code)]\n\npub fn configure(settings: &Settings, components: &[ComponentInfo]) -> Result<Option<ComponentSender>, ComponentError> {\n\n // ensure our service is configure in services\n\n if !settings.services.contains(\"EchoService\") {\n\n log::debug!(\"echo service is not configured\");\n\n return Ok(None);\n\n }\n\n // find ourself in the list\n\n for c in &settings.coordinator {\n\n if let Some(value) = c.get(\"EchoCoordinator\") {\n\n let maybe_coordinator = match value {\n\n CoordinatorVariant::SimpleTcpConfig { tcp_address, .. 
} => Some(EchoCoordinator {\n\n net_sender: get_network_sender(),\n\n bind_addr: tcp_address.to_string(),\n\n my_sender: Mutex::new(None),\n\n components: components.to_owned(),\n\n }),\n\n #[allow(unreachable_patterns)] // in case it becomes reachable, we want to know\n\n _ => None,\n\n };\n\n if let Some(coordinator) = maybe_coordinator {\n", "file_path": "examples/echo-service/src/echo_coordinator.rs", "rank": 72, "score": 146106.62837684344 }, { "content": "// The ugliest part of a coordinator is the configuration parsing.\n\npub fn configure(settings: &Settings, _components: &[ComponentInfo]) -> Result<Option<ComponentSender>, ComponentError> {\n\n // ensure our service is configure in services\n\n if !settings.services.contains(\"UdpEchoService\") {\n\n log::debug!(\"udp echo service is not configured\");\n\n return Ok(None);\n\n }\n\n // find ourself in the list\n\n for c in &settings.coordinator {\n\n if let Some(value) = c.get(\"UdpEchoCoordinator\") {\n\n let maybe_coordinator = match value {\n\n CoordinatorVariant::SimpleUdpConfig { udp_address, .. 
} => Some(EchoCoordinator {\n\n net_sender: get_network_sender(),\n\n bind_addr: udp_address.to_string(),\n\n }),\n\n #[allow(unreachable_patterns)] // in case it becomes reachable, we want to know\n\n _ => None,\n\n };\n\n\n\n if let Some(coordinator) = maybe_coordinator {\n\n if coordinator.bind_addr.parse::<SocketAddr>().is_err() {\n", "file_path": "examples/udp-echo-service/src/udp_echo.rs", "rank": 73, "score": 143948.061104201 }, { "content": "// compute state, render it, then publish changes to the neighbors\n\npub fn tick_tock(cells: &[LifeSender]) {\n\n // compute state and render changes\n\n cells.iter().for_each(|s| s.send(LifeCmd::Tick).unwrap());\n\n sync();\n\n // publish changes to neighbors\n\n cells.iter().for_each(|s| s.send(LifeCmd::Tock).unwrap());\n\n sync();\n\n}\n\n\n", "file_path": "examples/d3-life/src/life_cell.rs", "rank": 74, "score": 143695.86591235694 }, { "content": "#[doc(hidden)]\n\npub trait MachineBuilder {\n\n // The instruction set implemented by the machine\n\n type InstructionSet: MachineImpl;\n\n\n\n // build with a fixed size queue capacity\n\n fn build_raw<T>(raw: T, channel_capacity: usize) -> (Arc<T>, Sender<Self::InstructionSet>, MachineAdapter)\n\n where\n\n T: 'static + Machine<Self::InstructionSet>,\n\n <Self as MachineBuilder>::InstructionSet: Send,\n\n {\n\n // need to review allocation strategy for bounded\n\n let (sender, receiver) = machine_impl::channel_with_capacity::<Self::InstructionSet>(channel_capacity);\n\n Self::build_common(raw, sender, receiver)\n\n }\n\n\n\n // add an instruction set\n\n fn build_addition<T>(machine: &Arc<T>, channel_capacity: usize) -> (Sender<Self::InstructionSet>, MachineAdapter)\n\n where\n\n T: 'static + Machine<Self::InstructionSet>,\n\n {\n", "file_path": "d3-core/src/collective/machine.rs", "rank": 75, "score": 143575.21424825356 }, { "content": "// clear the canvas and generate a new set of alive and dead cells\n\nfn reset_cells(cells: &[LifeSender], random_cells: &mut 
RandomCells, canvas: &mut WindowCanvas) {\n\n canvas.set_draw_color(Color::RGB(0, 0, 0));\n\n canvas.clear();\n\n canvas.present();\n\n random_cells.reset_cells(cells);\n\n}\n\n\n", "file_path": "examples/d3-life/src/main.rs", "rank": 76, "score": 142082.37179249694 }, { "content": "type NetReceiver = Receiver<NetCmd>;\n\n// commonly use result_type\n\n\n", "file_path": "d3-components/src/mio_network.rs", "rank": 77, "score": 141726.02340828083 }, { "content": "#[derive(Debug, Copy, Clone, Eq, PartialEq, SmartDefault)]\n\nenum NetworkState {\n\n #[default]\n\n Stopped,\n\n Initializing,\n\n Stopping,\n\n Running,\n\n}\n\n\n\n/// NetworkFields are used to allow a default Network struct, the fields\n\n/// provide an Uninitialized variant, along with a varient for each field\n\n/// in the Network struct that can't otherwise be defaulted.\n", "file_path": "d3-components/src/network_start_stop.rs", "rank": 78, "score": 141294.71460092458 }, { "content": "// drop everything and shut down the server\n\npub fn finish(cells: Vec<LifeSender>) {\n\n cells.iter().for_each(|s| s.send(LifeCmd::RemoveNeighbors).unwrap());\n\n sync();\n\n drop(cells);\n\n sync();\n\n stop_server();\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_spaceship() {\n\n CombinedLogger::init(vec![TermLogger::new(LevelFilter::Warn, Config::default(), TerminalMode::Mixed)]).unwrap();\n\n\n\n let (_graphics_receiver, cells) = init_cells();\n\n set_cells(&cells, &[(1, 2), (2, 3), (3, 1), (3, 2), (3, 3)]);\n\n // next: universe.set_cells(&[(2,1), (2,3), (3,2), (3,3), (4,2)]);\n\n\n", "file_path": "examples/d3-life/src/life_cell.rs", "rank": 79, "score": 141040.4012059511 }, { "content": "#[derive(Debug)]\n\nstruct RunParams {\n\n machine_count: usize,\n\n messages: usize,\n\n iterations: usize,\n\n forwarding_multiplier: usize,\n\n timeout: std::time::Duration,\n\n unbound_queue: bool,\n\n}\n\n\n\n// convert from a field map to RunParams\n\nimpl From<settings::FieldMap> 
for RunParams {\n\n fn from(map: settings::FieldMap) -> Self {\n\n Self {\n\n machine_count: *map.get(&settings::Field::machines).expect(\"machines missing\"),\n\n messages: *map.get(&settings::Field::messages).expect(\"messages missing\"),\n\n iterations: *map.get(&settings::Field::iterations).expect(\"iterations missing\"),\n\n forwarding_multiplier: *map\n\n .get(&settings::Field::forwarding_multiplier)\n\n .expect(\"forwarding_multiplier missing\"),\n\n timeout: std::time::Duration::from_secs(*map.get(&settings::Field::timeout).expect(\"timeout missing\") as u64),\n\n unbound_queue: *map.get(&settings::Field::unbound_queue).unwrap_or(&0) != 0,\n\n }\n\n }\n\n}\n\n\n", "file_path": "examples/test-server/src/forwarder.rs", "rank": 80, "score": 140716.513094165 }, { "content": "// The monitor runs in a thread, this is its data\n\nstruct ThreadData {\n\n receiver: MonitorReceiver,\n\n executor: ExecutorControlObj,\n\n senders: Vec<CoreStatsSender>,\n\n}\n\nimpl ThreadData {\n\n // launch the long running system monitor thread\n\n fn spawn(receiver: MonitorReceiver, executor: ExecutorControlObj) -> Option<std::thread::JoinHandle<()>> {\n\n let thread = thread::spawn(move || {\n\n let mut res = Self {\n\n receiver,\n\n executor,\n\n senders: Vec::new(),\n\n };\n\n res.run()\n\n });\n\n Some(thread)\n\n }\n\n\n\n // the system monitor run loop. It doesn't do much.\n", "file_path": "d3-core/src/scheduler/overwatch.rs", "rank": 81, "score": 140416.97927507915 }, { "content": "#[allow(dead_code)]\n\nstruct ThreadData {\n\n id: Id,\n\n receiver: SchedReceiver,\n\n monitor: MonitorSender,\n\n scheduler: SchedSender,\n\n workers: Workers,\n\n run_queue: ExecutorInjector,\n\n wait_queue: Arc<deque::Injector<SchedTask>>,\n\n work: ExecutorWorker,\n\n stealers: ExecutorStealers,\n\n shared_info: Arc<SharedExecutorInfo>,\n\n blocked_sender_count: usize,\n\n}\n\nimpl ThreadData {\n\n /// build stealers, the workers are a shared RwLock object. 
Clone\n\n /// the stealer from every worker except ourself.\n\n fn build_stealers(&self) -> ExecutorStealers {\n\n let stealers = self\n\n .workers\n\n .read()\n", "file_path": "d3-core/src/scheduler/executor.rs", "rank": 82, "score": 140403.58950668876 }, { "content": "struct Factory {\n\n sender: SchedSender,\n\n receiver: SchedReceiver,\n\n}\n\nimpl Factory {\n\n /// create the factory\n\n pub fn new() -> Self {\n\n let (sender, receiver) = crossbeam::channel::unbounded::<SchedCmd>();\n\n Self { sender, receiver }\n\n }\n\n /// get the sender for the scheduler\n\n pub fn get_sender(&self) -> SchedSender { self.sender.clone() }\n\n /// start the scheduler\n\n pub fn create_and_start(&self, monitor: MonitorSender, queues: (ExecutorInjector, SchedTaskInjector)) -> SchedulerEnum {\n\n // this where different schedulers can be started\n\n log::info!(\"creating Scheduler\");\n\n let s: SchedulerEnum = DefaultScheduler::new(self.sender.clone(), self.receiver.clone(), monitor, queues).into();\n\n s\n\n }\n\n}\n\n\n\nimpl SchedulerFactory for Factory {\n\n fn get_sender(&self) -> SchedSender { self.sender.clone() }\n\n // start must return a sized object trait, I prefer Arc over Box\n\n fn start(&self, monitor: MonitorSender, queues: (ExecutorInjector, SchedTaskInjector)) -> SchedulerEnum {\n\n self.create_and_start(monitor, queues)\n\n }\n\n}\n", "file_path": "d3-core/src/scheduler/sched_factory.rs", "rank": 83, "score": 140210.623256042 }, { "content": "type Id = usize;\n", "file_path": "d3-core/src/scheduler/executor.rs", "rank": 84, "score": 140153.68841274132 }, { "content": "#[derive(Debug, Copy, Clone, Eq, PartialEq, SmartDefault)]\n\nenum ServerState {\n\n #[default]\n\n Stopped,\n\n Initializing,\n\n Stopping,\n\n Running,\n\n}\n\n\n\n// These are the aforementioned server fields. 
The server owns the scheduler, executor and monitor.\n", "file_path": "d3-core/src/scheduler/setup_teardown.rs", "rank": 85, "score": 140006.34865632746 }, { "content": "#[derive(Debug, Default)]\n\nstruct BigExecutorStats {\n\n executors_created: usize,\n\n max_live_executors: usize,\n\n max_dead_executors: usize,\n\n}\n\nimpl BigExecutorStats {\n\n fn add_worker(&mut self, live_count: usize) {\n\n self.executors_created += 1;\n\n self.max_live_executors = usize::max(self.max_live_executors, live_count);\n\n }\n\n fn remove_worker(&mut self, dead_count: usize) { self.max_dead_executors = usize::max(self.max_dead_executors, dead_count); }\n\n}\n\n\n\nimpl Drop for BigExecutorStats {\n\n fn drop(&mut self) {\n\n log::info!(\"{:#?}\", self);\n\n }\n\n}\n\n\n", "file_path": "d3-core/src/scheduler/executor.rs", "rank": 86, "score": 137690.79128705914 }, { "content": "// The factory for the system monitor\n\npub trait MonitorFactory {\n\n /// get a clone of the sender for the system monitor\n\n fn get_sender(&self) -> MonitorSender;\n\n /// start the system monitor\n\n fn start(&self, executor: ExecutorControlObj) -> MonitorControlObj;\n\n}\n\npub type MonitorFactoryObj = Arc<dyn MonitorFactory>;\n\n\n", "file_path": "d3-core/src/scheduler/traits.rs", "rank": 87, "score": 133120.71226422858 }, { "content": "// change the distribution of alive and dead cells\n\nfn change_distribution(percent_pop: usize, canvas: &mut WindowCanvas) -> RandomCells {\n\n let title = format!(\"rust-d3-life demo {}x{} {}%\", GRID_SIZE, GRID_SIZE, percent_pop);\n\n canvas.window_mut().set_title(&title).unwrap();\n\n let limit = (GRID_SIZE * GRID_SIZE) * percent_pop / 100;\n\n RandomCells::new(limit, GRID_SIZE * GRID_SIZE)\n\n}\n\n\n", "file_path": "examples/d3-life/src/main.rs", "rank": 88, "score": 132098.17928166455 }, { "content": "#[doc(hidden)]\n\npub trait MachineDependentSenderAdapter {\n\n fn try_send(&mut self) -> Result<usize, TrySendError>;\n\n}\n\n\n\n// This is information that the 
executor thread shares with the worker, allowing\n\n// the big executor insight into what the executor is up to.\n\n#[derive(Debug)]\n\npub struct SharedExecutorInfo {\n\n state: AtomicCell<ExecutorState>,\n\n}\n\nimpl SharedExecutorInfo {\n\n pub fn set_state(&self, new: ExecutorState) { self.state.store(new); }\n\n pub fn get_state(&self) -> ExecutorState { self.state.load() }\n\n pub fn compare_set_state(&self, old: ExecutorState, new: ExecutorState) { self.state.compare_and_swap(old, new); }\n\n}\n\nimpl Default for SharedExecutorInfo {\n\n fn default() -> Self {\n\n Self {\n\n state: AtomicCell::new(ExecutorState::Init),\n\n }\n", "file_path": "d3-core/src/tls/tls_executor.rs", "rank": 89, "score": 126233.26642664912 }, { "content": "fn run_server(settings: &settings::Settings) {\n\n // configure components, Ok() contains vec<ComponentInfo>\n\n let result = components::configure(settings);\n\n if result.is_err() {\n\n log::error!(\"Component construction failed, shutting down\")\n\n }\n\n let components = result.unwrap();\n\n if components.is_empty() {\n\n log::error!(\"No Components have been configured.\")\n\n }\n\n // configure coordinators, Ok() contains vec<CoordinatorInfo>\n\n let result = coordinators::configure(settings, &components);\n\n if result.is_err() {\n\n log::error!(\"Coordinator construction failed, shutting down\")\n\n }\n\n let coordinators = result.unwrap();\n\n if coordinators.is_empty() {\n\n log::error!(\"No Coordinators have been configured.\")\n\n }\n\n\n", "file_path": "examples/test-server/src/main.rs", "rank": 90, "score": 124548.8833334099 }, { "content": "fn run_fanout_fanin(settings: &ForwarderSettings) {\n\n let fields = match &settings.fanout_fanin {\n\n Some(map) => merge_maps(settings.default.clone(), map.clone()),\n\n None => settings.default.clone(),\n\n };\n\n let params = RunParams::from(fields);\n\n log::info!(\"fanout_fanin: {:?}\", params);\n\n\n\n let mut fanout_fanin = FanoutFaninDriver {\n\n machine_count: 
params.machine_count,\n\n message_count: params.messages,\n\n bound_queue: !params.unbound_queue,\n\n duration: params.timeout,\n\n ..Default::default()\n\n };\n\n\n\n fanout_fanin.setup();\n\n log::debug!(\"fanout_fanin: starting run\");\n\n let t = std::time::Instant::now();\n\n for _ in 0 .. params.iterations {\n\n fanout_fanin.run();\n\n }\n\n log::info!(\"completed fanout_fanin run in {:#?}\", t.elapsed());\n\n FanoutFaninDriver::teardown(fanout_fanin);\n\n}\n\n\n", "file_path": "examples/test-server/src/forwarder.rs", "rank": 91, "score": 123652.4198216445 }, { "content": "fn run_daisy_chain(settings: &ForwarderSettings) {\n\n let fields = match &settings.daisy_chain {\n\n Some(map) => merge_maps(settings.default.clone(), map.clone()),\n\n None => settings.default.clone(),\n\n };\n\n let params = RunParams::from(fields);\n\n log::info!(\"daisy_chain: {:?}\", params);\n\n\n\n let mut daisy_chain = DaisyChainDriver {\n\n machine_count: params.machine_count,\n\n message_count: params.messages,\n\n bound_queue: !params.unbound_queue,\n\n forwarding_multiplier: params.forwarding_multiplier,\n\n duration: params.timeout,\n\n ..Default::default()\n\n };\n\n\n\n daisy_chain.setup();\n\n let t = std::time::Instant::now();\n\n for _ in 0 .. 
params.iterations {\n\n daisy_chain.run();\n\n }\n\n log::info!(\"completed daisy-chain run in {:#?}\", t.elapsed());\n\n DaisyChainDriver::teardown(daisy_chain);\n\n}\n\n\n", "file_path": "examples/test-server/src/forwarder.rs", "rank": 92, "score": 123652.41982164451 }, { "content": "fn run_chaos_monkey(settings: &ForwarderSettings) {\n\n let fields = match &settings.chaos_monkey {\n\n Some(map) => merge_maps(settings.default.clone(), map.clone()),\n\n None => settings.default.clone(),\n\n };\n\n let params = RunParams::from(fields);\n\n\n\n let fields = match &settings.chaos_monkey {\n\n Some(map) => merge_maps(settings.default.clone(), map.clone()),\n\n None => settings.default.clone(),\n\n };\n\n let inflection_value = *fields.get(&settings::Field::inflection_value).unwrap_or(&1usize);\n\n log::info!(\"chaos_monkey: {:?}, inflection_value {}\", params, inflection_value);\n\n\n\n let mut chaos_monkey = ChaosMonkeyDriver {\n\n machine_count: params.machine_count,\n\n message_count: params.messages,\n\n bound_queue: !params.unbound_queue,\n\n duration: params.timeout,\n\n inflection_value: inflection_value as u32,\n", "file_path": "examples/test-server/src/forwarder.rs", "rank": 93, "score": 123652.41982164451 }, { "content": "#[derive(Debug)]\n\nstruct Alice {\n\n uuid: AtomicCell<Uuid>,\n\n net_sender: NetSender,\n\n conn_id: NetConnId,\n\n state: AtomicCell<AliceState>,\n\n logtag: String,\n\n}\n\n\n\n// Alice implements the NetCmd instruction set.\n\nimpl Machine<NetCmd> for Alice {\n\n // connected doesn't require an impl, we'll just save the uuid and log\n\n fn connected(&self, uuid: Uuid) {\n\n self.uuid.store(uuid);\n\n log::debug!(\"{} {} has entered the building\", self.logtag, self.uuid.load());\n\n }\n\n // disconnected doesn't require an impl, we'll just log\n\n fn disconnected(&self) {\n\n log::debug!(\"{} {} has left the building\", self.logtag, self.uuid.load());\n\n }\n\n // receive is required\n", "file_path": 
"examples/alice-service/src/alice.rs", "rank": 94, "score": 120163.275977252 }, { "content": "#[allow(dead_code)]\n\nstruct MonitorCoordinator {\n\n net_sender: NetSender,\n\n bind_addr: String,\n\n components: Vec<ComponentInfo>,\n\n mutable: Arc<Mutex<MutableData>>,\n\n}\n\nimpl MonitorCoordinator {\n\n fn add_connection(&self, conn_id: NetConnId) {\n\n if self\n\n .net_sender\n\n .send(NetCmd::BindConn(conn_id, self.mutable.lock().my_sender.as_ref().unwrap().clone()))\n\n .is_err()\n\n {}\n\n self.mutable.lock().connections.insert(conn_id);\n\n }\n\n fn remove_connection(&self, conn_id: NetConnId) { self.mutable.lock().connections.remove(&conn_id); }\n\n fn executor_stats(&self, stats: ExecutorStats) {\n\n let bytes: Vec<u8> = format!(\"{:#?}\", stats).as_bytes().to_vec();\n\n self.mutable.lock().send_bytes(bytes, &self.net_sender);\n\n }\n", "file_path": "examples/monitor-service/src/monitor.rs", "rank": 95, "score": 118639.43784258013 }, { "content": "#[derive(Debug)]\n\nstruct AliceCoordinator {\n\n // A net_sender is injected, simplifying communincation with the network.\n\n net_sender: NetSender,\n\n // The bind_addr is also injected\n\n bind_addr: String,\n\n}\n\n\n\n// The implementation of the AliceCoordinator only needs to handle a new connection\n\nimpl AliceCoordinator {\n\n // When a new connection arives, create Alice and let her handle the traffic\n\n fn add_connection(&self, conn_id: NetConnId) {\n\n // create Alice and let her handle connection traffic\n\n let (_, sender) = executor::connect::<_, NetCmd>(Alice::new(conn_id, self.net_sender.clone()));\n\n // give Alice's sender to the network binding her to the connection\n\n self.net_sender\n\n .send(NetCmd::BindConn(conn_id, sender))\n\n .expect(\"failed to send BindConn\");\n\n }\n\n}\n\n\n", "file_path": "examples/alice-service/src/alice.rs", "rank": 96, "score": 118577.33000702497 }, { "content": "// The entry point for configuring the chat consumer component. 
It receives two sets of settings,\n\n// component specific and the full set. In most cases the specific set is sufficient.\n\n// In this case, there's very little to do.\n\npub fn configure(config: SimpleConfig, _settings: &settings::Settings) -> Result<Option<ComponentSender>, ComponentError> {\n\n if config.enabled {\n\n let (_, sender) = executor::connect(ConsumerComponent::new());\n\n Ok(Some(sender))\n\n } else {\n\n Ok(None)\n\n }\n\n}\n\n\n\n/// The ConsumerComponent is a factory for a ChatInstance, a consumer of chat messages.\n", "file_path": "examples/chat-service/src/chat_consumer.rs", "rank": 97, "score": 114832.46406215 }, { "content": "// The entry point for configuring the chat producer component. It receives two sets of settings,\n\n// component specific and the full set. In most cases the specific set is sufficient.\n\n// In this case, there's very little to do.\n\npub fn configure(config: SimpleConfig, _settings: &settings::Settings) -> Result<Option<ComponentSender>, ComponentError> {\n\n if config.enabled {\n\n let (_, sender) = executor::connect(ProducerComponent::new());\n\n Ok(Some(sender))\n\n } else {\n\n Ok(None)\n\n }\n\n}\n\n\n", "file_path": "examples/chat-service/src/chat_producer.rs", "rank": 98, "score": 114832.46406215 }, { "content": "/// This is the entry point for configuring the producer component. We get two sets of settings,\n\n/// our specific and the full set. Hopefully we'll not need to dig into the full set\n\n/// In our case, there's very little to do.\n\npub fn configure(config: SimpleConfig, _settings: &settings::Settings) -> Result<Option<ComponentSender>, ComponentError> {\n\n if config.enabled {\n\n let (_, sender) = executor::connect(ProducerComponent::new());\n\n Ok(Some(sender))\n\n } else {\n\n Ok(None)\n\n }\n\n}\n\n\n", "file_path": "examples/echo-service/src/echo_producer.rs", "rank": 99, "score": 114828.7398814969 } ]
Rust
src/main.rs
diodesign/rustinvaders
40d4f0adda37ecf4f851a65ff206954bbba812ac
/* Space invaders in Rust * * Game concept by Tomohiro Nishikado / Taito * Rust code By Chris Williams <[email protected]> * * Written for fun. See LICENSE. * */ extern crate glfw; extern crate kiss3d; extern crate nalgebra as na; extern crate rand; use std::path::Path; use na::{ Point3, Point2 }; use kiss3d::window::Window; use kiss3d::event::{ Event, WindowEvent, Key, Action }; use kiss3d::light::Light; use kiss3d::camera::ArcBall; use kiss3d::text::Font; mod bullet; mod aliens; mod hero; mod collision; const MAX_SCORE: i32 = 9999999; /* seems like a cool number */ const MAX_LIVES: i32 = 99; /* also a cool number */ /* collect up the objects in the playfield */ struct Playfield { aliens: aliens::Aliens, /* squadron of enemy aliens to shoot down */ player: hero::Hero, /* our player hero */ } /* maintain state from level to level */ struct Game { score: i32, /* player's current points score */ lives: i32, /* player's current number of lives */ player_x_pos: f32, /* player's ship x-position (y and z are fixed) */ } enum LevelOutcome { Victory, /* player beat the level */ Died /* player ran out of lives */ } fn main() { let mut window = Window::new("Rust Invaders"); window.set_framerate_limit(Some(60)); window.set_light(Light::StickToCamera); /* notes: each of config_game, play_game, and game_over must delete all * scene objects before exiting. 
each function must track its own objects, * there is no automatic clean-up */ loop { /* render the opening screen + menu */ /* setup and play the game */ play_game(&mut window); /* render game over screen */ } } /* camera generate a standard camera view => distance = camera's z-axis distance from scene center */ fn camera(distance: f32) -> ArcBall { let eye = Point3::new(0.0, 0.0, distance); let at = Point3::origin(); return ArcBall::new(eye, at); } /* show a menu or at least give the player a chance to start */ fn config_game(mut window: &mut Window) { /* for now simply check the player is ready - difficulty settings and so on can be configured later: TODO */ fullscreen_message(window, "Welcome to Rust Invaders", 0.6, 0.6, 0.6); } /* show the bad news with white on red */ fn game_over(mut window: &mut Window) { fullscreen_message(window, "Game over :(", 0.4, 0.0, 0.0); } /* show end of level congratualtions with white */ fn congrats (mut window: &mut Window) { fullscreen_message(window, "Level complete :)", 0.0, 0.4, 0.0); } /* fullscreen_message render basic fullscreen text message with spinning black alien at the top. 
=> window = graphics context text = message to display using white characters r, g, b = background color <= returns when space key is pressed */ fn fullscreen_message(mut window: &mut Window, text: &str, r: f32, g: f32, b: f32) { window.set_background_color(r, g, b); let font = Font::new(&Path::new("media/gameplay.ttf")).expect("Could not load font file"); let mut camera = camera(-100.0); let mut key_press = false; let x_start = 100.0 - (text.len() as f32 * 10.0 * 0.5); /* spawn single black rotating alien, fixed in place */ let mut alien = aliens::Alien::new(&mut window); alien.spawn(0.0, 10.0, 0.0, 0.0); alien.override_color(0.0, 0.0, 0.0); while window.render_with_camera(&mut camera) && key_press == false { window.draw_text(text, &Point2::new(x_start, 50.0), 64.0, &font, &Point3::new(1.0, 1.0, 1.0)); window.draw_text("Press space to continue", &Point2::new(50.0, 80.0), 64.0, &font, &Point3::new(0.9, 0.9, 0.9)); alien.animate(0.0); /* step = 0: don't move the alien */ for mut event in window.events().iter() { key_press = is_space_pressed(&mut event); if key_press == true { break; } } } /* destroy the alien immediately */ alien.delete(); } /* return true if the given event translates to a space keypress */ fn is_space_pressed(event: &mut Event) -> bool { match event.value { WindowEvent::Key(code, action, _) => match(code, action) { (Key::Space, Action::Press) => { return true; }, (_, _) => {} }, /* ignore mouse events */ WindowEvent::MouseButton(_, _, _) => event.inhibited = true, WindowEvent::Scroll(_, _, _) => event.inhibited = true, _ => {} /* pass on other events to the default handlers */ }; return false; } /* a game is a loop of levels until the player runs out of lives */ fn play_game(mut window: &mut Window) { /* set up the camera and black-background scene for the whole game */ window.set_background_color(0.0, 0.0, 0.0); let mut camera = camera(-250.0); /* these variables carry across from level to level */ let mut state = Game { score: 0, lives: 3, 
player_x_pos: 0.0, }; /* play level after level until player dies */ loop { match play_level(&mut window, &mut camera, &mut state) { LevelOutcome::Died => break, /* exit to game over screen */ LevelOutcome::Victory => congrats(&mut window) } } } /* play a level of the game * => window = graphics context * camera = viewing camera context * state = game state variables * <= LevelOutcome::PlayerDead if hero ran out of lives */ fn play_level(mut window: &mut Window, camera: &mut ArcBall, state: &mut Game) -> LevelOutcome { let font = Font::new(&Path::new("media/gameplay.ttf")).expect("Could not load font file"); /* create the baddies and hero for this level */ let mut playfield = Playfield { aliens: aliens::Aliens::new(&mut window), player: hero::Hero::new(&mut window, state.player_x_pos), }; let mut player_move_left = false; let mut player_move_right = false; let mut player_fire = false; /* rendering loop */ while window.render_with_camera(camera) { /* render the score line */ window.draw_text(format!("Score: {:07} Lives: {:02}", state.score, state.lives).as_str(), &Point2::new(10.0, 2.0), 64.0, &font, &Point3::new(1.0, 1.0, 1.0)); /* update aliens, player and any of their bullets / bombs in play */ playfield.aliens.animate(); playfield.player.animate(); /* check events for things like keypresses */ for mut event in window.events().iter() { match event.value { /* handle a keypress */ WindowEvent::Key(code, action, _) => { match (code, action) { (Key::Z, Action::Press) => player_move_left = true, (Key::Z, Action::Release) => player_move_left = false, (Key::X, Action::Press) => player_move_right = true, (Key::X, Action::Release) => player_move_right = false, (Key::Return, Action::Press) => player_fire = true, (Key::Return, Action::Release) => player_fire = false, (_, _) => {} } /* stop other keypresses going through to the default handler */ event.inhibited = true; }, /* ignore mouse events */ WindowEvent::MouseButton(_, _, _) => event.inhibited = true, 
WindowEvent::Scroll(_, _, _) => event.inhibited = true, _ => {} /* pass on other events to the default handlers */ } } /* stop playing the level if the player is alive and the aliens are all dead, or if we're * out of lives. this check means we keep animating enemy and ship explosions when * the player has shot all the aliens or has run out of lives, rather than bailing out * immediately */ if (playfield.player.state == hero::State::Alive && playfield.aliens.all_dead() == true) || (playfield.player.state != hero::State::Dying && state.lives < 1) { break; } /* only update the player if it's still alive, otherwise all sorts * of inconsistencies will occur (ship hit by a bomb or alien while dying etc) */ if playfield.player.state != hero::State::Alive { continue; /* skip movement, collision detection, etc while player is dead/dying */ } /* process results of events: if a movement key is held down then * continue moving in that direction */ match (player_move_left, player_move_right) { (true, false) => playfield.player.move_left(), (false, true) => playfield.player.move_right(), _ => {} } /* player can keep fire button held down, but we only allow one * hero bullet per playfield as per the original game */ if player_fire == true { playfield.player.fire(&mut window); /* needs window to create its bullet */ } playfield.aliens.fire(&mut window); /* aliens drop bombs as soon as they are able */ /* did the player's bullet hit an alien? 
*/ if playfield.player.bullet.is_some() == true { let (x, y, _) = playfield.player.bullet.as_mut().unwrap().get_coords(); if playfield.aliens.collision(x, y) == collision::CollisionOutcome::Hit { /* the call to collision() removes the alien if there is a hit, but * we have to tell the ship's bullet to blow up too */ playfield.player.destroy_bullet(); state.score = state.score + aliens::ALIEN_POINTS; if state.score > MAX_SCORE { state.score = MAX_SCORE; } } /* remove bullet if it's gone out of bounds */ if y > aliens::ALIEN_Y_CEILING { playfield.player.destroy_bullet(); } } /* did an alien bomb hit the player? */ if playfield.aliens.bomb.is_some() == true { let (x, y, _) = playfield.aliens.bomb.as_mut().unwrap().get_coords(); if playfield.player.collision(x, y) == collision::CollisionOutcome::Hit { /* tell aliens to blow up their bomb, and the player its ship, if there is a hit */ playfield.aliens.destroy_bomb(); playfield.player.destroy(&mut window); /* window needed to add explosion debris to game world */ state.lives = state.lives - 1; } /* remove the bomb if it goes out of bounds */ if y < hero::HERO_Y_FLOOR { playfield.aliens.destroy_bomb(); } } /* get the player's x, y coords */ let (player_x_pos, player_y_pos, _) = playfield.player.get_coords(); /* did an alien fly into the player? */ if playfield.aliens.collision(player_x_pos, player_y_pos) == collision::CollisionOutcome::Hit { playfield.player.destroy(&mut window); /* window needed to add explosion debris to game world */ state.lives = state.lives - 1 } /* did the aliens manage to get below the player? if so, that's an instant * game over, I'm afraid */ if playfield.aliens.lowest_y() <= player_y_pos { playfield.player.destroy(&mut window); /* window needed to add explosion debris to game world */ state.lives = 0; } } /* we've exited the level loop. 
remove all objects from the playfield */ playfield.aliens.delete(); playfield.player.delete(); /* if we're still alive then we beat the level, otherwise we died */ if state.lives > 0 { return LevelOutcome::Victory } return LevelOutcome::Died }
/* Space invaders in Rust * * Game concept by Tomohiro Nishikado / Taito * Rust code By Chris Williams <[email protected]> * * Written for fun. See LICENSE. * */ extern crate glfw; extern crate kiss3d; extern crate nalgebra as na; extern crate rand; use std::path::Path; use na::{ Point3, Point2 }; use kiss3d::window::Window; use kiss3d::event::{ Event, WindowEvent, Key, Action }; use kiss3d::light::Light; use kiss3d::camera::ArcBall; use kiss3d::text::Font; mod bullet; mod aliens; mod hero; mod collision; const MAX_SCORE: i32 = 9999999; /* seems like a cool number */ const MAX_LIVES: i32 = 99; /* also a cool number */ /* collect up the objects in the playfield */ struct Playfield { aliens: aliens::Aliens, /* squadron of enemy aliens to shoot down */ player: hero::Hero, /* our player hero */ } /* maintain state from level to level */ struct Game { score: i32, /* player's current points score */ lives: i32, /* player's current number of lives */ player_x_pos: f32, /* player's ship x-position (y and z are fixed) */ } enum LevelOutcome { Victory, /* player beat the level */ Died /* player ran out of lives */ } fn main() { let mut window = Window::new("Rust Invaders"); window.set_framerate_limit(Some(60)); window.set_light(Light::StickToCamera); /* notes: each of config_game, play_game, and game_over must delete all * scene objects before exiting. 
each function must track its own objects, * there is no automatic clean-up */ loop { /* render the opening screen + menu */ /* setup and play the game */ play_game(&mut window); /* render game over screen */ } } /* camera generate a standard camera view => distance = camera's z-axis distance from scene center */ fn camera(distance: f32) -> ArcBall { let eye = Point3::new(0.0, 0.0, distance); let at = Point3::origin(); return ArcBall::new(eye, at); } /* show a menu or at least give the player a chance to start */ fn config_game(mut window: &mut Window) { /* for now simply check the player is ready - difficulty settings and so on can be configured later: TODO */ fullscreen_message(window, "Welcome to Rust Invaders", 0.6, 0.6, 0.6); } /* show the bad news with white on red */ fn game_over(mut window: &mut Window) { fullscreen_message(window, "Game over :(", 0.4, 0.0, 0.0); } /* show end of level congratualtions with white */ fn congrats (mut window: &mut Window) { fullscreen_message(window, "Level complete :)", 0.0, 0.4, 0.0); } /* fullscreen_message render basic fullscreen text message with spinning black alien at the top. 
=> window = graphics context text = message to display using white characters r, g, b = background color <= returns when space key is pressed */ fn fullscreen_message(mut window: &mut Window, text: &str, r: f32, g: f32, b: f32) { window.set_background_color(r, g, b); let font = Font::new(&Path::new("media/gameplay.ttf")).expect("Could not load font file"); let mut camera = camera(-100.0); let mut key_press = false; let x_start = 100.0 - (text.len() as f32 * 10.0 * 0.5); /* spawn single black rotating alien, fixed in place */ let mut alien = aliens::Alien::new(&mut window); alien.spawn(0.0, 10.0, 0.0, 0.0); alien.override_color(0.0, 0.0, 0.0); while window.render_with_camera(&mut camera) && key_press == false { window.draw_text(text, &Point2::new(x_start, 50.0), 64.0, &font, &Point3::new(1.0, 1.0, 1.0)); window.draw_text("Press space to continue", &Point2::new(50.0, 80.0), 64.0, &font, &Point3::new(0.9, 0.9, 0.9)); alien.animate(0.0); /* step = 0: don't move the alien */ for mut event in window.events().iter() { key_press = is_space_pressed(&mut event); if key_press == true { break; } } } /* destroy the alien immediately */ alien.delete(); } /* return true if the given event translates to a space keypress */ fn is_space_pressed(event: &mut Event) -> bool { match event.value { WindowEvent::Key(code, action, _) => match(code, action) { (Key::Space, Action::Press) => { return true; }, (_, _) => {} }, /* ignore mouse events */ WindowEvent::MouseButton(_, _, _) => event.inhibited = true, WindowEvent::Scroll(_, _, _) => event.inhibited = true, _ => {} /* pass on other events to the default handlers */ }; return false; } /* a game is a loop of levels until the player runs out of lives */ fn play_game(mut window: &mut Window) { /* set up the camera and black-background scene for the whole game */ window.set_background_color(0.0, 0.0, 0.0); let mut camera = camera(-250.0); /* these variables carry across from level to level */ let mut state = Game { score: 0, lives: 3, 
player_x_pos: 0.0, }; /* play level after level until player dies */ loop { match play_level(&mut window, &mut camera, &mut state) { LevelOutcome::Died => break, /* exit to game over screen */ LevelOutcome::Victory => congrats(&mut window) } } } /* play a level of the game * => window = graphics context * camera = viewing camera context * state = game state variables * <= LevelOutcome::PlayerDead if hero ran out of lives */ fn play_level(mut window: &mut Window, camera: &mut ArcBall, state: &mut Game) -> LevelOutcome { let font = Font::new(&Path::new("media/gameplay.ttf")).expect("Could not load font file"); /* create the baddies and hero for this level */ let mut playfield = Playfield { aliens: aliens::Aliens::new(&mut window), player: hero::Hero::new(&mut window, state.player_x_pos), }; let mut player_move_left = false; let mut player_move_right = false; let mut player_fire = false; /* rendering loop */ while window.render_with_camera(camera) { /* render the score line */ window.draw_text(format!("Score: {:07} Lives: {:02}", state.score, state.lives).as_str(), &Point2::new(10.0, 2.0), 64.0, &font, &Point3::new(1.0, 1.0, 1.0)); /* update aliens, player and any of their bullets / bombs in play */ playfield.aliens.animate(); playfield.player.animate(); /* check events for things like keypresses */ for mut event in window.events().iter() { match event.value { /* handle a keypress */ WindowEvent::Key(code, action, _) => { match (code, action) { (Key::Z, Action::Press) => player_move_left = true, (Key::Z, Action::Release) => player_move_left = false, (Key::X, Action::Press) => player_move_right = true, (Key::X, Action::Release) => player_move_right = false, (Key::Return, Action::Press) => player_fire = true, (Key::Return, Action::Release) => player_fire = false, (_, _) => {} } /* stop other keypresses going through to the default handler */ event.inhibited = true; }, /* ignore mouse events */ WindowEvent::MouseButton(_, _, _) => event.inhibited = true, 
WindowEvent::Scroll(_, _, _) => event.inhibited = true, _ => {} /* pass on other events to the default handlers */ } } /* stop playing the level if the player is alive and the aliens are all dead, or if we're * out of lives. this check means we keep animating enemy and ship explosions when * the player has shot all the aliens or has run out of lives, rather than bailing out * immediately */ if (playfield.player.state == hero::State::Alive && playfield.aliens.all_dead() == true) || (playfield.player.state != hero::State::Dying && state.lives < 1) { break; } /* only update the player if it's still alive, otherwise all sorts * of inconsistencies will occur (ship hit by a bomb or alien while dying etc) */ if playfield.player.state != hero::State::Alive { continue; /* skip movement, collision detection, etc while player is dead/dying */ } /* process results of events: if a movement key is held down then * continue moving in that direction */ match (player_move_left, player_move_right) { (true, false) => playfield.player.move_left(), (false, true) => playfield.player.move_right(), _ => {} } /* player can keep fire button held down, but we only allow one * hero bullet per playfield as per the original game */ if player_fire == true { playfield.player.fire(&mut window); /* needs window to create its bullet */ } playfield.aliens.fire(&mut window); /* aliens drop bombs as soon as they are able */ /* did the player's bullet hit an alien? 
*/ if playfield.player.bullet.is_some() == true { let (x, y, _) = playfield.player.bullet.as_mut().unwrap().get_coords(); if playfield.aliens.collision(x, y) == collision::CollisionOutcome::Hit { /* the call to collision() removes the alien if there is a hit, but * we have to tell the ship's bullet to blow up too */ playfield.player.destroy_bullet(); state.score = state.score + aliens::ALIEN_POINTS; if state.score > MAX_SCORE { state.score = MAX_SCORE; } } /* remove bullet if it's gone out of bounds */ if y > aliens::ALIEN_Y_CEILING { playfield.player.destroy_bullet(); } } /* did an alien bomb hit the player? */ if playfield.aliens.bomb.is_some() == true { let (x, y, _) = playfield.aliens.bomb.as_mut().unwrap().get_coords(); if playfield.player.collision(x, y) == collision::CollisionOutcome::Hit { /* tell aliens to blow up their bomb, and the player its ship, if there is a hit */ playfield.aliens.destroy_bomb(); playfield.player.destroy(&mut window); /* window needed to add explosion debris to game world */ state.lives = state.lives - 1; } /* remove the bomb if it goes out of bounds */ if y < hero::HERO_Y_FLOOR { playfield.aliens.destroy_bomb(); } }
(player_x_pos, player_y_pos) == collision::CollisionOutcome::Hit { playfield.player.destroy(&mut window); /* window needed to add explosion debris to game world */ state.lives = state.lives - 1 } /* did the aliens manage to get below the player? if so, that's an instant * game over, I'm afraid */ if playfield.aliens.lowest_y() <= player_y_pos { playfield.player.destroy(&mut window); /* window needed to add explosion debris to game world */ state.lives = 0; } } /* we've exited the level loop. remove all objects from the playfield */ playfield.aliens.delete(); playfield.player.delete(); /* if we're still alive then we beat the level, otherwise we died */ if state.lives > 0 { return LevelOutcome::Victory } return LevelOutcome::Died }
/* get the player's x, y coords */ let (player_x_pos, player_y_pos, _) = playfield.player.get_coords(); /* did an alien fly into the player? */ if playfield.aliens.collision
random
[ { "content": "#[derive(PartialEq)]\n\nenum State\n\n{\n\n Alive,\n\n Dying,\n\n Dead\n\n}\n\n\n\n/* aliens are either shuffling left, right, or down and then right, or down then left */\n\npub enum Movement\n\n{\n\n Left, /* moving left */\n\n Right, /* moving right */\n\n DownRight, /* moving down, will go right */\n\n DownLeft /* moving down, will go left */\n\n}\n\n\n\n/* aliens have 2 animation states: the base design and a slightly modified one */\n", "file_path": "src/aliens.rs", "rank": 10, "score": 92220.12432928343 }, { "content": "pub fn random_explosion_vector(rng: &mut rand::rngs::ThreadRng) -> f32\n\n{\n\n if rng.gen()\n\n {\n\n return rng.gen_range(-0.5f32, -0.1f32);\n\n }\n\n\n\n return rng.gen_range(0.1f32, 0.5f32);\n\n}\n\n\n\n/* ------------------------------------------------------------------------------ */\n\n\n\n/* collect up all aliens and the bomb they drop in a playfield together */\n\npub struct Aliens\n\n{\n\n squadron: Vec<Alien>,\n\n pub bomb: Option<bullet::Bullet>\n\n}\n\n\n\n/* control the whole squad at once */\n", "file_path": "src/aliens.rs", "rank": 11, "score": 87864.26203265325 }, { "content": "struct Debris\n\n{\n\n node: SceneNode, /* the object in the game world */\n\n x: f32, y: f32, z: f32 /* movement vector */\n\n}\n\n\n\n/* Player has 3 game states: alive, exploding, or dead */\n\n#[derive(PartialEq)]\n\npub enum State\n\n{\n\n Alive, /* playing normally */\n\n Dying, /* exploding in death */\n\n Dead /* finished exploding, reseting to alive */\n\n}\n\n\n\npub struct Hero\n\n{\n\n x: f32, y: f32, z: f32, /* game world coords of the hero's ship */\n\n ship: SceneNode, /* the ship in the graphics context */\n\n time_of_death: Option<Instant>, /* when the hero started dying */\n", "file_path": "src/hero.rs", "rank": 12, "score": 76710.05241369901 }, { "content": "enum Frame\n\n{\n\n Base,\n\n Translated\n\n}\n\n\n\npub struct Alien\n\n{\n\n x: f32, y: f32, z: f32, /* center of the model on the playfield */\n\n pixels: 
Vec<Pixel>, /* the pixels making up this alien */\n\n model: SceneNode, /* the scene node holding all the pixels */\n\n frame: Frame, /* the type of animation frame being displayed */\n\n state: State, /* whether the alien is alive, dead, etc */\n\n last_time: Instant, /* last time we animated this alien */\n\n time_of_death: Option<Instant>, /* when the alien was declared dead */\n\n rng: rand::rngs::ThreadRng, /* access to the thread's RNG */\n\n drop_steps: f32, /* number of units we've moved alien down at end of row */\n\n movement: Movement /* the direction the alien is traveling */\n\n}\n\n\n", "file_path": "src/aliens.rs", "rank": 15, "score": 54954.91052547875 }, { "content": "struct Pixel\n\n{\n\n /* dimensions of this pixel */\n\n width: f32, height: f32, depth: f32,\n\n\n\n /* the pixel's x, y, z base coords and a translation to move the pixel into\n\n another position. this allows the pixel to be animated by switching between\n\n its base and translated positions. yes, this could be point structs... 
soon */\n\n x: f32, y: f32, z: f32,\n\n tx: f32, ty: f32, tz: f32,\n\n\n\n r: f32, g: f32, b: f32, /* color of the pixel */\n\n explode_x: f32, explode_y: f32, explode_z: f32, /* vector describing the pixel's explosion trajectory */\n\n node: Option<SceneNode> /* this pixel's scene node */\n\n}\n\n\n\n/* aliens have 3 game states: alive, exploding, or dead */\n", "file_path": "src/aliens.rs", "rank": 16, "score": 54574.73922695215 }, { "content": "pub fn check(scenario: Collision) -> CollisionOutcome\n\n{\n\n if scenario.a.x < scenario.b.x + COLLISION_TOLERANCE &&\n\n scenario.a.x > scenario.b.x - COLLISION_TOLERANCE &&\n\n scenario.a.y < scenario.b.y + COLLISION_TOLERANCE &&\n\n scenario.a.y > scenario.b.y - COLLISION_TOLERANCE\n\n {\n\n return CollisionOutcome::Hit;\n\n }\n\n\n\n return CollisionOutcome::Miss;\n\n}\n\n\n", "file_path": "src/collision.rs", "rank": 17, "score": 49026.547642325575 }, { "content": "/* Space invaders in Rust\n\n *\n\n * Bullet time\n\n *\n\n * Game concept by Tomohiro Nishikado / Taito\n\n * Rust code By Chris Williams <[email protected]>\n\n *\n\n * Written for fun. See LICENSE.\n\n *\n\n */\n\n\n\nextern crate glfw;\n\nextern crate kiss3d;\n\nextern crate nalgebra as na;\n\nextern crate rand;\n\n\n\nuse na::Translation3;\n\nuse kiss3d::window::Window;\n\nuse kiss3d::scene::SceneNode;\n\n\n", "file_path": "src/bullet.rs", "rank": 18, "score": 21994.020346746744 }, { "content": " x: x, y: y, z: 0.0,\n\n bullet: window.add_sphere(rad),\n\n speed: speed\n\n };\n\n\n\n shell.bullet.append_translation(&Translation3::new(shell.x, shell.y, shell.z));\n\n shell.bullet.set_color(r, g, b); \n\n\n\n return shell;\n\n }\n\n\n\n /* remove the bullet's sphere from the screen */\n\n pub fn destroy(&mut self)\n\n {\n\n self.bullet.unlink();\n\n }\n\n\n\n /* if a bullet is in play then move it. 
if the ship is blowing up, then blow it up some more */\n\n pub fn animate(&mut self)\n\n {\n", "file_path": "src/bullet.rs", "rank": 19, "score": 21976.986747611405 }, { "content": "pub struct Bullet\n\n{\n\n x: f32, y: f32, z: f32,\n\n bullet: SceneNode,\n\n speed: f32\n\n}\n\n\n\nimpl Bullet\n\n{\n\n /* create a new bullet\n\n * => window = graphics context in which to create the bullet \n\n * x, y = coords on where to start the bullet (z = 0.0)\n\n * rad = radius of the bullet's sphere\n\n * r, g, b = color of the bullet,\n\n * speed = rate at which bullet will move in y direction\n\n */\n\n pub fn new(window: &mut Window, x: f32, y: f32, rad: f32, r: f32, g: f32, b: f32, speed: f32) -> Bullet\n\n {\n\n let mut shell = Bullet\n\n {\n", "file_path": "src/bullet.rs", "rank": 20, "score": 21972.45712207917 }, { "content": " self.y = self.y + self.speed;\n\n self.bullet.append_translation(&Translation3::new(0.0, self.speed, 0.0));\n\n }\n\n\n\n /* returns Some(x, y, z) coords of active bullet, or None if no bullet */ \n\n pub fn get_coords(&self) -> (f32, f32, f32)\n\n {\n\n return (self.x, self.y, self.z);\n\n }\n\n}\n\n\n", "file_path": "src/bullet.rs", "rank": 21, "score": 21957.345627503662 }, { "content": "/* Space invaders in Rust\n\n *\n\n * Player graphics\n\n *\n\n * Game concept by Tomohiro Nishikado / Taito\n\n * Rust code By Chris Williams <[email protected]>\n\n *\n\n * Written for fun. 
See LICENSE.\n\n *\n\n */\n\n\n\nextern crate glfw;\n\nextern crate kiss3d;\n\nextern crate nalgebra as na;\n\nextern crate rand;\n\n\n\nuse std::time::Instant;\n\nuse na::{ Translation3, UnitQuaternion, Vector3 };\n\nuse kiss3d::window::Window;\n\nuse kiss3d::scene::SceneNode;\n", "file_path": "src/hero.rs", "rank": 22, "score": 21361.815502373618 }, { "content": " {\n\n self.bullet = Some(bullet::Bullet::new(&mut window, self.x, BULLET_Y_START,\n\n BULLET_RADIUS, BULLET_COLOR_R, BULLET_COLOR_G,\n\n BULLET_COLOR_B, BULLET_ASCENT));\n\n }\n\n }\n\n\n\n /* remove bullet from game */\n\n pub fn destroy_bullet(&mut self)\n\n {\n\n if self.bullet.as_mut().is_some() == true\n\n {\n\n self.bullet.as_mut().unwrap().destroy();\n\n self.bullet = None;\n\n }\n\n }\n\n\n\n /* check to see if the ship has collided with a thing at x,y.\n\n * note: this check does *NOT* affect the ship */\n\n pub fn collision(&mut self, x: f32, y: f32) -> collision::CollisionOutcome\n", "file_path": "src/hero.rs", "rank": 23, "score": 21349.92019695986 }, { "content": " self.debris.push(particle);\n\n }\n\n }\n\n\n\n /* animate the ship exploding or its bullet */\n\n pub fn animate(&mut self)\n\n {\n\n /* if the ship is blowing up then keep it hidden, otherwise visible */\n\n match self.state\n\n {\n\n State::Alive => self.ship.set_visible(true),\n\n State::Dying =>\n\n {\n\n /* continue blowing up the ship */\n\n self.ship.set_visible(false);\n\n self.explode();\n\n\n\n /* after 5 seconds, prepare to ressurrect the hero and also\n\n * delete all the flying debris */\n\n if self.time_of_death.unwrap().elapsed().as_secs() > 4\n", "file_path": "src/hero.rs", "rank": 24, "score": 21349.069939754383 }, { "content": " debris: Vec<Debris>, /* vector array of debris particles when dying */\n\n pub state: State, /* whether the hero is alive, exploding or dead */\n\n pub bullet: Option<bullet::Bullet> /* bullet fired by the ship */\n\n}\n\n\n\nimpl Hero\n\n{\n\n /* create a new ship model at the given 
point on the x axis */\n\n pub fn new(window: &mut Window, x: f32) -> Hero\n\n {\n\n let mut hero = Hero\n\n {\n\n state: State::Alive,\n\n x: x, y: HERO_Y_BASE, z: 0.0,\n\n ship: window.add_cone(HERO_RADIUS, HERO_HEIGHT),\n\n bullet: None,\n\n time_of_death: None,\n\n debris: Vec::new()\n\n };\n\n \n", "file_path": "src/hero.rs", "rank": 25, "score": 21347.40685107321 }, { "content": " hero.ship.append_translation(&Translation3::new(hero.x, hero.y, hero.z));\n\n hero.ship.set_color(HERO_GRAY, HERO_GRAY, HERO_GRAY);\n\n\n\n return hero;\n\n }\n\n\n\n /* make sure everything is removed from the game world */\n\n pub fn delete(&mut self)\n\n {\n\n self.ship.unlink();\n\n self.destroy_bullet();\n\n\n\n for particle in self.debris.iter_mut()\n\n {\n\n particle.node.unlink();\n\n }\n\n }\n\n\n\n /* start blowing up the ship */\n\n pub fn destroy(&mut self, mut window: &mut Window)\n", "file_path": "src/hero.rs", "rank": 26, "score": 21347.27167972596 }, { "content": "\n\nuse super::bullet;\n\nuse super::collision;\n\nuse super::aliens::random_explosion_vector;\n\n\n\nconst HERO_HEIGHT: f32 = 13.0;\n\nconst HERO_RADIUS: f32 = 5.0;\n\nconst HERO_GRAY: f32 = 0.8;\n\nconst HERO_MOVE_STEP: f32 = 1.0;\n\nconst HERO_Y_BASE: f32 = -90.0;\n\n\n\npub const HERO_Y_FLOOR: f32 = HERO_Y_BASE - (HERO_HEIGHT / 2.0);\n\n\n\nconst BULLET_Y_START: f32 = HERO_Y_BASE + (HERO_HEIGHT / 2.0);\n\nconst BULLET_RADIUS: f32 = 2.0;\n\nconst BULLET_COLOR_R: f32 = 1.0;\n\nconst BULLET_COLOR_G: f32 = 0.0;\n\nconst BULLET_COLOR_B: f32 = 0.0;\n\nconst BULLET_ASCENT: f32 = 2.0;\n\n\n\n/* when the ship explodes, we need to animate its debris particles */\n", "file_path": "src/hero.rs", "rank": 27, "score": 21345.073662254512 }, { "content": " {\n\n self.state = State::Dead;\n\n while self.debris.len() > 0\n\n {\n\n let mut particle = self.debris.pop();\n\n particle.unwrap().node.unlink();\n\n }\n\n }\n\n },\n\n State::Dead =>\n\n {\n\n /* prepare to ressurrect the ship. 
if we're out of lives, let\n\n * the main game loop catch that */\n\n self.time_of_death = None;\n\n self.state = State::Alive;\n\n }\n\n }\n\n\n\n if self.bullet.is_some() == true\n\n {\n", "file_path": "src/hero.rs", "rank": 28, "score": 21342.177598384438 }, { "content": " {\n\n self.time_of_death = Some(Instant::now());\n\n self.state = State::Dying;\n\n\n\n /* create particles of exploding debris */\n\n let mut rnd = rand::thread_rng();\n\n for _ in 0..20\n\n {\n\n let mut particle = Debris\n\n {\n\n node: window.add_cube(2.0, 2.0, 2.0),\n\n x: random_explosion_vector(&mut rnd),\n\n y: random_explosion_vector(&mut rnd).abs(), /* only explode upwards */\n\n z: random_explosion_vector(&mut rnd),\n\n };\n\n\n\n /* color the debris a firey red and move it into position of the player's ship */ \n\n particle.node.set_color(1.0, 0.2, 0.2);\n\n particle.node.append_translation(&Translation3::new(self.x, self.y, self.z));\n\n\n", "file_path": "src/hero.rs", "rank": 29, "score": 21341.76966610694 }, { "content": " /* animate the bullet */\n\n self.bullet.as_mut().unwrap().animate();\n\n }\n\n }\n\n\n\n /* create debris and animate them when the ship explodes */\n\n fn explode(&mut self)\n\n {\n\n let rotate = UnitQuaternion::from_axis_angle(&Vector3::y_axis(), 0.10);\n\n for particle in self.debris.iter_mut()\n\n {\n\n particle.node.append_translation(&Translation3::new(particle.x, particle.y, particle.z));\n\n particle.node.prepend_to_local_rotation(&rotate);\n\n }\n\n }\n\n\n\n /* fire a new bullet if one isn't already in play */\n\n pub fn fire(&mut self, mut window: &mut Window)\n\n {\n\n if self.bullet.is_some() == false\n", "file_path": "src/hero.rs", "rank": 30, "score": 21339.048478329536 }, { "content": "\n\n pub fn move_right(&mut self)\n\n {\n\n self.move_ship(0.0 - HERO_MOVE_STEP);\n\n }\n\n\n\n fn move_ship(&mut self, distance: f32)\n\n {\n\n self.x = self.x + distance;\n\n self.ship.append_translation(&Translation3::new(distance, 0.0, 0.0));\n\n 
}\n\n}\n\n\n", "file_path": "src/hero.rs", "rank": 31, "score": 21337.41568072701 }, { "content": " {\n\n let scenario = collision::Collision\n\n {\n\n a: collision::CollisionObject{ x: x, y: y },\n\n b: collision::CollisionObject{ x: self.x, y: self.y }\n\n };\n\n\n\n return collision::check(scenario);\n\n }\n\n\n\n /* returns Some(x, y, z) coords of the ship */\n\n pub fn get_coords(&self) -> (f32, f32, f32)\n\n {\n\n (self.x, self.y, self.z)\n\n }\n\n\n\n pub fn move_left(&mut self)\n\n {\n\n self.move_ship(HERO_MOVE_STEP);\n\n }\n", "file_path": "src/hero.rs", "rank": 32, "score": 21335.5953252298 }, { "content": "/* Space invaders in Rust\n\n *\n\n * Alien designs\n\n *\n\n * Game concept by Tomohiro Nishikado / Taito\n\n * Rust code By Chris Williams <[email protected]>\n\n *\n\n * Written for fun. See LICENSE.\n\n *\n\n */\n\n\n\nextern crate glfw;\n\nextern crate kiss3d;\n\nextern crate nalgebra as na;\n\nextern crate rand;\n\n\n\nuse std::time::Instant;\n\nuse rand::Rng;\n\nuse na::{ Vector3, Translation3, UnitQuaternion };\n\nuse kiss3d::window::Window;\n", "file_path": "src/aliens.rs", "rank": 43, "score": 19586.174490564757 }, { "content": "use kiss3d::scene::SceneNode;\n\n\n\nuse super::bullet;\n\nuse super::collision;\n\n\n\nconst ALIEN_HEIGHT: f32 = 10.0; /* in 3d units */\n\nconst ALIEN_WIDTH: f32 = 13.0; /* in 3d units */\n\nconst ALIENS_PER_ROW: i32 = 11;\n\nconst ALIEN_ROWS: i32 = 5;\n\nconst ALIEN_TOP_Y: i32 = 9; /* in whole number of aliens from game world center */\n\nconst ALIEN_SIDE_SPACE: i32 = 3; /* space either side (in nr of aliens) of alien pattern */\n\n\n\npub const ALIEN_POINTS: i32 = 100; /* number of points per alien */\n\npub const ALIEN_Y_CEILING: f32 = (ALIEN_TOP_Y as f32) * ALIEN_HEIGHT;\n\n\n\nconst BOMB_RADIUS: f32 = 4.0;\n\nconst BOMB_COLOR_R: f32 = 0.0;\n\nconst BOMB_COLOR_G: f32 = 1.0;\n\nconst BOMB_COLOR_B: f32 = 0.0;\n\nconst BOMB_DESCENT: f32 = -1.0;\n\n\n\n/* aliens are made up of a number of pixels */\n", "file_path": 
"src/aliens.rs", "rank": 44, "score": 19581.411949157384 }, { "content": "\n\n /* update the positions of the aliens and check to see if any collided with the invisible walls\n\n * which causes them to move down a row and reverse movement. also animate the aliens' bomb */\n\n pub fn animate(&mut self)\n\n {\n\n let mut hit_wall_right = false;\n\n let mut hit_wall_left = false;\n\n\n\n /* animate the aliens' bomb */\n\n if self.bomb.is_some() == true\n\n {\n\n self.bomb.as_mut().unwrap().animate();\n\n }\n\n\n\n /* scale the speed depending on how many aliens are alive - fewer means faster */\n\n let aliens = (ALIENS_PER_ROW * ALIEN_ROWS) as usize - self.squadron.iter().filter(|f| f.state == State::Alive).count();\n\n let step = 0.1 + (aliens as f32 * 0.008);\n\n\n\n /* move the aliens one by one, and check for collision with side walls */\n\n for baddie in self.squadron.iter_mut().filter(|f| f.state != State::Dead)\n", "file_path": "src/aliens.rs", "rank": 45, "score": 19580.496008506914 }, { "content": " baddie.spawn(x as f32 * ALIEN_WIDTH, y as f32 * ALIEN_HEIGHT, 0.0, rotation);\n\n baddies.squadron.push(baddie);\n\n }\n\n }\n\n\n\n /* sort baddies from high y to low y, for bomb dropping code */\n\n baddies.squadron.sort_by(|a, b| b.y.partial_cmp(&a.y).unwrap());\n\n\n\n return baddies;\n\n }\n\n\n\n /* ensure all objects are removed from the game world */\n\n pub fn delete(&mut self)\n\n {\n\n for baddie in self.squadron.iter_mut()\n\n {\n\n baddie.delete();\n\n }\n\n self.destroy_bomb();\n\n }\n", "file_path": "src/aliens.rs", "rank": 46, "score": 19577.490782708766 }, { "content": " }\n\n }\n\n\n\n return lowest;\n\n }\n\n\n\n /* check to see if any alive aliens collide with the thing at x,y. 
if one does,\n\n * then blow up the alien, removing it from the game, and return a hit */\n\n pub fn collision(&mut self, x: f32, y: f32) -> collision::CollisionOutcome\n\n {\n\n for baddie in self.squadron.iter_mut().filter(|b| b.state == State::Alive)\n\n {\n\n let scenario = collision::Collision\n\n {\n\n a: collision::CollisionObject{ x: x, y: y },\n\n b: collision::CollisionObject{ x: baddie.x, y: baddie.y }\n\n };\n\n\n\n match collision::check(scenario)\n\n {\n", "file_path": "src/aliens.rs", "rank": 47, "score": 19577.093179123996 }, { "content": " * the vector remains sorted from top left to bottom right... */\n\n let lowest = self.squadron.iter().filter(|f| f.x == baddie.x && f.y <= baddie.y).last().unwrap();\n\n\n\n let x = lowest.x;\n\n let y = lowest.y - (ALIEN_HEIGHT / 2.0); /* start bomb just below alien */\n\n self.bomb = Some(bullet::Bullet::new(&mut window, x, y, BOMB_RADIUS,\n\n BOMB_COLOR_R, BOMB_COLOR_G, BOMB_COLOR_B,\n\n BOMB_DESCENT));\n\n }\n\n }\n\n\n\n /* remove bomb from game */\n\n pub fn destroy_bomb(&mut self)\n\n {\n\n if self.bomb.as_mut().is_some() == true\n\n {\n\n self.bomb.as_mut().unwrap().destroy();\n\n self.bomb = None;\n\n }\n\n }\n", "file_path": "src/aliens.rs", "rank": 48, "score": 19576.558109734957 }, { "content": " state: State::Alive,\n\n last_time: Instant::now(),\n\n time_of_death: None,\n\n rng: rand::thread_rng(),\n\n drop_steps: 0.0,\n\n movement: Movement::Right\n\n }\n\n }\n\n\n\n /* calling new() just initializes the alien. 
call spawn() to actually create it on screen\n\n * => center_x, center_y, center_z = coords for the center of the alien model\n\n * angle = y-axis rotation angle to apply to the alien */\n\n pub fn spawn(&mut self, center_x: f32, center_y: f32, center_z: f32, angle: f32)\n\n {\n\n self.x = center_x;\n\n self.y = center_y;\n\n self.z = center_z;\n\n\n\n /* spin through the array of pixels to create this monster */\n\n for pixel in self.pixels.iter_mut()\n", "file_path": "src/aliens.rs", "rank": 49, "score": 19573.91549525285 }, { "content": " {\n\n /* animate and move this particular alien */\n\n baddie.animate(step);\n\n\n\n /* if we're moving left or right, check to see if we hit a wall */\n\n match baddie.movement\n\n {\n\n Movement::Left | Movement::Right =>\n\n {\n\n /* did the baddie just collide with a wall om the left? */\n\n if baddie.x > ((ALIENS_PER_ROW / 2) + 1 + ALIEN_SIDE_SPACE) as f32 * ALIEN_WIDTH\n\n {\n\n hit_wall_left = true;\n\n }\n\n\n\n /* did the baddie just collide with a wall om the right? 
*/\n\n if baddie.x < ((ALIENS_PER_ROW / 2) + ALIEN_SIDE_SPACE) as f32 * (0.0 - ALIEN_WIDTH)\n\n {\n\n hit_wall_right = true;\n\n }\n", "file_path": "src/aliens.rs", "rank": 50, "score": 19572.80474993469 }, { "content": "impl Aliens\n\n{\n\n /* construct a playfield of aliens, pass it back as a vector of aliens */\n\n pub fn new(mut window: &mut Window) -> Aliens\n\n {\n\n let mut baddies = Aliens\n\n {\n\n squadron: Vec::<Alien>::with_capacity(55),\n\n bomb: None\n\n };\n\n\n\n /* generate a formation ALIENS_PER_ROW number of aliens wide, centered\n\n * on the x-axis, and ALIEN_ROWS number of aliens tall, from ALIEN_TOP_Y downwards.\n\n * ALIEN_TOP_Y is in whole number of aliens from the center of the playfield */\n\n for y in (ALIEN_TOP_Y - ALIEN_ROWS)..ALIEN_TOP_Y\n\n {\n\n for x in 0 - (ALIENS_PER_ROW / 2)..(ALIENS_PER_ROW / 2) + 1\n\n {\n\n let mut baddie = Alien::new(&mut window);\n\n let rotation = 0.4 * ((x + y) as f32);\n", "file_path": "src/aliens.rs", "rank": 51, "score": 19572.662711322446 }, { "content": " }\n\n\n\n /* call for each video frame to animate the alien\n\n * => step = number of coordinate points to move. 0.0 for no movement */\n\n pub fn animate(&mut self, step: f32)\n\n {\n\n /* are we supposed to be exploding or be alive doing stuff? 
*/\n\n match self.state\n\n {\n\n State::Alive =>\n\n {\n\n /* rotate the alien slightly */\n\n self.rotate(0.018);\n\n\n\n /* switch between animation frames every second */\n\n if self.last_time.elapsed().as_secs() > 1\n\n {\n\n self.switch();\n\n self.last_time = Instant::now();\n\n }\n", "file_path": "src/aliens.rs", "rank": 52, "score": 19571.502969346508 }, { "content": "\n\n /* drop a bomb if one isn't already in play */\n\n pub fn fire(&mut self, mut window: &mut Window)\n\n {\n\n if self.bomb.is_none() == true\n\n {\n\n /* work out how many aliens are alive and therefore qualify to drop a bomb */\n\n let aliens = self.squadron.iter().filter(|f| f.state == State::Alive).count();\n\n\n\n if aliens == 0\n\n {\n\n return; /* no alive aliens means no bombs dropped */\n\n }\n\n\n\n /* work out which alien should drop a bomb next. the lowest alien in each column can\n\n * drop a bomb. first pick a random alive alien so we get its x, y position */\n\n let index = rand::thread_rng().gen::<usize>() % aliens;\n\n let baddie = self.squadron.iter().filter(|f| f.state == State::Alive).nth(index).unwrap();\n\n\n\n /* now find the alien in the same x column with the lowest y. 
this assumes\n", "file_path": "src/aliens.rs", "rank": 53, "score": 19570.645437418818 }, { "content": "\n\n /* if one or more of the aliens hit a side wall, then change their directions so they're\n\n * all moving downwards */\n\n if hit_wall_right == true\n\n {\n\n for faller in self.squadron.iter_mut()\n\n {\n\n faller.drop_steps = 0.0;\n\n faller.movement = Movement::DownLeft; /* go down then left */\n\n }\n\n }\n\n if hit_wall_left == true\n\n {\n\n for faller in self.squadron.iter_mut()\n\n {\n\n faller.drop_steps = 0.0;\n\n faller.movement = Movement::DownRight; /* go down then left */\n\n }\n\n }\n\n }\n", "file_path": "src/aliens.rs", "rank": 54, "score": 19569.79439097288 }, { "content": " {\n\n /* create a cube pixel aka a scene node */\n\n let mut p = self.model.add_cube(pixel.width, pixel.height, pixel.depth);\n\n\n\n /* move pixel into position within the alien */\n\n p.append_translation(&Translation3::new(pixel.x, pixel.y, pixel.z));\n\n\n\n /* color it */\n\n p.set_color(pixel.r, pixel.g, pixel.b);\n\n\n\n /* keep a record of the pixel's scene node */\n\n (*pixel).node = Some(p);\n\n }\n\n\n\n /* move the whole model into position and rotate it as required */\n\n self.model.append_translation(&Translation3::new(center_x, center_y, center_z));\n\n self.rotate(angle);\n\n }\n\n\n\n /* kill off this alien by marking it as dying and calculate how it's going to explode into pieces */\n", "file_path": "src/aliens.rs", "rank": 55, "score": 19569.602998733902 }, { "content": "\n\n /* return true if all aliens in the squadron are finally dead */\n\n pub fn all_dead(&mut self) -> bool\n\n {\n\n match self.squadron.iter().filter(|f| f.state != State::Dead).count()\n\n {\n\n 0 => true,\n\n _ => false\n\n }\n\n }\n\n\n\n /* return the lowest Y coord of the alien squadron */\n\n pub fn lowest_y(&mut self) -> f32\n\n {\n\n let mut lowest = ALIEN_Y_CEILING;\n\n for baddie in self.squadron.iter()\n\n {\n\n if baddie.y < lowest\n\n {\n\n lowest = baddie.y;\n", 
"file_path": "src/aliens.rs", "rank": 56, "score": 19567.491109922306 }, { "content": " {\n\n self.explode();\n\n },\n\n\n\n _ => {}\n\n }\n\n }\n\n\n\n /* rotate the whole alien model by given angle along y-axis */\n\n fn rotate(&mut self, angle: f32)\n\n {\n\n let rotate = UnitQuaternion::from_axis_angle(&Vector3::y_axis(), angle);\n\n self.model.prepend_to_local_rotation(&rotate);\n\n }\n\n\n\n /* call this to switch pixels between their base and translated positions.\n\n * this allows the alien to have two frames of animation */\n\n fn switch(&mut self)\n\n {\n\n match self.frame\n", "file_path": "src/aliens.rs", "rank": 57, "score": 19565.484444376667 }, { "content": "\n\n let mut tx = 0.0;\n\n let mut ty = 0.0;\n\n\n\n /* step alien to the left or right or down */\n\n match self.movement\n\n {\n\n Movement::Left => tx = step,\n\n Movement::Right => tx = 0.0 - step,\n\n Movement::DownRight | Movement::DownLeft => ty = 0.0 - (step * 2.0)\n\n }\n\n\n\n /* update position of the alien */\n\n self.x = self.x + tx;\n\n self.y = self.y + ty;\n\n self.drop_steps = self.drop_steps + ty;\n\n self.model.append_translation(&Translation3::new(tx, ty, 0.0));\n\n },\n\n\n\n State::Dying =>\n", "file_path": "src/aliens.rs", "rank": 58, "score": 19565.382227879923 }, { "content": " {\n\n pixel.node.as_mut().unwrap().set_color(r, g, b);\n\n }\n\n }\n\n\n\n /* animate blowing up the alien: scatter its compoents, spinning them, and then delete them */\n\n fn explode(&mut self)\n\n {\n\n let rotate = UnitQuaternion::from_axis_angle(&Vector3::y_axis(), 0.05);\n\n let secs_since_death = self.time_of_death.unwrap().elapsed().as_secs();\n\n\n\n for pixel in self.pixels.iter_mut()\n\n {\n\n pixel.node.as_mut().unwrap().append_translation(&Translation3::new(pixel.explode_x, pixel.explode_y, pixel.explode_z));\n\n pixel.node.as_mut().unwrap().prepend_to_local_rotation(&rotate);\n\n\n\n /* change color of the pixel based on seconds passed */\n\n match secs_since_death\n\n {\n\n 0 | 1 => 
pixel.node.as_mut().unwrap().set_color(1.0, 0.4, 0.0),\n", "file_path": "src/aliens.rs", "rank": 59, "score": 19563.867385446367 }, { "content": " };\n\n }\n\n\n\n /* remove all objects (pixels) from the game world */\n\n pub fn delete(&mut self)\n\n {\n\n for pixel in self.pixels.iter_mut()\n\n {\n\n pixel.node.as_mut().unwrap().unlink();\n\n pixel.node = None;\n\n }\n\n }\n\n\n\n /* override_color\n\n Set all the pixels in an alien to a particular color\n\n => r, g, b = new RGB color of all the alien's pixel blocks\n\n */\n\n pub fn override_color(&mut self, r: f32, g: f32, b: f32)\n\n {\n\n for pixel in self.pixels.iter_mut()\n", "file_path": "src/aliens.rs", "rank": 60, "score": 19563.003557846077 }, { "content": " },\n\n\n\n /* if we're going down then make sure we don't go down too far - just one row */\n\n Movement::DownLeft =>\n\n {\n\n if baddie.drop_steps < 0.0 - ALIEN_HEIGHT\n\n {\n\n baddie.movement = Movement::Left\n\n }\n\n },\n\n\n\n Movement::DownRight =>\n\n {\n\n if baddie.drop_steps < 0.0 - ALIEN_HEIGHT\n\n {\n\n baddie.movement = Movement::Right\n\n }\n\n }\n\n }\n\n }\n", "file_path": "src/aliens.rs", "rank": 61, "score": 19562.983653211977 }, { "content": "impl Alien\n\n{\n\n /* allocate and initialize a new alien onject */\n\n pub fn new(window: &mut Window) -> Alien\n\n {\n\n Alien\n\n {\n\n /* describe the alien in blocks of pixels. could load this in as a model but\n\n * I want to animate this programmatically and I just want to draw something\n\n * to the screen. i've included the pixels going from left to right, top to\n\n * bottom, grouping horizontal lines into bars, and leaving individual pixels\n\n * as is. 
the overall design is:\n\n\n\n * *\n\n * *\n\n *******\n\n ** *** **\n\n ***********\n\n * ******* *\n\n * * * *\n", "file_path": "src/aliens.rs", "rank": 62, "score": 19560.86769334874 }, { "content": " 2 => pixel.node.as_mut().unwrap().set_color(1.0, 0.6, 0.0),\n\n 3 => pixel.node.as_mut().unwrap().set_color(1.0, 0.8, 0.0),\n\n 4 => pixel.node.as_mut().unwrap().set_color(1.0, 1.0, 0.0),\n\n 5 => pixel.node.as_mut().unwrap().set_color(0.8, 0.8, 0.0),\n\n 6 => pixel.node.as_mut().unwrap().set_color(0.6, 0.6, 0.0),\n\n 7 => pixel.node.as_mut().unwrap().set_color(0.4, 0.4, 0.0),\n\n _ => pixel.node.as_mut().unwrap().set_color(0.2, 0.2, 0.0),\n\n };\n\n }\n\n\n\n /* after a period of seconds, wipe away the remains: mark all components of the alien invisible,\n\n * unlink them from the scene, and mark the alien as dead. */\n\n if secs_since_death > 10\n\n {\n\n for pixel in self.pixels.iter_mut()\n\n {\n\n pixel.node.as_mut().unwrap().unlink();\n\n }\n\n self.state = State::Dead;\n\n }\n\n }\n\n}\n\n\n\n/* ------------------------------------------------------------------------------ */\n\n\n\n/* generate a random value suitable for exploding a pixel */\n", "file_path": "src/aliens.rs", "rank": 63, "score": 19559.86469533327 }, { "content": " pub fn die(&mut self)\n\n {\n\n /* only aliens still alive can die */\n\n match self.state\n\n {\n\n State::Alive => {},\n\n _ => return\n\n };\n\n\n\n self.state = State::Dying;\n\n\n\n /* generate random x,y,z vector for explosion trajectory for this pixel */\n\n for pixel in self.pixels.iter_mut()\n\n {\n\n pixel.explode_x = random_explosion_vector(&mut self.rng);\n\n pixel.explode_y = random_explosion_vector(&mut self.rng);\n\n pixel.explode_z = random_explosion_vector(&mut self.rng);\n\n }\n\n\n\n self.time_of_death = Some(Instant::now());\n", "file_path": "src/aliens.rs", "rank": 64, "score": 19559.240628593856 }, { "content": " {\n\n Frame::Base =>\n\n {\n\n /* move pixels into alternate positions, and update alien frame 
state */\n\n for pixel in self.pixels.iter_mut()\n\n {\n\n pixel.node.as_mut().unwrap().append_translation(&Translation3::new(pixel.tx, pixel.ty, pixel.tz));\n\n }\n\n self.frame = Frame::Translated;\n\n },\n\n\n\n Frame::Translated =>\n\n {\n\n /* move pixels back to normal positions, and update alien frame state */\n\n for pixel in self.pixels.iter_mut()\n\n {\n\n pixel.node.as_mut().unwrap().append_translation(&Translation3::new(pixel.tx * -1.0, pixel.ty * -1.0, pixel.tz * -1.0));\n\n }\n\n self.frame = Frame::Base;\n\n }\n", "file_path": "src/aliens.rs", "rank": 65, "score": 19557.38716640744 }, { "content": " collision::CollisionOutcome::Hit =>\n\n {\n\n baddie.die();\n\n return collision::CollisionOutcome::Hit;\n\n },\n\n\n\n _ => {}\n\n };\n\n }\n\n\n\n return collision::CollisionOutcome::Miss;\n\n }\n\n}\n", "file_path": "src/aliens.rs", "rank": 66, "score": 19552.765353897586 }, { "content": " Pixel { width: 7.0, height: 1.0, depth: 1.0, x: 0.0, y: -1.0, z: 0.0, tx: 0.0, ty: 0.0, tz: 0.0, r: 0.2, g: 1.0, b: 0.2, node: None, explode_x: 0.0, explode_y: 0.0, explode_z: 0.0 },\n\n Pixel { width: 1.0, height: 1.0, depth: 1.0, x: -5.0, y: -1.0, z: 0.0, tx: 0.0, ty: 3.0, tz: 0.0, r: 0.2, g: 1.0, b: 0.2, node: None, explode_x: 0.0, explode_y: 0.0, explode_z: 0.0 },\n\n\n\n Pixel { width: 1.0, height: 1.0, depth: 1.0, x: -5.0, y: -2.0, z: 0.0, tx: 0.0, ty: 3.0, tz: 0.0, r: 0.2, g: 1.0, b: 0.2, node: None, explode_x: 0.0, explode_y: 0.0, explode_z: 0.0 },\n\n Pixel { width: 1.0, height: 1.0, depth: 1.0, x: -3.0, y: -2.0, z: 0.0, tx: 0.0, ty: 0.0, tz: 0.0, r: 0.2, g: 1.0, b: 0.2, node: None, explode_x: 0.0, explode_y: 0.0, explode_z: 0.0 },\n\n Pixel { width: 1.0, height: 1.0, depth: 1.0, x: 3.0, y: -2.0, z: 0.0, tx: 0.0, ty: 0.0, tz: 0.0, r: 0.2, g: 1.0, b: 0.2, node: None, explode_x: 0.0, explode_y: 0.0, explode_z: 0.0 },\n\n Pixel { width: 1.0, height: 1.0, depth: 1.0, x: 5.0, y: -2.0, z: 0.0, tx: 0.0, ty: 3.0, tz: 0.0, r: 0.2, g: 1.0, b: 0.2, node: None, 
explode_x: 0.0, explode_y: 0.0, explode_z: 0.0 },\n\n\n\n Pixel { width: 2.0, height: 1.0, depth: 1.0, x: -1.5, y: -3.0, z: 0.0, tx: -2.0, ty: 0.0, tz: 0.0, r: 0.2, g: 1.0, b: 0.2, node: None, explode_x: 0.0, explode_y: 0.0, explode_z: 0.0 },\n\n Pixel { width: 2.0, height: 1.0, depth: 1.0, x: 1.5, y: -3.0, z: 0.0, tx: 2.0, ty: 0.0, tz: 0.0, r: 0.2, g: 1.0, b: 0.2, node: None, explode_x: 0.0, explode_y: 0.0, explode_z: 0.0 }\n\n ],\n\n\n\n x: 0.0, y: 0.0, z: 0.0, /* default position of alien model's center */\n\n\n\n /* attach all the pixels together as a group */\n\n model: window.add_group(),\n\n\n\n /* start off in normal animation frame */\n\n frame: Frame::Base,\n\n\n", "file_path": "src/aliens.rs", "rank": 67, "score": 19550.513706103156 }, { "content": " ** ** */\n\n\n\n\n\n pixels: vec!\n\n [\n\n Pixel { width: 1.0, height: 1.0, depth: 1.0, x: -3.0, y: 4.0, z: 0.0, tx: 1.0, ty: 0.0, tz: 0.0, r: 0.2, g: 1.0, b: 0.2, node: None, explode_x: 0.0, explode_y: 0.0, explode_z: 0.0 },\n\n Pixel { width: 1.0, height: 1.0, depth: 1.0, x: 3.0, y: 4.0, z: 0.0, tx: -1.0, ty: 0.0, tz: 0.0, r: 0.2, g: 1.0, b: 0.2, node: None, explode_x: 0.0, explode_y: 0.0, explode_z: 0.0 },\n\n\n\n Pixel { width: 1.0, height: 1.0, depth: 1.0, x: -2.0, y: 3.0, z: 0.0, tx: 0.0, ty: 0.0, tz: 0.0, r: 0.2, g: 1.0, b: 0.2, node: None, explode_x: 0.0, explode_y: 0.0, explode_z: 0.0 },\n\n Pixel { width: 1.0, height: 1.0, depth: 1.0, x: 2.0, y: 3.0, z: 0.0, tx: 0.0, ty: 0.0, tz: 0.0, r: 0.2, g: 1.0, b: 0.2, node: None, explode_x: 0.0, explode_y: 0.0, explode_z: 0.0 },\n\n\n\n Pixel { width: 7.0, height: 1.0, depth: 1.0, x: 0.0, y: 2.0, z: 0.0, tx: 0.0, ty: 0.0, tz: 0.0, r: 0.2, g: 1.0, b: 0.2, node: None, explode_x: 0.0, explode_y: 0.0, explode_z: 0.0 },\n\n\n\n Pixel { width: 2.0, height: 1.0, depth: 1.0, x: -4.0, y: 1.0, z: 0.0, tx: 0.0, ty: 0.0, tz: 0.0, r: 0.2, g: 1.0, b: 0.2, node: None, explode_x: 0.0, explode_y: 0.0, explode_z: 0.0 },\n\n Pixel { width: 3.0, height: 1.0, depth: 1.0, x: 
0.0, y: 1.0, z: 0.0, tx: 0.0, ty: 0.0, tz: 0.0, r: 0.2, g: 1.0, b: 0.2, node: None, explode_x: 0.0, explode_y: 0.0, explode_z: 0.0 },\n\n Pixel { width: 2.0, height: 1.0, depth: 1.0, x: 4.0, y: 1.0, z: 0.0, tx: 0.0, ty: 0.0, tz: 0.0, r: 0.2, g: 1.0, b: 0.2, node: None, explode_x: 0.0, explode_y: 0.0, explode_z: 0.0 },\n\n\n\n Pixel { width: 11.0, height: 1.0, depth: 1.0, x: 0.0, y: 0.0, z: 0.0, tx: 0.0, ty: 0.0, tz: 0.0, r: 0.2, g: 1.0, b: 0.2, node: None, explode_x: 0.0, explode_y: 0.0, explode_z: 0.0 },\n\n\n\n Pixel { width: 1.0, height: 1.0, depth: 1.0, x: 5.0, y: -1.0, z: 0.0, tx: 0.0, ty: 3.0, tz: 0.0, r: 0.2, g: 1.0, b: 0.2, node: None, explode_x: 0.0, explode_y: 0.0, explode_z: 0.0 },\n", "file_path": "src/aliens.rs", "rank": 68, "score": 19545.131486355716 }, { "content": "[![Build Status](https://travis-ci.org/diodesign/rustinvaders.svg?branch=master)](https://travis-ci.org/diodesign/rustinvaders)\n\n\n\n# Rustinvaders\n\n\n\nThis is a simple 3D game written in Rust, and inspired by the arcade classic _Space Invaders_. This was created purely for fun while taking my first steps in graphics and games programming. 
It uses the extremely handy [Kiss3D](http://kiss3d.org/) engine.\n\n\n\n## Obligatory screenshot\n\n\n\n![Screenshot of the player's ship (grey cone) firing a bullet (red sphere) at waves of green aliens, which explode when hit, while avoiding the invaders' bombs (green spheres)](https://raw.githubusercontent.com/diodesign/rustinvaders/screenshots/early-screenshot.png)\n\n\n\n## Building\n\n\n\nI recommend building this in an official [Debian-based Rust Docker container](https://hub.docker.com/_/rust/) with the following extra packages installed: `xorg-dev libglu1-mesa-dev`\n\n\n\nMake sure you have the above in place, checkout this project's code from GitHub, compile, and run using:\n\n```\n\ngit clone https://github.com/diodesign/rustinvaders.git\n\ncd rustinvaders\n\ncargo run --release\n\n```\n\n\n\n## Playing\n\n\n\nPress `z` to move to the left, `x` to go right, `Return` to fire. You can only have one bullet on screen at a time: that's a deliberate restriction to keep the gameplay faithful to the original. The aliens also drop bombs, and speed up as you destroy more of them - again, like the original. If you're hit by an alien or one of their bombs, you'll lose a life. You have three lives until it's game over. If the aliens manage to get below your ship, it's immediately game over.\n\n\n\n## Feedback\n\n\n\nThis is a work in progress - there are many little things to add and improve. If you have any suggestions, patches, complaints, etc, then submit an issue or pull request, or [try emailing me](mailto:[email protected]). Cheers for taking an interest.\n", "file_path": "README.md", "rank": 69, "score": 12210.64474092377 }, { "content": "/* Space invaders in Rust\n\n *\n\n * 2D collision checking\n\n *\n\n * Game concept by Tomohiro Nishikado / Taito\n\n * Rust code By Chris Williams <[email protected]>\n\n *\n\n * Written for fun. 
See LICENSE.\n\n *\n\n */\n\n\n\nconst COLLISION_TOLERANCE: f32 = 5.0; /* objects closer than this are considered collided */\n\n\n\n/* either objects hit, miss, or leave their y-bounds\n\n * (no need to check for x or z bounds in this game)\n\n */\n\n#[derive(PartialEq)]\n\npub enum CollisionOutcome\n\n{\n\n Hit,\n", "file_path": "src/collision.rs", "rank": 70, "score": 31.524882686972447 }, { "content": " Miss\n\n}\n\n\n\n#[derive(Clone, Copy)]\n\npub struct CollisionObject\n\n{\n\n pub x: f32, pub y: f32 /* x,y coords of object */\n\n}\n\n\n\n/* describe a collision scenario */\n\n#[derive(Clone, Copy)]\n\npub struct Collision\n\n{\n\n /* two objects to check */\n\n pub a: CollisionObject,\n\n pub b: CollisionObject,\n\n}\n\n\n", "file_path": "src/collision.rs", "rank": 71, "score": 7.406719415891018 } ]
Rust
src/lib.rs
tekjar/rumqtt-coroutines
10212a4f10c697a0ad23217257039e6045ca9029
#![feature(proc_macro, conservative_impl_trait, generators)] extern crate futures_await as futures; extern crate tokio_core; extern crate tokio_io; extern crate tokio_timer; extern crate mqtt3; extern crate bytes; #[macro_use] extern crate log; mod codec; mod packet; use std::net::{IpAddr, Ipv4Addr, SocketAddr}; use std::io::{self, ErrorKind}; use std::error::Error; use std::time::Duration; use std::thread; use codec::MqttCodec; use futures::prelude::*; use futures::stream::{Stream, SplitSink}; use futures::sync::mpsc::{self, Sender, Receiver}; use std::sync::mpsc as stdmpsc; use tokio_core::reactor::Core; use tokio_core::net::TcpStream; use tokio_timer::Timer; use tokio_io::AsyncRead; use tokio_io::codec::Framed; use mqtt3::*; #[derive(Debug)] pub enum NetworkRequest { Subscribe(Vec<(TopicPath, QoS)>), Publish(Publish), Ping, } pub fn start(new_commands_tx: stdmpsc::Sender<Sender<NetworkRequest>>) { loop { let new_commands_tx = new_commands_tx.clone(); let mut reactor = Core::new().unwrap(); let handle = reactor.handle(); let address = SocketAddr::new(IpAddr::V4(Ipv4Addr::new(127, 0, 0, 1)), 1883); let (commands_tx, commands_rx) = mpsc::channel::<NetworkRequest>(1000); let ping_commands_tx = commands_tx.clone(); let client = async_block! 
{ thread::sleep(Duration::new(5, 0)); let stream = await!(TcpStream::connect(&address, &handle))?; let connect = packet::gen_connect_packet("rumqtt-coro", 10, true, None, None); let framed = stream.framed(MqttCodec); let framed = await!(framed.send(connect)).unwrap(); new_commands_tx.send(commands_tx).unwrap(); let (sender, receiver) = framed.split(); handle.spawn(ping_timer(ping_commands_tx).then(|result| { match result { Ok(_) => println!("Ping timer done"), Err(e) => println!("Ping timer IO error {:?}", e), } Ok(()) })); handle.spawn(command_read(commands_rx, sender).then(|result| { match result { Ok(_) => println!("Command receiver done"), Err(e) => println!("Command IO error {:?}", e), } Ok(()) })); #[async] for msg in receiver { println!("message = {:?}", msg); } println!("Done with network receiver !!"); Ok::<(), io::Error>(()) }; let _response = reactor.run(client); println!("{:?}", _response); } } #[async] fn ping_timer(mut commands_tx: Sender<NetworkRequest>) -> io::Result<()> { let timer = Timer::default(); let keep_alive = 10; let interval = timer.interval(Duration::new(keep_alive, 0)); #[async] for _t in interval { println!("Ping timer fire"); commands_tx = await!( commands_tx.send(NetworkRequest::Ping).or_else(|e| { Err(io::Error::new(ErrorKind::Other, e.description())) }) )?; } Ok(()) } #[async] fn command_read(commands_rx: Receiver<NetworkRequest>, mut sender: SplitSink<Framed<TcpStream, MqttCodec>>) -> io::Result<()> { let commands_rx = commands_rx.or_else(|_| { Err(io::Error::new(ErrorKind::Other, "Rx Error")) }); #[async] for command in commands_rx { println!("command = {:?}", command); let packet = match command { NetworkRequest::Publish(publish) => { Packet::Publish(publish) } NetworkRequest::Ping => { packet::gen_pingreq_packet() } _ => unimplemented!() }; sender = await!(sender.send(packet))? } Ok(()) }
#![feature(proc_macro, conservative_impl_trait, generators)] extern crate futures_await as futures; extern crate tokio_core; extern crate tokio_io; extern crate tokio_timer; extern crate mqtt3; extern crate bytes; #[macro_use] extern crate log; mod codec; mod packet; use std::net::{IpAddr, Ipv4Addr, SocketAddr}; use std::io::{self, ErrorKind}; use std::error::Error; use std::time::Duration; use std::thread; use codec::MqttCodec; use futures::prelude::*; use futures::stream::{Stream, SplitSink}; use futures::sync::mpsc::{self, Sender, Receiver}; use std::sync::mpsc as stdmpsc; use tokio_core::reactor::Core; use tokio_core::net::TcpStream; use tokio_timer::Timer; use tokio_io::AsyncRead; use tokio_io::codec::Framed; use mqtt3::*; #[derive(Debug)] pub enum NetworkRequest { Subscribe(Vec<(TopicPath, QoS)>), Publish(Publish), Ping, } pub fn start(new_commands_tx: stdmpsc::Sender<Sender<NetworkRequest>>) { loop { let new_commands_tx = new_commands_tx.clone(); let mut reactor = Core::new().unwrap(); let handle = react
#[async] fn ping_timer(mut commands_tx: Sender<NetworkRequest>) -> io::Result<()> { let timer = Timer::default(); let keep_alive = 10; let interval = timer.interval(Duration::new(keep_alive, 0)); #[async] for _t in interval { println!("Ping timer fire"); commands_tx = await!( commands_tx.send(NetworkRequest::Ping).or_else(|e| { Err(io::Error::new(ErrorKind::Other, e.description())) }) )?; } Ok(()) } #[async] fn command_read(commands_rx: Receiver<NetworkRequest>, mut sender: SplitSink<Framed<TcpStream, MqttCodec>>) -> io::Result<()> { let commands_rx = commands_rx.or_else(|_| { Err(io::Error::new(ErrorKind::Other, "Rx Error")) }); #[async] for command in commands_rx { println!("command = {:?}", command); let packet = match command { NetworkRequest::Publish(publish) => { Packet::Publish(publish) } NetworkRequest::Ping => { packet::gen_pingreq_packet() } _ => unimplemented!() }; sender = await!(sender.send(packet))? } Ok(()) }
or.handle(); let address = SocketAddr::new(IpAddr::V4(Ipv4Addr::new(127, 0, 0, 1)), 1883); let (commands_tx, commands_rx) = mpsc::channel::<NetworkRequest>(1000); let ping_commands_tx = commands_tx.clone(); let client = async_block! { thread::sleep(Duration::new(5, 0)); let stream = await!(TcpStream::connect(&address, &handle))?; let connect = packet::gen_connect_packet("rumqtt-coro", 10, true, None, None); let framed = stream.framed(MqttCodec); let framed = await!(framed.send(connect)).unwrap(); new_commands_tx.send(commands_tx).unwrap(); let (sender, receiver) = framed.split(); handle.spawn(ping_timer(ping_commands_tx).then(|result| { match result { Ok(_) => println!("Ping timer done"), Err(e) => println!("Ping timer IO error {:?}", e), } Ok(()) })); handle.spawn(command_read(commands_rx, sender).then(|result| { match result { Ok(_) => println!("Command receiver done"), Err(e) => println!("Command IO error {:?}", e), } Ok(()) })); #[async] for msg in receiver { println!("message = {:?}", msg); } println!("Done with network receiver !!"); Ok::<(), io::Error>(()) }; let _response = reactor.run(client); println!("{:?}", _response); } }
function_block-function_prefixed
[ { "content": "pub fn gen_pingreq_packet() -> Packet {\n\n Packet::Pingreq\n\n}\n\n\n\n// pub fn gen_pingresp_packet() -> Packet {\n\n// Packet::Pingresp\n\n// }\n\n\n\n// pub fn gen_subscribe_packet(pkid: PacketIdentifier, topics: Vec<SubscribeTopic>) -> Packet {\n\n// Packet::Subscribe(Subscribe {\n\n// pid: pkid,\n\n// topics: topics,\n\n// })\n\n// }\n\n\n\n// pub fn gen_unsubscribe_packet(pkid: PacketIdentifier, topics: Vec<String>) -> Packet {\n\n// Packet::Unsubscribe(Unsubscribe {\n\n// pid: pkid,\n\n// topics: topics,\n\n// })\n", "file_path": "src/packet.rs", "rank": 1, "score": 73334.14492115473 }, { "content": "pub fn gen_connect_packet(id: &str, keep_alive: u16, clean_session: bool,\n\n username: Option<String>,\n\n password: Option<String>)\n\n -> Packet {\n\n Packet::Connect(Connect {\n\n protocol: Protocol::MQTT(4),\n\n keep_alive: keep_alive,\n\n client_id: id.to_string(),\n\n clean_session: clean_session,\n\n last_will: None,\n\n username: username,\n\n password: password,\n\n })\n\n}\n\n\n\n// pub fn gen_disconnect_packet() -> Packet {\n\n// Packet::Disconnect\n\n// }\n\n\n", "file_path": "src/packet.rs", "rank": 4, "score": 46071.11893023107 }, { "content": "fn main() {\n\n let (command_tx, command_rx) = mpsc::channel();\n\n\n\n thread::spawn(move || {\n\n rumqtt_coroutines::start(command_tx);\n\n });\n\n\n\n let mut user_command_tx = command_rx.recv().unwrap(); \n\n\n\n for i in 0..100 {\n\n let publish = Publish {\n\n dup: false,\n\n qos: QoS::AtLeastOnce,\n\n retain: false,\n\n topic_name: \"hello/world\".to_string(),\n\n pid: None,\n\n payload: Arc::new(vec![1, 2, 3])\n\n };\n\n let publish = NetworkRequest::Publish(publish);\n\n user_command_tx = match user_command_tx.send(publish).wait() {\n\n Ok(tx) => tx,\n\n Err(_) => {\n\n let mut user_command_tx = command_rx.recv().unwrap();\n\n user_command_tx\n\n }\n\n };\n\n\n\n thread::sleep(Duration::new(3, 0));\n\n }\n\n}", "file_path": "examples/mqtt.rs", "rank": 5, "score": 22438.188221136505 
}, { "content": "use std::io::{self, ErrorKind, Cursor};\n\nuse std::error::Error;\n\nuse bytes::BytesMut;\n\nuse tokio_io::codec::{Encoder, Decoder};\n\n\n\nuse mqtt3::{self, Packet, MqttWrite, MqttRead};\n\n\n\npub struct MqttCodec;\n\n\n\nimpl Decoder for MqttCodec {\n\n type Item = Packet;\n\n type Error = io::Error;\n\n\n\n fn decode(&mut self, buf: &mut BytesMut) -> io::Result<Option<Packet>> {\n\n // NOTE: `decode` might be called with `buf.len == 0` when prevous\n\n // decode call read all the bytes in the stream. We should return\n\n // Ok(None) in those cases or else the `read` call will return\n\n // Ok(0) => translated to UnexpectedEOF by `byteorder` crate.\n\n // `read` call Ok(0) happens when buffer specified was 0 bytes in len\n\n // https://doc.rust-lang.org/std/io/trait.Read.html#tymethod.read\n", "file_path": "src/codec.rs", "rank": 6, "score": 16613.95709868109 }, { "content": " }\n\n}\n\n\n\nimpl Encoder for MqttCodec {\n\n type Item = Packet;\n\n type Error = io::Error;\n\n\n\n fn encode(&mut self, msg: Packet, buf: &mut BytesMut) -> io::Result<()> {\n\n let mut stream = Cursor::new(Vec::new());\n\n\n\n // TODO: Implement `write_packet` for `&mut BytesMut`\n\n if let Err(e) = stream.write_packet(&msg) {\n\n error!(\"Encode error. 
Error = {:?}\", e);\n\n return Err(io::Error::new(io::ErrorKind::Other, \"Unable to encode!\"));\n\n }\n\n\n\n buf.extend(stream.get_ref());\n\n\n\n Ok(())\n\n }\n\n}", "file_path": "src/codec.rs", "rank": 7, "score": 16611.14939326141 }, { "content": "// }\n\n\n\n// pub fn gen_publish_packet(topic_name: String,\n\n// qos: QoS,\n\n// pkid: Option<PacketIdentifier>,\n\n// retain: bool,\n\n// dup: bool,\n\n// payload: Arc<Vec<u8>>)\n\n// -> Packet {\n\n// Packet::Publish(Publish {\n\n// dup: dup,\n\n// qos: qos,\n\n// retain: retain,\n\n// topic_name: topic_name,\n\n// pid: pkid,\n\n// payload: payload,\n\n// })\n\n// }\n\n\n\n// pub fn gen_puback_packet(pkid: PacketIdentifier) -> Packet {\n", "file_path": "src/packet.rs", "rank": 8, "score": 16609.788019875446 }, { "content": "// Packet::Puback(pkid)\n\n// }\n\n\n\n// pub fn gen_pubrec_packet(pkid: PacketIdentifier) -> Packet {\n\n// Packet::Pubrec(pkid)\n\n// }\n\n\n\n// pub fn gen_pubrel_packet(pkid: PacketIdentifier) -> Packet {\n\n// Packet::Pubrel(pkid)\n\n// }\n\n\n\n// pub fn gen_pubcomp_packet(pkid: PacketIdentifier) -> Packet {\n\n// Packet::Pubcomp(pkid)\n\n// }", "file_path": "src/packet.rs", "rank": 9, "score": 16609.576435501123 }, { "content": "// use std::sync::Arc;\n\n\n\nuse mqtt3::*;\n\n\n", "file_path": "src/packet.rs", "rank": 10, "score": 16609.414113416846 }, { "content": " if buf.len() < 2 {\n\n return Ok(None);\n\n }\n\n\n\n let (packet, len) = {\n\n let mut buf_ref = buf.as_ref();\n\n match buf_ref.read_packet_with_len() {\n\n Err(e) => {\n\n if let mqtt3::Error::Io(e) = e {\n\n match e.kind() {\n\n ErrorKind::TimedOut | ErrorKind::WouldBlock => return Ok(None),\n\n ErrorKind::UnexpectedEof => return Ok(None),\n\n _ => {\n\n error!(\"mqtt3 io error = {:?}\", e);\n\n return Err(io::Error::new(e.kind(), e.description()))\n\n },\n\n }\n\n } else {\n\n error!(\"mqtt3 read error = {:?}\", e);\n\n return Err(io::Error::new(ErrorKind::Other, e.description()));\n", "file_path": "src/codec.rs", 
"rank": 11, "score": 16608.992079609037 }, { "content": " }\n\n }\n\n Ok(v) => v,\n\n }\n\n };\n\n\n\n // NOTE: It's possible that `decode` got called before `buf` has full bytes\n\n // necessary to frame raw bytes into a packet. In that case return Ok(None)\n\n // and the next time decode` gets called, there will be more bytes in `buf`,\n\n // hopefully enough to frame the packet\n\n if buf.len() < len {\n\n return Ok(None);\n\n }\n\n\n\n // println!(\"buf = {:?}\", buf);\n\n // println!(\"{:?}, {:?}, {:?}\", len, packet, buf.len());\n\n\n\n buf.split_to(len);\n\n\n\n Ok(Some(packet))\n", "file_path": "src/codec.rs", "rank": 12, "score": 16608.92032090779 }, { "content": "extern crate rumqtt_coroutines;\n\nextern crate futures_await as futures;\n\nextern crate mqtt3;\n\n\n\nuse std::thread;\n\nuse std::time::Duration;\n\nuse std::sync::Arc;\n\n\n\n// use futures::sync::mpsc;\n\nuse std::sync::mpsc;\n\nuse futures::{Future, Sink};\n\nuse mqtt3::*;\n\n\n\nuse rumqtt_coroutines::NetworkRequest;\n\n\n", "file_path": "examples/mqtt.rs", "rank": 15, "score": 8.528148065059405 } ]
Rust
src/parser_combinator.rs
KoheiAsano/sym_diff
17cdb5972825b9aa1c0662da5499020f53c94cf6
use super::expr::Env; pub type ParseResult<'a, Output> = Result<(&'a str, &'a Env, Output), &'a str>; pub trait Parser<'a, Output> { fn parse(&self, input: &'a str, env: &'a Env) -> ParseResult<'a, Output>; fn map<F, NewOutput>(self, map_fn: F) -> BoxedParser<'a, NewOutput> where Self: Sized + 'a, Output: 'a, NewOutput: 'a, F: Fn(Output) -> NewOutput + 'a, { BoxedParser::new(map(self, map_fn)) } fn pred<F>(self, pred_fn: F) -> BoxedParser<'a, Output> where Self: Sized + 'a, Output: 'a, F: Fn(&Output) -> bool + 'a, { BoxedParser::new(pred(self, pred_fn)) } fn and_then<F, NextParser, NewOutput>(self, f: F) -> BoxedParser<'a, NewOutput> where Self: Sized + 'a, Output: 'a, NewOutput: 'a, NextParser: Parser<'a, NewOutput> + 'a, F: Fn(Output) -> NextParser + 'a, { BoxedParser::new(and_then(self, f)) } } pub struct BoxedParser<'a, Output> { parser: Box<dyn Parser<'a, Output> + 'a>, } impl<'a, Output> BoxedParser<'a, Output> { fn new<P>(parser: P) -> Self where P: Parser<'a, Output> + 'a, { BoxedParser { parser: Box::new(parser), } } } impl<'a, Output> Parser<'a, Output> for BoxedParser<'a, Output> { fn parse(&self, input: &'a str, env: &'a Env) -> ParseResult<'a, Output> { self.parser.parse(input, env) } } impl<'a, F, Output> Parser<'a, Output> for F where F: Fn(&'a str, &'a Env) -> ParseResult<'a, Output>, { fn parse(&self, input: &'a str, env: &'a Env) -> ParseResult<'a, Output> { self(input, env) } } pub fn identifier<'a>(input: &'a str, env: &'a Env) -> ParseResult<'a, (String, &'a Env)> { let mut matched = String::new(); let mut chars = input.chars(); match chars.next() { Some(next) if next.is_alphabetic() => matched.push(next), _ => return Err(input), } while let Some(next) = chars.next() { if next.is_alphanumeric() || next == '-' { matched.push(next); } else { break; } } let next_index = matched.len(); Ok((&input[next_index..], env, (matched, env))) } pub fn any_char<'a>(input: &'a str, env: &'a Env) -> ParseResult<'a, (char, &'a Env)> { match input.chars().next() 
{ Some(next) => Ok((&input[next.len_utf8()..], env, (next, env))), _ => Err(input), } } pub fn match_literal<'a>(expected: &'static str) -> impl Parser<'a, ()> { move |input: &'a str, env: &'a Env| match input.get(0..expected.len()) { Some(next) if next == expected => Ok((&input[expected.len()..], env, ())), _ => Err(input), } } pub fn one_of<'a>(expected: Vec<&'static str>) -> impl Parser<'a, &'static str> { move |input: &'a str, env: &'a Env| { for &e in expected.iter() { match input.get(0..e.len()) { Some(next) if next == e => return Ok((&input[e.len()..], env, e)), _ => continue, } } Err(input) } } pub fn whitespace_char<'a>() -> impl Parser<'a, (char, &'a Env)> { pred(any_char, |c| c.0.is_whitespace()) } pub fn space1<'a>() -> impl Parser<'a, Vec<(char, &'a Env)>> { one_or_more(whitespace_char()) } pub fn space0<'a>() -> impl Parser<'a, Vec<(char, &'a Env)>> { zero_or_more(whitespace_char()) } pub fn map<'a, P, F, A, B>(parser: P, map_fn: F) -> impl Parser<'a, B> where P: Parser<'a, A>, F: Fn(A) -> B, { move |input, env| { parser .parse(input, env) .map(|(next_input, next_env, result)| (next_input, next_env, map_fn(result))) } } pub fn pair<'a, P1, P2, R1, R2>(parser1: P1, parser2: P2) -> impl Parser<'a, (R1, R2)> where P1: Parser<'a, R1>, P2: Parser<'a, R2>, { move |input, env| { parser1 .parse(input, env) .and_then(|(next_input, next_env, result1)| { parser2 .parse(next_input, next_env) .map(|(last_input, last_env, result2)| { (last_input, last_env, (result1, result2)) }) }) } } pub fn left<'a, P1, P2, R1, R2>(parser1: P1, parser2: P2) -> impl Parser<'a, R1> where P1: Parser<'a, R1>, P2: Parser<'a, R2>, { map(pair(parser1, parser2), |(left, _right)| left) } pub fn right<'a, P1, P2, R1, R2>(parser1: P1, parser2: P2) -> impl Parser<'a, R2> where P1: Parser<'a, R1>, P2: Parser<'a, R2>, { map(pair(parser1, parser2), |(_left, right)| right) } pub fn zero_or_one<'a, P, A>(parser: P) -> impl Parser<'a, Option<A>> where P: Parser<'a, A>, { move |mut input, mut env| 
{ let mut result = None; if let Ok((next_input, next_env, item)) = parser.parse(input, env) { env = next_env; input = next_input; result = Some(item); } Ok((input, env, result)) } } pub fn one_or_more<'a, P, A>(parser: P) -> impl Parser<'a, Vec<A>> where P: Parser<'a, A>, { move |mut input, mut env| { let mut result = Vec::new(); if let Ok((next_input, new_env, first_item)) = parser.parse(input, env) { env = new_env; input = next_input; result.push(first_item); } else { return Err(input); } while let Ok((next_input, new_env, first_item)) = parser.parse(input, env) { env = new_env; input = next_input; result.push(first_item); } Ok((input, env, result)) } } pub fn zero_or_more<'a, P, A>(parser: P) -> impl Parser<'a, Vec<A>> where P: Parser<'a, A>, { move |mut input, mut env| { let mut result = Vec::new(); while let Ok((next_input, next_env, first_item)) = parser.parse(input, env) { env = next_env; input = next_input; result.push(first_item); } Ok((input, env, result)) } } pub fn pred<'a, P, A, F>(parser: P, predicate: F) -> impl Parser<'a, A> where P: Parser<'a, A>, F: Fn(&A) -> bool, { move |input, env| { if let Ok((next_input, next_env, value)) = parser.parse(input, env) { if predicate(&value) { return Ok((next_input, next_env, value)); } } Err(input) } } pub fn either<'a, P1, P2, A>(parser1: P1, parser2: P2) -> impl Parser<'a, A> where P1: Parser<'a, A>, P2: Parser<'a, A>, { move |input, env| match parser1.parse(input, env) { ok @ Ok(_) => ok, Err(_) => parser2.parse(input, env), } } pub fn and_then<'a, P, F, A, B, NextP>(parser: P, f: F) -> impl Parser<'a, B> where P: Parser<'a, A>, NextP: Parser<'a, B>, F: Fn(A) -> NextP, { move |input, env| match parser.parse(input, env) { Ok((next_input, next_env, result)) => f(result).parse(next_input, next_env), Err(err) => Err(err), } } pub fn whitespace_wrap<'a, P, A>(parser: P) -> impl Parser<'a, A> where P: Parser<'a, A>, { right(space0(), left(parser, space0())) }
use super::expr::Env; pub type ParseResult<'a, Output> = Result<(&'a str, &'a Env, Output), &'a str>; pub trait Parser<'a, Output> { fn parse(&self, input: &'a str, env: &'a Env) -> ParseResult<'a, Output>; fn map<F, NewOutput>(self, map_fn: F) -> BoxedParser<'a, NewOutput> where Self: Sized + 'a, Output: 'a, NewOutput: 'a, F: Fn(Output) -> NewOutput + 'a, { BoxedParser::new(map(self, map_fn)) } fn pred<F>(self, pred_fn: F) -> BoxedParser<'a, Output> where Self: Sized + 'a, Output: 'a, F: Fn(&Output) -> bool + 'a, { BoxedParser::new(pred(self, pred_fn)) } fn and_then<F, NextParser, NewOutput>(self, f: F) -> BoxedParser<'a, NewOutput> where Self: Sized + 'a, Output: 'a, NewOutput: 'a, NextParser: Parser<'a, NewOutput> + 'a, F: Fn(Output) -> NextParser + 'a, { BoxedParser::new(and_then(self, f)) } } pub struct BoxedParser<'a, Output> { parser: Box<dyn Parser<'a, Output> + 'a>, } impl<'a, Output> BoxedParser<'a, Output> { fn new<P>(parser: P) -> Self where P: Parser<'a, Output> + 'a, { BoxedParser { parser: Box::new(parser), } } } impl<'a, Output> Parser<'a, Output> for BoxedParser<'a, Output> { fn parse(&self, input: &'a str, env: &'a Env) -> ParseResult<'a, Output> { self.parser.parse(input, env) } } impl<'a, F, Output> Parser<'a, Output> for F where F: Fn(&'a str, &'a Env) -> ParseResult<'a, Output>, { fn parse(&self, input: &'a str, env: &'a Env) -> ParseResult<'a, Output> { self(input, env) } } pub fn identifier<'a>(input: &'a str, env: &'a Env) -> ParseResult<'a, (String, &'a Env)> { let mut matched = String::new(); let mut chars = input.chars(); match chars.next() { Some(next) if next.is_alphabetic() => matched.push(next), _ => return Err(input), } while let Some(next) = chars.next() { if next.is_alphanumeric() || next == '-' { matched.push(next); } else { break; } } let next_index = matched.len(); Ok((&input[next_index..], env, (matched, env))) } pub fn any_char<'a>(input: &'a str, env: &'a Env) -> ParseResult<'a, (char, &'a Env)> { match input.chars().next() 
{ Some(next) => Ok((&input[next.len_utf8()..], env, (next, env))), _ => Err(input), } } pub fn match_literal<'a>(expected: &'static str) -> impl Parser<'a, ()> { move |input: &'a str, env: &'a Env| match input.get(0..expected.len()) { Some(next) if next == expected => Ok((&input[expected.len()..], env, ())), _ => Err(input), } } pub fn one_of<'a>(expected: Vec<&'static str>) -> impl Parser<'a, &'static str> { move |input: &'a str, env: &'a Env| { for &e in expected.iter() { match input.get(0..e.len()) { Some(next) if next == e => return Ok((&input[e.len()..], env, e)), _ => continue, } } Err(input) } } pub fn whitespace_char<'a>() -> impl Parser<'a, (char, &'a Env)> { pred(any_char, |c| c.0.is_whitespace()) } pub fn space1<'a>() -> impl Parser<'a, Vec<(char, &'a Env)>> { one_or_more(whitespace_char()) } pub fn space0<'a>() -> impl Parser<'a, Vec<(char, &'a Env)>> { zero_or_more(whitespace_char()) } pub fn map<'a, P, F, A, B>(parser: P, map_fn: F)
ext_input, next_env, result1)| { parser2 .parse(next_input, next_env) .map(|(last_input, last_env, result2)| { (last_input, last_env, (result1, result2)) }) }) } } pub fn left<'a, P1, P2, R1, R2>(parser1: P1, parser2: P2) -> impl Parser<'a, R1> where P1: Parser<'a, R1>, P2: Parser<'a, R2>, { map(pair(parser1, parser2), |(left, _right)| left) } pub fn right<'a, P1, P2, R1, R2>(parser1: P1, parser2: P2) -> impl Parser<'a, R2> where P1: Parser<'a, R1>, P2: Parser<'a, R2>, { map(pair(parser1, parser2), |(_left, right)| right) } pub fn zero_or_one<'a, P, A>(parser: P) -> impl Parser<'a, Option<A>> where P: Parser<'a, A>, { move |mut input, mut env| { let mut result = None; if let Ok((next_input, next_env, item)) = parser.parse(input, env) { env = next_env; input = next_input; result = Some(item); } Ok((input, env, result)) } } pub fn one_or_more<'a, P, A>(parser: P) -> impl Parser<'a, Vec<A>> where P: Parser<'a, A>, { move |mut input, mut env| { let mut result = Vec::new(); if let Ok((next_input, new_env, first_item)) = parser.parse(input, env) { env = new_env; input = next_input; result.push(first_item); } else { return Err(input); } while let Ok((next_input, new_env, first_item)) = parser.parse(input, env) { env = new_env; input = next_input; result.push(first_item); } Ok((input, env, result)) } } pub fn zero_or_more<'a, P, A>(parser: P) -> impl Parser<'a, Vec<A>> where P: Parser<'a, A>, { move |mut input, mut env| { let mut result = Vec::new(); while let Ok((next_input, next_env, first_item)) = parser.parse(input, env) { env = next_env; input = next_input; result.push(first_item); } Ok((input, env, result)) } } pub fn pred<'a, P, A, F>(parser: P, predicate: F) -> impl Parser<'a, A> where P: Parser<'a, A>, F: Fn(&A) -> bool, { move |input, env| { if let Ok((next_input, next_env, value)) = parser.parse(input, env) { if predicate(&value) { return Ok((next_input, next_env, value)); } } Err(input) } } pub fn either<'a, P1, P2, A>(parser1: P1, parser2: P2) -> impl 
Parser<'a, A> where P1: Parser<'a, A>, P2: Parser<'a, A>, { move |input, env| match parser1.parse(input, env) { ok @ Ok(_) => ok, Err(_) => parser2.parse(input, env), } } pub fn and_then<'a, P, F, A, B, NextP>(parser: P, f: F) -> impl Parser<'a, B> where P: Parser<'a, A>, NextP: Parser<'a, B>, F: Fn(A) -> NextP, { move |input, env| match parser.parse(input, env) { Ok((next_input, next_env, result)) => f(result).parse(next_input, next_env), Err(err) => Err(err), } } pub fn whitespace_wrap<'a, P, A>(parser: P) -> impl Parser<'a, A> where P: Parser<'a, A>, { right(space0(), left(parser, space0())) }
-> impl Parser<'a, B> where P: Parser<'a, A>, F: Fn(A) -> B, { move |input, env| { parser .parse(input, env) .map(|(next_input, next_env, result)| (next_input, next_env, map_fn(result))) } } pub fn pair<'a, P1, P2, R1, R2>(parser1: P1, parser2: P2) -> impl Parser<'a, (R1, R2)> where P1: Parser<'a, R1>, P2: Parser<'a, R2>, { move |input, env| { parser1 .parse(input, env) .and_then(|(n
random
[ { "content": "pub fn expr<'a>() -> impl Parser<'a, (Rc<Expr>, &'a Env)> {\n\n whitespace_wrap(term()).and_then(|(one, env)| {\n\n zero_or_more(whitespace_wrap(pair(\n\n whitespace_wrap(any_char.pred(|(c, _e)| *c == '+' || *c == '-')),\n\n term(),\n\n )))\n\n .map(move |mut terms| {\n\n if terms.len() == 0 {\n\n (one.clone(), env)\n\n } else {\n\n let env = terms.last().unwrap().1 .1;\n\n let mut res = one.clone();\n\n // わざわざReverseしなくていいよね...\n\n terms.reverse();\n\n while let Some(((c, _e1), (t, _e2))) = terms.pop() {\n\n match c {\n\n '+' => {\n\n res = Expr::new_binop(Bop::Add, res, t, env);\n\n }\n\n '-' => {\n", "file_path": "src/parse.rs", "rank": 15, "score": 143545.51209731537 }, { "content": "fn primary<'a>() -> impl Parser<'a, (Rc<Expr>, &'a Env)> {\n\n either(unsigned_number(), either(variable(), parenthesized_expr()))\n\n}\n\n\n", "file_path": "src/parse.rs", "rank": 16, "score": 117686.31862313279 }, { "content": "fn unary<'a>() -> impl Parser<'a, (Rc<Expr>, &'a Env)> {\n\n zero_or_more(whitespace_wrap(\n\n any_char.pred(|(c, _e)| *c == '+' || *c == '-'),\n\n ))\n\n .and_then(|vec_c_r| {\n\n either(func(), primary()).map(move |(mut res, env)| {\n\n if vec_c_r.iter().filter(|(c, _e)| *c == '-').count() % 2 != 0 {\n\n res = Expr::new_unop(Uop::Neg, res, env);\n\n return (res, env);\n\n } else {\n\n return (res, env);\n\n }\n\n })\n\n })\n\n}\n\n\n", "file_path": "src/parse.rs", "rank": 17, "score": 117686.31862313279 }, { "content": "fn func<'a>() -> impl Parser<'a, (Rc<Expr>, &'a Env)> {\n\n pair(\n\n one_of(vec![\"sin\", \"cos\", \"tan\", \"log\", \"exp\"]),\n\n parenthesized_expr(),\n\n )\n\n .map(|(name, (exp, env))| {\n\n let op;\n\n match name {\n\n \"sin\" => {\n\n op = Uop::Sin;\n\n }\n\n \"cos\" => {\n\n op = Uop::Cos;\n\n }\n\n \"tan\" => {\n\n op = Uop::Tan;\n\n }\n\n \"log\" => {\n\n op = Uop::Log;\n\n }\n\n \"exp\" => {\n\n op = Uop::Exp;\n\n }\n\n _ => unimplemented!(),\n\n }\n\n (Expr::new_unop(op, exp, env), env)\n\n })\n\n}\n\n\n", 
"file_path": "src/parse.rs", "rank": 18, "score": 117686.31862313279 }, { "content": "fn term<'a>() -> impl Parser<'a, (Rc<Expr>, &'a Env)> {\n\n factor().and_then(|(one, env)| {\n\n zero_or_more(whitespace_wrap(pair(\n\n whitespace_wrap(any_char.pred(|(c, _e)| *c == '*' || *c == '/')),\n\n factor(),\n\n )))\n\n .map(move |mut factors| {\n\n if factors.len() == 0 {\n\n (one.clone(), env)\n\n } else {\n\n let env = factors.last().unwrap().1 .1;\n\n let mut res = one.clone();\n\n factors.reverse();\n\n while let Some(((c, _e1), (f, _e2))) = factors.pop() {\n\n match c {\n\n '*' => {\n\n res = Expr::new_binop(Bop::Mul, res, f, env);\n\n }\n\n '/' => {\n\n res = Expr::new_binop(Bop::Div, res, f, env);\n\n }\n\n _ => unreachable!(),\n\n }\n\n }\n\n (res, env)\n\n }\n\n })\n\n })\n\n}\n\n\n", "file_path": "src/parse.rs", "rank": 19, "score": 117686.31862313279 }, { "content": "fn factor<'a>() -> impl Parser<'a, (Rc<Expr>, &'a Env)> {\n\n unary().and_then(|(one, env)| {\n\n zero_or_more(right(whitespace_wrap(match_literal(\"^\")), unary())).map(move |mut unaries| {\n\n if unaries.len() == 0 {\n\n (one.clone(), env)\n\n } else {\n\n let env = unaries.last().unwrap().1;\n\n let mut res = unaries.pop().unwrap().0;\n\n while let Some((una, _env)) = unaries.pop() {\n\n res = Expr::new_binop(Bop::Pow, una, res, env);\n\n }\n\n res = Expr::new_binop(Bop::Pow, one.clone(), res, env);\n\n (res, env)\n\n }\n\n })\n\n })\n\n}\n", "file_path": "src/parse.rs", "rank": 20, "score": 117686.31862313279 }, { "content": "fn variable<'a>() -> impl Parser<'a, (Rc<Expr>, &'a Env)> {\n\n identifier.map(|(s, env)| (Expr::new_var(s, env), env))\n\n}\n", "file_path": "src/parse.rs", "rank": 21, "score": 117686.31862313279 }, { "content": "fn parenthesized_expr<'a>() -> impl Parser<'a, (Rc<Expr>, &'a Env)> {\n\n right(\n\n match_literal(\"(\"),\n\n left(whitespace_wrap(expr()), match_literal(\")\")),\n\n )\n\n}\n\n\n", "file_path": "src/parse.rs", "rank": 22, "score": 114373.06640076131 }, { 
"content": "fn unsigned_number<'a>() -> impl Parser<'a, (Rc<Expr>, &'a Env)> {\n\n one_or_more(any_char.pred(|c| c.0.is_numeric())).map(|chars| {\n\n let env = chars.last().expect(\"\").1;\n\n (\n\n Expr::new_num(\n\n chars\n\n .iter()\n\n .fold(0, |s, c| s * 10 + c.0.to_digit(10).expect(\"\") as i64),\n\n env,\n\n ),\n\n env,\n\n )\n\n })\n\n}\n", "file_path": "src/parse.rs", "rank": 23, "score": 114373.0664007613 }, { "content": "pub fn variables<'a>() -> impl Parser<'a, Vec<Rc<Expr>>> {\n\n one_or_more(whitespace_wrap(variable())).map(|v| v.into_iter().map(|(v, _e)| v).collect())\n\n}\n\n\n", "file_path": "src/parse.rs", "rank": 24, "score": 112697.9103945894 }, { "content": "fn read_line() -> String {\n\n use std::io::Read;\n\n let stdin = std::io::stdin();\n\n let stdin = stdin.lock();\n\n let token: String = stdin\n\n .bytes()\n\n .map(|c| c.expect(\"failed to read char\") as char)\n\n .take_while(|c| !c.is_whitespace())\n\n .collect();\n\n token.parse().ok().expect(\"failed to parse token\")\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 29, "score": 74282.83080462612 }, { "content": "fn read<T: std::str::FromStr>() -> T {\n\n use std::io::Read;\n\n let stdin = std::io::stdin();\n\n let stdin = stdin.lock();\n\n let token: String = stdin\n\n .bytes()\n\n .map(|c| c.expect(\"failed to read char\") as char)\n\n .skip_while(|c| c.is_whitespace())\n\n .take_while(|c| !c.is_whitespace())\n\n .collect();\n\n token.parse().ok().expect(\"failed to parse token\")\n\n}\n", "file_path": "src/lib.rs", "rank": 30, "score": 70041.74251793548 }, { "content": "#[test]\n\nfn variable_parser() {\n\n let e = &Environment::new();\n\n let xs = String::from(\"x1\");\n\n assert_eq!(\n\n Ok((\"\", e, (Expr::new_var(xs.clone(), e), e))),\n\n variable().parse(\"x1\", e)\n\n );\n\n assert_eq!(\n\n Ok((\"\", e, (Expr::new_var(xs, e), e))),\n\n variable().parse(\"x1\", e)\n\n );\n\n println!(\"{:?}\", e);\n\n}\n\n\n", "file_path": "src/parse.rs", "rank": 31, "score": 52328.000329956354 
}, { "content": "#[test]\n\nfn unary_parser() {\n\n let e = &Environment::new();\n\n let x = String::from(\"x\");\n\n let exptcted_expr = Expr::new_unop(Uop::Log, Expr::new_var(x, e), e);\n\n assert_eq!(\n\n Ok((\"\", e, (exptcted_expr, e))),\n\n unary().parse(\" - + - + log(x)\", e)\n\n );\n\n}\n\n\n", "file_path": "src/parse.rs", "rank": 32, "score": 52328.000329956354 }, { "content": "#[test]\n\nfn expr_parser() {\n\n let e = &Environment::new();\n\n let x = String::from(\"x\");\n\n let y = String::from(\"y\");\n\n let num = Expr::new_binop(\n\n Bop::Add,\n\n Expr::new_num(2, e),\n\n Expr::new_unop(Uop::Log, Expr::new_var(x.clone(), e), e),\n\n e,\n\n );\n\n let deno = Expr::new_unop(Uop::Tan, Expr::new_var(x.clone(), e), e);\n\n let expr1 = Expr::new_binop(\n\n Bop::Pow,\n\n Expr::new_binop(Bop::Div, num, deno, e),\n\n Expr::new_var(y.clone(), e),\n\n e,\n\n );\n\n let exptcted_expr = Expr::new_binop(\n\n Bop::Add,\n\n expr1,\n", "file_path": "src/parse.rs", "rank": 33, "score": 52328.000329956354 }, { "content": "#[test]\n\nfn number_parser() {\n\n let e = &Environment::new();\n\n assert_eq!(\n\n Ok((\"\", e, (Expr::new_num(64, e), e))),\n\n unsigned_number().parse(\"64\", e)\n\n );\n\n assert_eq!(\n\n Ok((\"\", e, (Expr::new_num(12333, e), e))),\n\n unsigned_number().parse(\"12333\", e)\n\n );\n\n assert_eq!(Err(\"\"), unsigned_number().parse(\"\", e));\n\n assert_eq!(Err(\"-123\"), unsigned_number().parse(\"-123\", e));\n\n}\n\n\n", "file_path": "src/parse.rs", "rank": 34, "score": 52328.000329956354 }, { "content": "#[test]\n\nfn factor_parser() {\n\n let e = &Environment::new();\n\n let expected_factor1 = Expr::new_binop(\n\n Bop::Pow,\n\n Expr::new_var(String::from(\"x1\"), e),\n\n Expr::new_binop(Bop::Pow, Expr::new_num(3, e), Expr::new_num(2, e), e),\n\n e,\n\n );\n\n assert_eq!(\n\n Ok((\"\", e, (expected_factor1, e))),\n\n factor().parse(\"x1 ^ 3 ^ 2\", e)\n\n );\n\n}\n\n\n", "file_path": "src/parse.rs", "rank": 35, "score": 52328.000329956354 }, { 
"content": "#[test]\n\nfn term_parser() {\n\n let e = &Environment::new();\n\n let x1 = String::from(\"x1\");\n\n let y1 = String::from(\"y1\");\n\n let expected_term = Expr::new_binop(\n\n Bop::Mul,\n\n Expr::new_binop(\n\n Bop::Mul,\n\n Expr::new_binop(\n\n Bop::Pow,\n\n Expr::new_var(x1.clone(), e),\n\n Expr::new_num(3, e),\n\n e,\n\n ),\n\n Expr::new_binop(Bop::Pow, Expr::new_var(y1, e), Expr::new_num(2, e), e),\n\n e,\n\n ),\n\n Expr::new_binop(Bop::Pow, Expr::new_var(x1, e), Expr::new_num(4, e), e),\n\n e,\n\n );\n\n assert_eq!(\n\n Ok((\"\", e, (expected_term, e))),\n\n term().parse(\"x1 ^ 3 * y1 ^ 2 * x1 ^ 4\", e)\n\n );\n\n}\n\n\n", "file_path": "src/parse.rs", "rank": 36, "score": 52328.000329956354 }, { "content": "#[test]\n\nfn create_diff_graph() {\n\n let e = &Environment::new();\n\n let res = expr().parse(\n\n \"sin(sin(sin(x)+cos(x))+cos(sin(x)+cos(x)))+cos(sin(sin(x)+cos(x))+cos(sin(x)+cos(x)))\n\n \",\n\n e,\n\n );\n\n match res {\n\n Ok((_, _, (expr, env))) => {\n\n expr.diff(\"x\", env).reduce(env).print(env);\n\n let mut d = Deriv::new(expr, env, \"x\");\n\n for (i, l) in d.graph.iter().enumerate() {\n\n for e in l {\n\n println!(\"{} -> {}\", i, e.to);\n\n e.exp.print(env);\n\n }\n\n }\n\n let doms = d.dom_rel();\n\n let pdoms = d.pdom_rel();\n\n let factor_subgraphs = d.factor_subgraphs(&doms, &pdoms);\n", "file_path": "src/diff.rs", "rank": 37, "score": 28695.54481093359 }, { "content": " pub exprs: HashMap<Expr, Rc<Expr>>,\n\n}\n\n\n\npub type Env = RefCell<Environment>;\n\n\n\nimpl Environment {\n\n pub fn new() -> Env {\n\n RefCell::new(Environment {\n\n vars: HashMap::new(),\n\n rev_vars: HashMap::new(),\n\n exprs: HashMap::new(),\n\n })\n\n }\n\n\n\n pub fn extend_var(&mut self, var_str: String) -> Var {\n\n match self.search_var(&var_str) {\n\n Some(v) => v,\n\n None => {\n\n let v = Var::new(self.vars.len());\n\n self.vars.insert(v, var_str.clone());\n", "file_path": "src/expr.rs", "rank": 41, "score": 13.06527217117701 }, { 
"content": " pub graph: Vec<Vec<Edge>>,\n\n pub reverse_graph: Vec<Vec<Edge>>,\n\n}\n\n\n\nimpl Deriv {\n\n pub fn new(expr: Rc<Expr>, e: &Env, v: &str) -> Self {\n\n let mut m = HashMap::new();\n\n let expr = expr.reduce(e);\n\n expr.post_index(&mut 0, &mut m);\n\n let size = m.len();\n\n let (mut graph, mut reverse_graph) = (vec![vec![]; size], vec![vec![]; size]);\n\n let mut leafs = HashMap::new();\n\n let mut memo = HashSet::new();\n\n Deriv::construct(\n\n &(*expr),\n\n e,\n\n v,\n\n &m,\n\n &mut graph,\n\n &mut reverse_graph,\n", "file_path": "src/diff.rs", "rank": 42, "score": 12.80728538115594 }, { "content": "\n\n pub fn diff(&self, v: &str, e: &Env) -> Rc<Expr> {\n\n let var = e.borrow().search_var(&String::from(v));\n\n match var {\n\n Some(v) => self.diff_internal(v, e),\n\n None => {\n\n // unreachable!();\n\n Rc::new(Expr::Num(C::new(0, 1)))\n\n }\n\n }\n\n }\n\n // 合成関数の微分の一段目\n\n // 一旦Vecで可変長にする\n\n pub fn diff_comp(&self, v: &str, e: &Env) -> Vec<Rc<Expr>> {\n\n let v = match e.borrow().search_var(&String::from(v)) {\n\n Some(var) => var,\n\n None => return vec![Expr::new_num(0, e)],\n\n };\n\n\n\n match self {\n", "file_path": "src/expr.rs", "rank": 43, "score": 12.791861298902568 }, { "content": "use super::expr::{Bop, Env, Environment, Expr, Uop, Var, C};\n\npub use super::parser_combinator::*;\n\nuse std::rc::Rc;\n\n\n", "file_path": "src/parse.rs", "rank": 44, "score": 12.257640659849232 }, { "content": "use super::parse::*;\n\npub use num_rational::Rational64;\n\npub use num_traits::identities::{One, Zero};\n\npub use std::cell::RefCell;\n\npub use std::collections::HashMap;\n\npub use std::rc::Rc;\n\npub type C = Rational64;\n\n\n\n#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq, PartialOrd, Ord)]\n\npub struct Var {\n\n pub id: usize,\n\n}\n\n\n\nimpl Var {\n\n pub fn new(id: usize) -> Self {\n\n Var { id }\n\n }\n\n}\n\n\n\n#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq, PartialOrd, Ord)]\n", "file_path": "src/expr.rs", "rank": 45, 
"score": 11.188148524974897 }, { "content": " self.rev_vars.insert(var_str, v);\n\n v\n\n }\n\n }\n\n }\n\n\n\n pub fn search_var(&self, var_str: &String) -> Option<Var> {\n\n match self.rev_vars.get(var_str) {\n\n Some(v) => Some(*v),\n\n None => None,\n\n }\n\n }\n\n\n\n pub fn extend_expr(&mut self, e: Expr) -> Rc<Expr> {\n\n match self.search_expr(&e) {\n\n Some(ptr_e) => ptr_e,\n\n None => {\n\n let ptr_e = Rc::new(e.clone());\n\n self.exprs.insert(e, ptr_e.clone());\n\n ptr_e\n", "file_path": "src/expr.rs", "rank": 46, "score": 10.566071972010157 }, { "content": "mod diff;\n\nmod expr;\n\nmod parse;\n\nmod parser_combinator;\n\n\n\nuse chrono::Duration;\n\nuse diff::*;\n\nuse expr::*;\n\nuse parse::*;\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn large_example_chain() {\n\n let e = &Environment::new();\n\n let size = 15;\n\n let sec_max = 5;\n\n let v = match variables().parse(&\"x\", e) {\n\n Ok((_, _, mut vars)) => {\n", "file_path": "src/lib.rs", "rank": 47, "score": 9.893431376790343 }, { "content": " }\n\n\n\n pub fn reduce(&mut self, env: &Env) {\n\n let doms = self.dom_rel();\n\n let pdoms = self.pdom_rel();\n\n let factor_subgraphs = self.factor_subgraphs(&doms, &pdoms);\n\n let mut cnt = 0;\n\n for fsub in factor_subgraphs {\n\n self.shrink(fsub, &doms, &pdoms, env);\n\n }\n\n }\n\n // diff by v をvalsで評価(forward)\n\n // vはVarなの??型ごちゃごちゃすぎん\n\n pub fn forward_eval(&self, v: Var, vars: &str, vals: &Vec<f64>, env: &Env) -> f64 {\n\n let mut varvec: Vec<Var>;\n\n match variables().parse(vars, env) {\n\n Ok((_, _, vars)) => {\n\n varvec = vars\n\n .iter()\n\n .map(|v| match **v {\n", "file_path": "src/diff.rs", "rank": 48, "score": 9.617532084023615 }, { "content": " res\n\n }\n\n\n\n pub fn forward_eval_dp(&self, v: Var, vars: &str, vals: &Vec<f64>, env: &Env) -> f64 {\n\n let mut varvec: Vec<Var>;\n\n match variables().parse(vars, env) {\n\n Ok((_, _, vars)) => {\n\n varvec = vars\n\n .iter()\n\n .map(|v| match **v {\n\n 
Expr::Var(vv) => vv,\n\n _ => unreachable!(),\n\n })\n\n .collect();\n\n }\n\n Err(_) => panic!(\"failed to parse variables\"),\n\n }\n\n varvec.sort();\n\n let mut memo = HashMap::new();\n\n self.forward_eval_dp_internal(self.vars[&v], &varvec, vals, &mut memo)\n", "file_path": "src/diff.rs", "rank": 49, "score": 9.594491697350342 }, { "content": "\n\n fn is_minus_one(&self) -> bool {\n\n match self {\n\n Expr::Num(n) => *n == -C::one(),\n\n _ => false,\n\n }\n\n }\n\n // post-orderでIndexを振る\n\n pub fn post_index(&self, i: &mut usize, postids: &mut HashMap<Expr, usize>) {\n\n match self {\n\n Expr::UnOp { exp, .. } => {\n\n exp.post_index(i, postids);\n\n match postids.get(self) {\n\n Some(_v) => return,\n\n None => {\n\n postids.insert(self.clone(), *i);\n\n *i += 1;\n\n }\n\n }\n\n }\n", "file_path": "src/expr.rs", "rank": 50, "score": 9.492975267448141 }, { "content": " }\n\n }\n\n\n\n pub fn backward_grad(&self, vars: &str, vals: &Vec<f64>, env: &Env) -> Vec<f64> {\n\n let mut varvec: Vec<Var>;\n\n match variables().parse(vars, env) {\n\n Ok((_, _, vars)) => {\n\n varvec = vars\n\n .iter()\n\n .map(|v| match **v {\n\n Expr::Var(vv) => vv,\n\n _ => unreachable!(),\n\n })\n\n .collect();\n\n }\n\n Err(_) => panic!(\"failed to parse variables\"),\n\n }\n\n varvec.sort();\n\n self.backward_grad_internal(&varvec, vals)\n\n }\n", "file_path": "src/diff.rs", "rank": 51, "score": 9.434637766593433 }, { "content": " Expr::Var(vt) => {\n\n print!(\"{}\", e.borrow().vars[vt]);\n\n }\n\n Expr::Num(n) => {\n\n print!(\"{}\", n);\n\n }\n\n }\n\n }\n\n\n\n pub fn eval(&self, vars: &str, vals: &Vec<f64>, e: &Env) -> f64 {\n\n let mut varvec: Vec<Var>;\n\n match variables().parse(vars, e) {\n\n Ok((_, _, vars)) => {\n\n varvec = vars\n\n .iter()\n\n .map(|v| match **v {\n\n Expr::Var(vv) => vv,\n\n _ => unreachable!(),\n\n })\n\n .collect();\n", "file_path": "src/expr.rs", "rank": 52, "score": 9.076932569814321 }, { "content": "pub enum Expr {\n\n Var(Var),\n\n Num(C),\n\n 
UnOp {\n\n op: Uop,\n\n exp: Rc<Expr>,\n\n },\n\n BinOp {\n\n op: Bop,\n\n exp1: Rc<Expr>,\n\n exp2: Rc<Expr>,\n\n },\n\n}\n\n\n\nimpl Expr {\n\n pub fn new_unop(op: Uop, one: Rc<Expr>, env: &Env) -> Rc<Expr> {\n\n let e = Expr::UnOp { op, exp: one };\n\n env.borrow_mut().extend_expr(e)\n\n }\n\n\n", "file_path": "src/expr.rs", "rank": 53, "score": 9.026464836431671 }, { "content": " fn is_const(&self) -> bool {\n\n match self {\n\n Expr::Num(_n) => true,\n\n _ => false,\n\n }\n\n }\n\n\n\n fn is_zero(&self) -> bool {\n\n match self {\n\n Expr::Num(n) => n.is_zero(),\n\n _ => false,\n\n }\n\n }\n\n\n\n fn is_one(&self) -> bool {\n\n match self {\n\n Expr::Num(n) => *n == C::one(),\n\n _ => false,\n\n }\n\n }\n", "file_path": "src/expr.rs", "rank": 54, "score": 8.626925147139948 }, { "content": " pub fn new_binop(op: Bop, one: Rc<Expr>, other: Rc<Expr>, env: &Env) -> Rc<Expr> {\n\n let exp1;\n\n let exp2;\n\n if one < other {\n\n exp1 = one;\n\n exp2 = other;\n\n } else {\n\n exp1 = other;\n\n exp2 = one;\n\n }\n\n let e = Expr::BinOp { op, exp1, exp2 };\n\n env.borrow_mut().extend_expr(e)\n\n }\n\n\n\n pub fn new_num(n: i64, env: &Env) -> Rc<Expr> {\n\n let e = Expr::Num(C::new(n, 1));\n\n let p = env.borrow_mut().extend_expr(e);\n\n p\n\n }\n\n\n", "file_path": "src/expr.rs", "rank": 55, "score": 8.554849729626012 }, { "content": " pub fn pi(env: &Env) -> Rc<Expr> {\n\n let pnum = Expr::new_num(1833616417, env);\n\n let pden = Expr::new_num(583658233, env);\n\n Expr::new_binop(Bop::Div, pnum, pden, env).reduce(env)\n\n }\n\n pub fn sqrt(expr: Rc<Expr>, env: &Env) -> Rc<Expr> {\n\n let pnum = Expr::new_num(1, env);\n\n let pden = Expr::new_num(2, env);\n\n let half = Expr::new_binop(Bop::Div, pnum, pden, env).reduce(env);\n\n Expr::new_binop(Bop::Pow, expr, half, env)\n\n }\n\n\n\n pub fn new_var(s: String, env: &Env) -> Rc<Expr> {\n\n let v = env.borrow_mut().extend_var(s);\n\n let e = Expr::Var(v);\n\n env.borrow_mut().extend_expr(e)\n\n }\n\n\n\n fn 
new_num_from_op(op: Bop, left: Rc<Expr>, right: Rc<Expr>, env: &Env) -> Rc<Expr> {\n\n match *left {\n", "file_path": "src/expr.rs", "rank": 56, "score": 8.476846569496056 }, { "content": " // }\n\n // Err(_) => panic!(\"\"),\n\n // };\n\n // let (mut deriv_tree_xs, mut deriv_tree_ys, mut deriv_tree_zs) = (vec![], vec![], vec![]);\n\n // for var in [\"x\", \"y\", \"z\"].iter().map(|s| s.to_owned()) {\n\n // for t in &target_exprs {\n\n // if var == String::from(\"x\") {\n\n // deriv_tree_xs.push(t.diff(&var, env).reduce(env));\n\n // } else if var == String::from(\"y\") {\n\n // deriv_tree_ys.push(t.diff(&var, env).reduce(env));\n\n // } else {\n\n // deriv_tree_zs.push(t.diff(&var, env).reduce(env));\n\n // }\n\n // }\n\n // }\n\n // let var: String = String::from(\"hoge\");\n\n // let mut derivative_graphs = vec![];\n\n // let mut derivative_graphs_for_dp = vec![];\n\n // for t in target_exprs {\n\n // let d = Deriv::new(t, env, &var);\n", "file_path": "src/lib.rs", "rank": 57, "score": 8.285433294770284 }, { "content": "\n\n pub fn print(&self, e: &Env) {\n\n self.print_internal(e);\n\n println!(\"\");\n\n }\n\n\n\n fn print_func(&self, name: &str, e: &Env) {\n\n print!(\"{}(\", name);\n\n self.print_internal(e);\n\n print!(\")\");\n\n }\n\n\n\n fn print_internal(&self, e: &Env) {\n\n match self {\n\n Expr::UnOp { op, exp: inexp } => match op {\n\n Uop::Sin => {\n\n inexp.print_func(\"sin\", e);\n\n }\n\n Uop::Cos => {\n\n inexp.print_func(\"cos\", e);\n", "file_path": "src/expr.rs", "rank": 58, "score": 8.174171214575694 }, { "content": " pdoms: &Vec<HashSet<usize>>,\n\n env: &Env,\n\n ) {\n\n let (start, goal) = (std::cmp::max(fsub.0, fsub.1), std::cmp::min(fsub.0, fsub.1));\n\n if self.graph[start].len() < 2 || self.reverse_graph[goal].len() < 2 {\n\n return;\n\n }\n\n let mut stack = vec![(start, vec![])];\n\n let mut paths = vec![];\n\n while 0 < stack.len() {\n\n let (cur, path) = stack.pop().unwrap();\n\n if cur == goal {\n\n paths.push(path);\n\n 
continue;\n\n } else if self.leafs.contains_key(&cur) {\n\n continue;\n\n } else {\n\n for Edge { to: next, .. } in &self.graph[cur] {\n\n let mut p = path.clone();\n\n p.push(*next);\n", "file_path": "src/diff.rs", "rank": 59, "score": 8.145970012010034 }, { "content": " stack.push((*next, p));\n\n }\n\n }\n\n }\n\n // domなら 0 > 1, pdomなら 0 < 1\n\n let mut res = Expr::new_num(0, env);\n\n let mut edges_will_be_removed: HashSet<(usize, usize)> = HashSet::new();\n\n use super::expr::Bop;\n\n for path in paths {\n\n let mut cur = start;\n\n let mut temp_expr = Expr::new_num(1, env);\n\n for next in path {\n\n // edgeをみつける\n\n for Edge { to: v, exp } in &self.graph[cur] {\n\n if *v == next {\n\n temp_expr = Expr::new_binop(Bop::Mul, temp_expr, exp.clone(), env);\n\n // v < cur\n\n // fsub.0 is dominator\n\n if fsub.1 < fsub.0 {\n\n if pdoms[*v].contains(&fsub.1) {\n", "file_path": "src/diff.rs", "rank": 60, "score": 7.805445169119928 }, { "content": "use super::expr::{Env, Environment, Expr, Var};\n\nuse super::parse::*;\n\nuse std::collections::{HashMap, HashSet};\n\nuse std::rc::Rc;\n\n\n\n#[derive(Debug, Clone, Hash, PartialEq, Eq, PartialOrd, Ord)]\n\npub struct Edge {\n\n pub to: usize,\n\n pub exp: Rc<Expr>,\n\n}\n\n/*\n\nlet graph: Vec<Vec<Edge>> = vec![vec![]; nodesize];\n\nlet reverse_graph: Vec<Vec<Edge>> = vec![vec![]; nodesize];\n\n*/\n\n#[derive(Debug, Clone)]\n\npub struct Deriv {\n\n size: usize,\n\n pub root: usize,\n\n leafs: HashMap<usize, Option<Var>>,\n\n pub vars: HashMap<Var, usize>,\n", "file_path": "src/diff.rs", "rank": 61, "score": 7.687181995692395 }, { "content": " }\n\n }\n\n }\n\n\n\n fn search_expr(&self, e: &Expr) -> Option<Rc<Expr>> {\n\n match self.exprs.get(&e) {\n\n Some(ptr) => Some(ptr.clone()),\n\n None => None,\n\n }\n\n }\n\n\n\n pub fn remove_expr(&mut self, e: &Expr) {\n\n match self.exprs.remove(e) {\n\n Some(_) => (),\n\n None => unreachable!(),\n\n }\n\n }\n\n\n\n pub fn clean(&mut self) {\n\n let mut remove_list = 
std::collections::HashSet::new();\n", "file_path": "src/expr.rs", "rank": 62, "score": 7.427917041816534 }, { "content": " fn backward_grad_internal(&self, vars: &Vec<Var>, vals: &Vec<f64>) -> Vec<f64> {\n\n let mut res = vec![std::f64::NAN];\n\n let mut stack = vec![(self.root, 1.)];\n\n let mut paths: Vec<f64> = vec![];\n\n while 0 < stack.len() {\n\n let (cur, path) = stack.pop().unwrap();\n\n if self.leafs.contains_key(&cur) {\n\n match self.leafs[&cur] {\n\n Some(v) => match vars.binary_search(&v) {\n\n Ok(i) => res[i] = path,\n\n Err(_) => panic!(\"no value is given\"),\n\n },\n\n _ => continue,\n\n }\n\n } else {\n\n for Edge { to: next, exp } in &self.graph[cur] {\n\n stack.push((*next, path * exp.eval_internal(vars, vals)));\n\n }\n\n }\n\n }\n", "file_path": "src/diff.rs", "rank": 63, "score": 7.348207757460735 }, { "content": " // }\n\n\n\n // #[test]\n\n // fn spherical_harmonics() {\n\n // let env = &Environment::new();\n\n // let max_l = 5;\n\n // let mut target_exprs = vec![];\n\n // for l in 0..max_l {\n\n // for m in -l..l {\n\n // target_exprs.push(y(l, m, env));\n\n // }\n\n // }\n\n // let (x, y, z) = match variables().parse(&\"x y z\", env) {\n\n // Ok((_, _, mut vars)) => {\n\n // assert!(vars.len() == 3);\n\n // (\n\n // vars.pop().unwrap(),\n\n // vars.pop().unwrap(),\n\n // vars.pop().unwrap(),\n\n // )\n", "file_path": "src/lib.rs", "rank": 64, "score": 6.968892804487043 }, { "content": " Expr::Num(n) => match *right {\n\n Expr::Num(m) => match op {\n\n Bop::Add => Rc::new(Expr::Num(n + m)),\n\n Bop::Sub => Rc::new(Expr::Num(n - m)),\n\n Bop::Mul => Rc::new(Expr::Num(n * m)),\n\n Bop::Div => Rc::new(Expr::Num(n / m)),\n\n // Powは無理(無理数)\n\n Bop::Pow => unimplemented!(),\n\n },\n\n _ => unreachable!(),\n\n },\n\n _ => unreachable!(),\n\n }\n\n }\n\n\n\n fn new_num_from_rat(c: C, env: &Env) -> Rc<Expr> {\n\n let e = Expr::Num(c);\n\n env.borrow_mut().extend_expr(e)\n\n }\n\n\n", "file_path": "src/expr.rs", "rank": 65, "score": 
6.943917713401267 }, { "content": " &mut leafs,\n\n &mut memo,\n\n );\n\n // ここでLeafも計算はできる.\n\n Deriv {\n\n size,\n\n root: size - 1,\n\n leafs: leafs.clone(),\n\n vars: leafs\n\n .into_iter()\n\n .filter(|(_, v)| v.is_some())\n\n .map(|(k, v)| (v.unwrap(), k))\n\n .collect(),\n\n graph,\n\n reverse_graph,\n\n }\n\n }\n\n fn construct(\n\n expr: &Expr,\n\n e: &Env,\n", "file_path": "src/diff.rs", "rank": 66, "score": 6.578512562242729 }, { "content": " while b2 < b1 {\n\n b2 = doms[b2].expect(\"dominator intersection failure\");\n\n }\n\n }\n\n b1\n\n }\n\n\n\n // 支配関係を求める.\n\n fn dom_rel(&self) -> Vec<HashSet<usize>> {\n\n let mut doms = vec![None; self.size];\n\n doms[self.root] = Some(self.root);\n\n let mut changed = true;\n\n while changed {\n\n changed = false;\n\n for u in (0..self.size - 1).rev() {\n\n let mut new_idom = std::usize::MAX;\n\n for &Edge { to: v, .. } in &self.reverse_graph[u] {\n\n if let Some(_i) = doms[v] {\n\n if new_idom == std::usize::MAX {\n\n new_idom = v;\n", "file_path": "src/diff.rs", "rank": 67, "score": 6.1153165596165735 }, { "content": " edges_will_be_removed.insert((*v, cur));\n\n }\n\n } else {\n\n if doms[cur].contains(&fsub.1) {\n\n edges_will_be_removed.insert((*v, cur));\n\n }\n\n }\n\n cur = next;\n\n break;\n\n }\n\n }\n\n }\n\n res = Expr::new_binop(Bop::Add, res, temp_expr, env);\n\n }\n\n // いらないのを消す\n\n for (to, from) in edges_will_be_removed {\n\n for i in 0..self.graph[from].len() {\n\n if self.graph[from][i].to == to {\n\n self.graph[from].remove(i);\n\n break;\n", "file_path": "src/diff.rs", "rank": 68, "score": 5.959203389083756 }, { "content": " Expr::new_unop(Uop::Sin, Expr::new_var(y.clone(), e), e),\n\n e,\n\n );\n\n assert_eq!(\n\n Ok((\"\", e, (exptcted_expr, e))),\n\n expr().parse(\"( ( 2 + log(x) ) / tan(x) ) ^ y + sin(y) \", e)\n\n );\n\n let res = expr().parse(\"1 / tan( x )\", e);\n\n\n\n match res {\n\n Ok((_, _, (expr, env))) => {\n\n let d = expr.diff(\"x\", env).reduce(env);\n\n d.print(env);\n\n 
env.borrow_mut().clean();\n\n println!(\"{}\", d.eval(\"x\", &vec![std::f64::consts::FRAC_PI_2], env));\n\n }\n\n Err(_) => panic!(\"\"),\n\n }\n\n}\n", "file_path": "src/parse.rs", "rank": 69, "score": 5.789633438751941 }, { "content": " };\n\n let mut target_expr = v.clone();\n\n let mut cos = Expr::new_unop(Uop::Cos, target_expr.clone(), e);\n\n let mut sin = Expr::new_unop(Uop::Cos, target_expr, e);\n\n for _ in 0..size {\n\n cos = Expr::new_unop(Uop::Cos, sin, e);\n\n sin = Expr::new_unop(Uop::Sin, cos.clone(), e);\n\n }\n\n target_expr = Expr::new_binop(Bop::Add, cos, sin, e);\n\n let var: String = String::from(\"x\");\n\n let naive_d = target_expr.diff(&var, e).reduce(e);\n\n let mut d = Deriv::new(target_expr, e, &var);\n\n let d_for_dp = d.clone();\n\n\n\n // optimization\n\n let now = time::Instant::now();\n\n d.reduce(e);\n\n println!(\"optimized in {} mill sec\", now.elapsed().as_millis());\n\n let xs = String::from(\"x\");\n\n let v = e.borrow().rev_vars[&xs];\n", "file_path": "src/lib.rs", "rank": 70, "score": 5.457598920455865 }, { "content": " Bop::Div => exp1.eval_internal(vars, vals) / exp2.eval_internal(vars, vals),\n\n Bop::Pow => exp1\n\n .eval_internal(vars, vals)\n\n .powf(exp2.eval_internal(vars, vals)),\n\n },\n\n Expr::Var(vt) => match vars.binary_search(&vt) {\n\n Ok(i) => vals[i],\n\n Err(_) => panic!(\"var {} is not specified\", vt.id),\n\n },\n\n Expr::Num(n) => *n.numer() as f64 / *n.denom() as f64,\n\n }\n\n }\n\n}\n\n\n\n// 文字列からの検索, 変数からの検索を両方早くしたいんだけど, Mapにすると重そう\n\n// かといってそうじゃなければ変数の数だけはかかる?\n\n#[derive(Debug, Clone, PartialEq, Eq)]\n\npub struct Environment {\n\n pub vars: HashMap<Var, String>,\n\n pub rev_vars: HashMap<String, Var>,\n", "file_path": "src/expr.rs", "rank": 71, "score": 5.308697690417647 }, { "content": " fn pdom_rel(&self) -> Vec<HashSet<usize>> {\n\n let mut pdoms: Vec<Option<usize>> = (0..self.size + 1).map(|e| None).collect();\n\n pdoms[self.size] = Some(self.size);\n\n\n\n let mut changed = true;\n\n while 
changed {\n\n changed = false;\n\n for u in 0..self.size {\n\n let mut new_idom = std::usize::MAX;\n\n for &Edge { to: v, .. } in &self.graph[u] {\n\n if let Some(_i) = pdoms[v] {\n\n if new_idom == std::usize::MAX {\n\n new_idom = v;\n\n } else {\n\n new_idom = Deriv::intersect_p(v, new_idom, &pdoms);\n\n }\n\n }\n\n }\n\n if pdoms[u] != Some(new_idom) {\n\n pdoms[u] = Some(new_idom);\n", "file_path": "src/diff.rs", "rank": 72, "score": 5.11421326756799 }, { "content": " for fsub in factor_subgraphs {\n\n d.shrink(fsub, &doms, &pdoms, env);\n\n }\n\n println!(\"shrinked\");\n\n for (i, l) in d.graph.iter().enumerate() {\n\n for e in l {\n\n println!(\"{} -> {}\", i, e.to);\n\n e.exp.print(env);\n\n }\n\n }\n\n let x = String::from(\"x\");\n\n let v = env.borrow().rev_vars[&x];\n\n println!(\"{}\", d.forward_eval(v, \"x\", &vec![1.], env));\n\n }\n\n Err(_) => panic!(\"\"),\n\n }\n\n}\n", "file_path": "src/diff.rs", "rank": 73, "score": 5.011801195173518 }, { "content": " cnt = 0;\n\n let now = time::Instant::now();\n\n while now.elapsed().as_secs() < sec_max {\n\n d.forward_eval(v, &var, &vec![x], e);\n\n cnt += 1;\n\n }\n\n }\n\n\n\n #[test]\n\n fn large_example_circle() {\n\n let sec_max = 5;\n\n let e = &Environment::new();\n\n // 大きすぎるとstack overflow\n\n let size = 100;\n\n let v = match variables().parse(&\"x\", e) {\n\n Ok((_, _, mut vars)) => {\n\n assert!(vars.len() == 1);\n\n vars.pop().unwrap()\n\n }\n\n Err(_) => panic!(\"\"),\n", "file_path": "src/lib.rs", "rank": 74, "score": 4.92518210707046 }, { "content": " }\n\n }\n\n res\n\n }\n\n\n\n // 逆支配関係を求める.(pdomされてるのが入ってる)\n\n // さっきと逆\n\n fn intersect_p(mut b1: usize, mut b2: usize, pdoms: &Vec<Option<usize>>) -> usize {\n\n while b1 != b2 {\n\n while b2 < b1 || b2 == pdoms.len() - 1 {\n\n b1 = pdoms[b1].expect(\"dominator intersection failure\");\n\n }\n\n while b1 < b2 || b1 == pdoms.len() - 1 {\n\n b2 = pdoms[b2].expect(\"dominator intersection failure\");\n\n }\n\n }\n\n b1\n\n }\n\n // 
逆支配関係を求める.(pdomされてるのが入ってる)\n\n // self.sizeがsuper_root\n", "file_path": "src/diff.rs", "rank": 75, "score": 4.91965604288976 }, { "content": " assert!(vars.len() == 1);\n\n vars.pop().unwrap()\n\n }\n\n Err(_) => panic!(\"\"),\n\n };\n\n let mut target_expr = v.clone();\n\n for _ in 0..size {\n\n let cos = Expr::new_unop(Uop::Cos, target_expr.clone(), e);\n\n let sin = Expr::new_unop(Uop::Sin, target_expr, e);\n\n target_expr = Expr::new_binop(Bop::Add, cos, sin, e);\n\n }\n\n // target_expr.print(e);\n\n let var: String = String::from(\"x\");\n\n let naive_d = target_expr.diff(&var, e).reduce(e);\n\n let mut d = Deriv::new(target_expr, e, &var);\n\n let d_for_dp = d.clone();\n\n // optimization\n\n let now = time::Instant::now();\n\n d.reduce(e);\n\n println!(\"optimized in {} mill sec\", now.elapsed().as_millis());\n", "file_path": "src/lib.rs", "rank": 76, "score": 4.8391545391887405 }, { "content": " // derivative_graphs.push(d.clone());\n\n // derivative_graphs_for_dp.push(d);\n\n // }\n\n // let values = vec![1., 1., 1.];\n\n // let var = String::from(\"x y z\");\n\n // use std::time;\n\n // // let now = time::Instant::now();\n\n // // while now.elapsed().as_secs() < 2 {\n\n // // for derives in [deriv_tree_xs, deriv_tree_ys, deriv_tree_zs].iter() {\n\n // // for d in derives {\n\n // // d.eval(&var, &values, env);\n\n // // }\n\n // // }\n\n // // }\n\n // let now = time::Instant::now();\n\n // while now.elapsed().as_secs() < 2 {\n\n // for d in &derivative_graphs_for_dp {\n\n // d.backward_grad(&var, &values, env);\n\n // }\n\n // }\n\n // let now = time::Instant::now();\n\n // while now.elapsed().as_secs() < 2 {\n\n // for d in &derivative_graphs {\n\n // d.backward_grad(&var, &values, env);\n\n // }\n\n // }\n\n // }\n\n}\n", "file_path": "src/lib.rs", "rank": 77, "score": 4.821972775710616 }, { "content": " changed = true;\n\n }\n\n }\n\n }\n\n // TODO: leafs is deprecated\n\n for (&i, &_v) in &self.leafs {\n\n pdoms[i] = Some(i);\n\n }\n\n for i in 
0..pdoms.len() - 1 {\n\n assert!(pdoms[i] != Some(self.size));\n\n }\n\n let pdomtree = pdoms;\n\n let mut res: Vec<HashSet<usize>> = vec![HashSet::new(); self.size];\n\n for i in 0..self.size {\n\n res[i].insert(i);\n\n let mut cur = i;\n\n while cur != pdomtree[cur].unwrap() {\n\n let pdom = pdomtree[cur].unwrap();\n\n res[i].insert(pdom);\n\n cur = pdom;\n", "file_path": "src/diff.rs", "rank": 78, "score": 4.815287915030174 }, { "content": " // let c = Expr::new_binop(\n\n // Bop::Div,\n\n // Expr::new_num(2 * l + 1, env),\n\n // Expr::new_binop(Bop::Mul, Expr::new_num(4, env), p, env),\n\n // env,\n\n // );\n\n // Expr::sqrt(c, env)\n\n // } else {\n\n // let c = Expr::new_binop(\n\n // Bop::Div,\n\n // Expr::new_num(2 * l + 1, env),\n\n // Expr::new_binop(Bop::Mul, Expr::new_num(2, env), p, env),\n\n // env,\n\n // );\n\n // let c1 = Expr::new_num(fact(l - m) / fact(l + m), env);\n\n // Expr::sqrt(c1, env)\n\n // }\n\n // }\n\n\n\n // fn y(l: i64, mut m: i64, env: &Env) -> Rc<Expr> {\n", "file_path": "src/lib.rs", "rank": 79, "score": 4.598489242273209 }, { "content": " v: &str,\n\n postids: &HashMap<Expr, usize>,\n\n graph: &mut Vec<Vec<Edge>>,\n\n reverse_graph: &mut Vec<Vec<Edge>>,\n\n leafs: &mut HashMap<usize, Option<Var>>,\n\n memo: &mut HashSet<(usize, usize)>,\n\n ) {\n\n // 子のIndexをふる\n\n // 辺を追加する\n\n match expr {\n\n Expr::UnOp { exp, .. 
} => {\n\n let parent_id = postids[&expr];\n\n let child_id = postids[&(*exp)];\n\n if memo.contains(&(parent_id, child_id)) {\n\n return;\n\n } else {\n\n memo.insert((parent_id, child_id));\n\n }\n\n Deriv::construct(&(*exp), e, v, postids, graph, reverse_graph, leafs, memo);\n\n // diffじゃだめで, 一段だけやらなきゃ\n", "file_path": "src/diff.rs", "rank": 80, "score": 4.495603825039929 }, { "content": " } else {\n\n new_idom = Deriv::intersect(v, new_idom, &doms);\n\n }\n\n }\n\n }\n\n if doms[u] != Some(new_idom) {\n\n doms[u] = Some(new_idom);\n\n changed = true;\n\n }\n\n }\n\n }\n\n let domtree = doms;\n\n let mut res: Vec<HashSet<usize>> = vec![HashSet::new(); self.size];\n\n for i in 0..self.size {\n\n res[i].insert(i);\n\n let mut cur = i;\n\n while cur != domtree[cur].unwrap() {\n\n let dom = domtree[cur].unwrap();\n\n res[i].insert(dom);\n\n cur = dom;\n", "file_path": "src/diff.rs", "rank": 81, "score": 4.385635653679623 }, { "content": " }\n\n fn forward_eval_dp_internal(\n\n &self,\n\n cur: usize,\n\n vars: &Vec<Var>,\n\n vals: &Vec<f64>,\n\n memo: &mut HashMap<usize, f64>,\n\n ) -> f64 {\n\n if let Some(v) = memo.get(&cur) {\n\n *v\n\n } else if cur == self.root {\n\n 1.\n\n } else {\n\n let mut res = 0.;\n\n for Edge { to: next, exp } in &self.reverse_graph[cur] {\n\n let temp = exp.eval_internal(vars, vals);\n\n res += self.forward_eval_dp_internal(*next, vars, vals, memo) * temp;\n\n }\n\n memo.insert(cur, res);\n\n res\n\n }\n\n }\n\n}\n\n\n\n#[test]\n", "file_path": "src/diff.rs", "rank": 82, "score": 4.138405566710087 }, { "content": " }\n\n Err(_) => panic!(\"failed to parse variables\"),\n\n }\n\n varvec.sort();\n\n self.eval_internal(&varvec, vals)\n\n }\n\n pub fn eval_internal(&self, vars: &Vec<Var>, vals: &Vec<f64>) -> f64 {\n\n match self {\n\n Expr::UnOp { op, exp } => match op {\n\n Uop::Sin => exp.eval_internal(vars, vals).sin(),\n\n Uop::Cos => exp.eval_internal(vars, vals).cos(),\n\n Uop::Tan => exp.eval_internal(vars, vals).tan(),\n\n Uop::Log 
=> exp.eval_internal(vars, vals).log(std::f64::consts::E),\n\n Uop::Exp => std::f64::consts::E.powf(exp.eval_internal(vars, vals)),\n\n Uop::Neg => -exp.eval_internal(vars, vals),\n\n },\n\n Expr::BinOp { op, exp1, exp2 } => match op {\n\n Bop::Add => exp1.eval_internal(vars, vals) + exp2.eval_internal(vars, vals),\n\n Bop::Sub => exp1.eval_internal(vars, vals) - exp2.eval_internal(vars, vals),\n\n Bop::Mul => exp1.eval_internal(vars, vals) * exp2.eval_internal(vars, vals),\n", "file_path": "src/expr.rs", "rank": 83, "score": 4.066007024087751 }, { "content": " let xs = String::from(\"x\");\n\n let v = e.borrow().rev_vars[&xs];\n\n use std::time;\n\n let now = time::Instant::now();\n\n let mut cnt = 0;\n\n let x = 1.;\n\n println!(\"differentiation w.r.t single variable in {} sec\", sec_max);\n\n while now.elapsed().as_secs() < sec_max {\n\n naive_d.eval(&var, &vec![x], e);\n\n cnt += 1;\n\n }\n\n println!(\"naive expression tree walk: {} times\", cnt);\n\n\n\n cnt = 0;\n\n let now = time::Instant::now();\n\n while now.elapsed().as_secs() < sec_max {\n\n d_for_dp.forward_eval_dp(v, &var, &vec![x], e);\n\n cnt += 1;\n\n }\n\n println!(\"derivative graph: {} times\", cnt);\n", "file_path": "src/lib.rs", "rank": 84, "score": 3.850307074543093 }, { "content": " }\n\n }\n\n let mut res = vec![];\n\n // domなら fd > n\n\n for fd in factor_dom_nodes {\n\n for n in 0..self.size {\n\n if n != fd && doms[n].contains(&fd) && 2 <= self.reverse_graph[n].len() {\n\n res.push((fd, n));\n\n }\n\n }\n\n }\n\n // pdomなら fpd < n\n\n for fpd in factor_pdom_nodes {\n\n for n in 0..self.size {\n\n if n != fpd && pdoms[n].contains(&fpd) && 2 <= self.graph[n].len() {\n\n res.push((fpd, n));\n\n }\n\n }\n\n }\n\n res.sort_by(|(x1, y1), (x2, y2)| {\n", "file_path": "src/diff.rs", "rank": 85, "score": 3.6177677166041797 }, { "content": " // println!(\"{:?}\", (l, m));\n\n // let t;\n\n // if m < 0 {\n\n // m = -m;\n\n // t = Expr::new_binop(\n\n // Bop::Mul,\n\n // p(l, m, 
Expr::new_var(\"z\".to_owned(), env), env),\n\n // s(m, env),\n\n // env,\n\n // );\n\n // } else {\n\n // t = Expr::new_binop(\n\n // Bop::Mul,\n\n // p(l, m, Expr::new_var(\"z\".to_owned(), env), env),\n\n // c(m, env),\n\n // env,\n\n // );\n\n // }\n\n // println!(\"{:?}\", \"yeaw\");\n\n // Expr::new_binop(Bop::Mul, n(l, m, env), t, env)\n", "file_path": "src/lib.rs", "rank": 86, "score": 3.5394573685144244 }, { "content": " // let a1 = Expr::new_num((2 * l - 1) / l - m, env);\n\n // let b1 = Expr::new_binop(Bop::Mul, a1, z.clone(), env);\n\n // let c = Expr::new_binop(Bop::Mul, b1, p(l - 1, m, z.clone(), env), env);\n\n // let a2 = Expr::new_num((l + m - 1) / l - m, env);\n\n // let b2 = Expr::new_binop(Bop::Mul, a2, p(l - 2, m, z, env), env);\n\n // Expr::new_binop(Bop::Sub, c, b2, env).reduce(env)\n\n // }\n\n // }\n\n // fn s(m: i64, env: &Env) -> Rc<Expr> {\n\n // if m == 0 {\n\n // Expr::new_num(0, env)\n\n // } else {\n\n // let x = Expr::new_var(\"x\".to_owned(), env);\n\n // let y = Expr::new_var(\"y\".to_owned(), env);\n\n // let a = Expr::new_binop(Bop::Mul, x, c(m - 1, env), env);\n\n // let b = Expr::new_binop(Bop::Mul, y, s(m - 1, env), env);\n\n // Expr::new_binop(Bop::Sub, a, b, env).reduce(env)\n\n // }\n\n // }\n\n // fn c(m: i64, env: &Env) -> Rc<Expr> {\n", "file_path": "src/lib.rs", "rank": 87, "score": 3.5291397245868743 }, { "content": " // if m == 0 {\n\n // Expr::new_num(1, env).reduce(env)\n\n // } else {\n\n // let x = Expr::new_var(\"x\".to_owned(), env);\n\n // let y = Expr::new_var(\"y\".to_owned(), env);\n\n // let a = Expr::new_binop(Bop::Mul, x, s(m - 1, env), env);\n\n // let b = Expr::new_binop(Bop::Mul, y, c(m - 1, env), env);\n\n // Expr::new_binop(Bop::Add, a, b, env).reduce(env)\n\n // }\n\n // }\n\n // fn fact(n: i64) -> i64 {\n\n // if n == 0 {\n\n // 1\n\n // } else {\n\n // n * fact(n - 1)\n\n // }\n\n // }\n\n // fn n(m: i64, l: i64, env: &Env) -> Rc<Expr> {\n\n // let p = Expr::pi(env);\n\n // if m == 0 {\n", 
"file_path": "src/lib.rs", "rank": 88, "score": 3.5271079943010766 }, { "content": " Expr::Var(vv) => vv,\n\n _ => unreachable!(),\n\n })\n\n .collect();\n\n }\n\n Err(_) => panic!(\"failed to parse variables\"),\n\n }\n\n varvec.sort();\n\n self.forward_eval_internal(self.vars[&v], &varvec, vals)\n\n }\n\n fn forward_eval_internal(&self, cur: usize, vars: &Vec<Var>, vals: &Vec<f64>) -> f64 {\n\n if cur == self.root {\n\n 1.\n\n } else {\n\n let mut res = 0.;\n\n for Edge { to: next, exp } in &self.reverse_graph[cur] {\n\n let temp = exp.eval_internal(vars, vals);\n\n res += self.forward_eval_internal(*next, vars, vals) * temp;\n\n }\n\n res\n", "file_path": "src/diff.rs", "rank": 89, "score": 3.462131318319963 }, { "content": " }\n\n Expr::Var(vt) => {\n\n if *vt == v {\n\n vec![Expr::new_num(1, e)]\n\n } else {\n\n vec![Expr::new_num(0, e)]\n\n }\n\n }\n\n Expr::Num(_n) => vec![Expr::new_num(0, e)],\n\n }\n\n }\n\n fn diff_internal(&self, v: Var, e: &Env) -> Rc<Expr> {\n\n match self {\n\n Expr::UnOp { op, exp: inexp } => match op {\n\n Uop::Sin => Expr::new_binop(\n\n Bop::Mul,\n\n Expr::new_unop(Uop::Cos, inexp.clone(), e),\n\n inexp.diff_internal(v, e),\n\n e,\n\n ),\n", "file_path": "src/expr.rs", "rank": 90, "score": 3.4507751529464765 }, { "content": " while now.elapsed().as_secs() < sec_max {\n\n d.forward_eval(v, &var, &vec![x], e);\n\n cnt += 1;\n\n }\n\n println!(\"derivative graph optimize: {} times\", cnt);\n\n }\n\n\n\n // fn p(l: i64, m: i64, z: Rc<Expr>, env: &Env) -> Rc<Expr> {\n\n // if l == 0 && m == 0 {\n\n // Expr::new_num(1, env)\n\n // } else if l == m {\n\n // let a = Expr::new_binop(Bop::Mul, Expr::new_num(2, env), Expr::new_num(m, env), env);\n\n // let b = Expr::new_binop(Bop::Sub, Expr::new_num(1, env), a, env);\n\n // Expr::new_binop(Bop::Mul, b, p(m - 1, m - 1, z, env), env).reduce(env)\n\n // } else if l == (m + 1) {\n\n // let a = Expr::new_binop(Bop::Mul, Expr::new_num(2, env), Expr::new_num(m, env), env);\n\n // let b = 
Expr::new_binop(Bop::Sub, a, Expr::new_num(1, env), env);\n\n // let c = Expr::new_binop(Bop::Mul, b, z.clone(), env);\n\n // Expr::new_binop(Bop::Mul, c, p(m, m, z, env), env).reduce(env)\n\n // } else {\n", "file_path": "src/lib.rs", "rank": 91, "score": 3.442408980728428 }, { "content": " }\n\n Bop::Sub => {\n\n ops = String::from(\"-\");\n\n }\n\n Bop::Mul => {\n\n ops = String::from(\"*\");\n\n }\n\n Bop::Div => {\n\n ops = String::from(\"/\");\n\n }\n\n Bop::Pow => {\n\n ops = String::from(\"^\");\n\n }\n\n }\n\n print!(\"(\");\n\n exp1.print_internal(e);\n\n print!(\"{}\", ops);\n\n exp2.print_internal(e);\n\n print!(\")\");\n\n }\n", "file_path": "src/expr.rs", "rank": 92, "score": 3.3925930101254727 }, { "content": " self.reduce_internal(e)\n\n }\n\n fn reduce_internal(&self, e: &Env) -> Rc<Expr> {\n\n match self {\n\n Expr::UnOp { op, exp: inexp } => match op {\n\n Uop::Sin => {\n\n let inexp = inexp.reduce(e);\n\n // rationalなので, 完全な定数化は無理\n\n if inexp.is_zero() {\n\n Expr::new_num(0, e)\n\n } else {\n\n Expr::new_unop(Uop::Sin, inexp, e)\n\n }\n\n }\n\n Uop::Cos => {\n\n let inexp = inexp.reduce(e);\n\n if inexp.is_zero() {\n\n Expr::new_num(1, e)\n\n } else {\n\n Expr::new_unop(Uop::Cos, inexp, e)\n", "file_path": "src/expr.rs", "rank": 93, "score": 3.3857584076950573 }, { "content": " }\n\n }\n\n for i in 0..self.reverse_graph[to].len() {\n\n if self.reverse_graph[to][i].to == from {\n\n self.reverse_graph[to].remove(i);\n\n break;\n\n }\n\n }\n\n }\n\n res = res.reduce(env);\n\n let new_edge = Edge {\n\n to: goal,\n\n exp: res.clone(),\n\n };\n\n let new_redge = Edge {\n\n to: start,\n\n exp: res,\n\n };\n\n self.graph[start].push(new_edge);\n\n self.reverse_graph[goal].push(new_redge);\n", "file_path": "src/diff.rs", "rank": 94, "score": 3.3668636076899663 }, { "content": " }\n\n Uop::Tan => {\n\n inexp.print_func(\"tan\", e);\n\n }\n\n Uop::Log => {\n\n inexp.print_func(\"log\", e);\n\n }\n\n Uop::Exp => {\n\n inexp.print_func(\"exp\", e);\n\n 
}\n\n Uop::Neg => {\n\n print!(\"-\");\n\n inexp.print_internal(e);\n\n }\n\n },\n\n Expr::BinOp { op, exp1, exp2 } => {\n\n let ops;\n\n match op {\n\n Bop::Add => {\n\n ops = String::from(\"+\");\n", "file_path": "src/expr.rs", "rank": 95, "score": 3.354267043093835 }, { "content": " } else {\n\n memo.insert((parent_id, child1_id));\n\n }\n\n Deriv::construct(&(*exp1), e, v, postids, graph, reverse_graph, leafs, memo);\n\n Deriv::construct(&(*exp2), e, v, postids, graph, reverse_graph, leafs, memo);\n\n let mut v = expr.diff_comp(v, e);\n\n assert!(v.len() == 2);\n\n let d2 = v.pop().expect(\"\");\n\n let d1 = v.pop().expect(\"\");\n\n let edge1 = Edge {\n\n to: child1_id,\n\n exp: d1.clone(),\n\n };\n\n let edge2 = Edge {\n\n to: child2_id,\n\n exp: d2.clone(),\n\n };\n\n let redge1 = Edge {\n\n to: parent_id,\n\n exp: d1,\n", "file_path": "src/diff.rs", "rank": 96, "score": 3.348471398760349 }, { "content": " }\n\n }\n\n res\n\n }\n\n\n\n fn factor_subgraphs(\n\n &self,\n\n doms: &Vec<HashSet<usize>>,\n\n pdoms: &Vec<HashSet<usize>>,\n\n ) -> Vec<(usize, usize)> {\n\n let mut factor_dom_nodes: HashSet<usize> = HashSet::new();\n\n let mut factor_pdom_nodes: HashSet<usize> = HashSet::new();\n\n // 支配木をたどってfactorを探す\n\n for i in 0..self.size {\n\n if 2 <= self.graph[i].len() {\n\n factor_dom_nodes.insert(i);\n\n }\n\n // factor_pdom\n\n if 2 <= self.reverse_graph[i].len() {\n\n factor_pdom_nodes.insert(i);\n", "file_path": "src/diff.rs", "rank": 97, "score": 3.2098174257351357 }, { "content": " res = Expr::new_binop(Bop::Sub, res, t, env);\n\n }\n\n _ => unreachable!(),\n\n }\n\n }\n\n (res, env)\n\n }\n\n })\n\n })\n\n}\n\n\n", "file_path": "src/parse.rs", "rank": 98, "score": 3.051022874699239 }, { "content": " };\n\n let redge2 = Edge {\n\n to: parent_id,\n\n exp: d2,\n\n };\n\n graph[parent_id].push(edge1);\n\n graph[parent_id].push(edge2);\n\n reverse_graph[child1_id].push(redge1);\n\n reverse_graph[child2_id].push(redge2);\n\n }\n\n Expr::Var(v) => 
drop(leafs.insert(postids[expr], Some(*v))),\n\n Expr::Num(_) => drop(leafs.insert(postids[expr], None)),\n\n }\n\n }\n\n\n\n fn intersect(mut b1: usize, mut b2: usize, doms: &Vec<Option<usize>>) -> usize {\n\n while b1 != b2 {\n\n while b1 < b2 {\n\n b1 = doms[b1].expect(\"dominator intersection failure\");\n\n }\n", "file_path": "src/diff.rs", "rank": 99, "score": 2.9749640404811486 } ]
Rust
src/message.rs
gridgentoo/rust-rdkafka
fcb0bab41894e96208fd745e79ce7f73e4bcebbe
use rdsys; use rdsys::types::*; use std::ffi::CStr; use std::fmt; use std::marker::PhantomData; use std::slice; use std::str; use consumer::{Consumer, ConsumerContext}; #[derive(Debug,PartialEq,Eq,Clone,Copy)] pub enum Timestamp { NotAvailable, CreateTime(i64), LogAppendTime(i64) } impl Timestamp { pub fn to_millis(&self) -> Option<i64> { match *self { Timestamp::NotAvailable => None, Timestamp::CreateTime(-1) | Timestamp::LogAppendTime(-1) => None, Timestamp::CreateTime(t) | Timestamp::LogAppendTime(t) => Some(t), } } } pub trait Message { fn key(&self) -> Option<&[u8]>; fn payload(&self) -> Option<&[u8]>; fn topic(&self) -> &str; fn partition(&self) -> i32; fn offset(&self) -> i64; fn timestamp(&self) -> Timestamp; fn payload_view<P: ?Sized + FromBytes>(&self) -> Option<Result<&P, P::Error>> { self.payload().map(P::from_bytes) } fn key_view<K: ?Sized + FromBytes>(&self) -> Option<Result<&K, K::Error>> { self.key().map(K::from_bytes) } } pub struct BorrowedMessage<'a> { ptr: *mut RDKafkaMessage, _p: PhantomData<&'a u8>, } impl<'a> fmt::Debug for BorrowedMessage<'a> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "Message {{ ptr: {:?} }}", self.ptr()) } } impl<'a> BorrowedMessage<'a> { pub fn new<C, X>(ptr: *mut RDKafkaMessage, _consumer: &'a C) -> BorrowedMessage<'a> where X: ConsumerContext, C: Consumer<X> { BorrowedMessage { ptr: ptr, _p: PhantomData, } } pub fn ptr(&self) -> *mut RDKafkaMessage { self.ptr } pub fn topic_ptr(&self) -> *mut RDKafkaTopic { unsafe { (*self.ptr).rkt } } pub fn key_len(&self) -> usize { unsafe { (*self.ptr).key_len } } pub fn payload_len(&self) -> usize { unsafe { (*self.ptr).len } } pub fn detach(&self) -> OwnedMessage { OwnedMessage { key: self.key().map(|k| k.to_vec()), payload: self.payload().map(|p| p.to_vec()), topic: self.topic().to_owned(), timestamp: self.timestamp(), partition: self.partition(), offset: self.offset(), } } } impl<'a> Message for BorrowedMessage<'a> { fn key(&self) -> Option<&[u8]> { unsafe 
{ if (*self.ptr).key.is_null() { None } else { Some(slice::from_raw_parts::<u8>((*self.ptr).key as *const u8, (*self.ptr).key_len)) } } } fn payload(&self) -> Option<&[u8]> { unsafe { if (*self.ptr).payload.is_null() { None } else { Some(slice::from_raw_parts::<u8>((*self.ptr).payload as *const u8, (*self.ptr).len)) } } } fn topic(&self) -> &str { unsafe { CStr::from_ptr(rdsys::rd_kafka_topic_name((*self.ptr).rkt)) .to_str() .expect("Topic name is not valid UTF-8") } } fn partition(&self) -> i32 { unsafe { (*self.ptr).partition } } fn offset(&self) -> i64 { unsafe { (*self.ptr).offset } } fn timestamp(&self) -> Timestamp { let mut timestamp_type = rdsys::rd_kafka_timestamp_type_t::RD_KAFKA_TIMESTAMP_NOT_AVAILABLE; let timestamp = unsafe { rdsys::rd_kafka_message_timestamp( self.ptr, &mut timestamp_type ) }; match timestamp_type { rdsys::rd_kafka_timestamp_type_t::RD_KAFKA_TIMESTAMP_NOT_AVAILABLE => Timestamp::NotAvailable, rdsys::rd_kafka_timestamp_type_t::RD_KAFKA_TIMESTAMP_CREATE_TIME => Timestamp::CreateTime(timestamp), rdsys::rd_kafka_timestamp_type_t::RD_KAFKA_TIMESTAMP_LOG_APPEND_TIME => Timestamp::LogAppendTime(timestamp) } } } impl<'a> Drop for BorrowedMessage<'a> { fn drop(&mut self) { trace!("Destroying message {:?}", self); unsafe { rdsys::rd_kafka_message_destroy(self.ptr) }; } } pub struct OwnedMessage { payload: Option<Vec<u8>>, key: Option<Vec<u8>>, topic: String, timestamp: Timestamp, partition: i32, offset: i64 } impl OwnedMessage { pub fn new( payload: Option<Vec<u8>>, key: Option<Vec<u8>>, topic: String, timestamp: Timestamp, partition: i32, offset: i64 ) -> OwnedMessage { OwnedMessage { payload: payload, key: key, topic: topic, timestamp: timestamp, partition: partition, offset: offset } } } impl Message for OwnedMessage { fn key(&self) -> Option<&[u8]> { match self.key { Some(ref k) => Some(k.as_slice()), None => None, } } fn payload(&self) -> Option<&[u8]> { match self.payload { Some(ref p) => Some(p.as_slice()), None => None, } } fn 
topic(&self) -> &str { self.topic.as_ref() } fn partition(&self) -> i32 { self.partition } fn offset(&self) -> i64 { self.offset } fn timestamp(&self) -> Timestamp { self.timestamp } } pub trait FromBytes { type Error; fn from_bytes(&[u8]) -> Result<&Self, Self::Error>; } impl FromBytes for [u8] { type Error = (); fn from_bytes(bytes: &[u8]) -> Result<&Self, Self::Error> { Ok(bytes) } } impl FromBytes for str { type Error = str::Utf8Error; fn from_bytes(bytes: &[u8]) -> Result<&Self, Self::Error> { str::from_utf8(bytes) } } pub trait ToBytes { fn to_bytes(&self) -> &[u8]; } impl ToBytes for [u8] { fn to_bytes(&self) -> &[u8] { self } } impl ToBytes for str { fn to_bytes(&self) -> &[u8] { self.as_bytes() } } impl ToBytes for Vec<u8> { fn to_bytes(&self) -> &[u8] { self.as_slice() } } impl ToBytes for String { fn to_bytes(&self) -> &[u8] { self.as_bytes() } } impl<'a, T: ToBytes> ToBytes for &'a T { fn to_bytes(&self) -> &[u8] { (*self).to_bytes() } } impl ToBytes for () { fn to_bytes(&self) -> &[u8] { &[] } }
use rdsys; use rdsys::types::*; use std::ffi::CStr; use std::fmt; use std::marker::PhantomData; use std::slice; use std::str; use consumer::{Consumer, ConsumerContext}; #[derive(Debug,PartialEq,Eq,Clone,Copy)] pub enum Timestamp { NotAvailable, CreateTime(i64), LogAppendTime(i64) } impl Timestamp { pub fn to_millis(&self) -> Option<i64> { match *self { Timestamp::NotAvailable => None, Timestamp::CreateTime(-1) | Timestamp::LogAppendTime(-1) => None, Timestamp::CreateTime(t) | Timestamp::LogAppendTime(t) => Some(t), } } } pub trait Message { fn key(&self) -> Option<&[u8]>; fn payload(&self) -> Option<&[u8]>; fn topic(&self) -> &str; fn partition(&self) -> i32; fn offset(&self) -> i64; fn timestamp(&self) -> Timestamp; fn payload_view<P: ?Sized + FromBytes>(&self) -> Option<Result<&P, P::Error>> { self.payload().map(P::from_bytes) } fn key_view<K: ?Sized + FromBytes>(&self) -> Option<Result<&K, K::Error>> { self.key().map(K::from_bytes) } } pub stru
one } else { Some(slice::from_raw_parts::<u8>((*self.ptr).payload as *const u8, (*self.ptr).len)) } } } fn topic(&self) -> &str { unsafe { CStr::from_ptr(rdsys::rd_kafka_topic_name((*self.ptr).rkt)) .to_str() .expect("Topic name is not valid UTF-8") } } fn partition(&self) -> i32 { unsafe { (*self.ptr).partition } } fn offset(&self) -> i64 { unsafe { (*self.ptr).offset } } fn timestamp(&self) -> Timestamp { let mut timestamp_type = rdsys::rd_kafka_timestamp_type_t::RD_KAFKA_TIMESTAMP_NOT_AVAILABLE; let timestamp = unsafe { rdsys::rd_kafka_message_timestamp( self.ptr, &mut timestamp_type ) }; match timestamp_type { rdsys::rd_kafka_timestamp_type_t::RD_KAFKA_TIMESTAMP_NOT_AVAILABLE => Timestamp::NotAvailable, rdsys::rd_kafka_timestamp_type_t::RD_KAFKA_TIMESTAMP_CREATE_TIME => Timestamp::CreateTime(timestamp), rdsys::rd_kafka_timestamp_type_t::RD_KAFKA_TIMESTAMP_LOG_APPEND_TIME => Timestamp::LogAppendTime(timestamp) } } } impl<'a> Drop for BorrowedMessage<'a> { fn drop(&mut self) { trace!("Destroying message {:?}", self); unsafe { rdsys::rd_kafka_message_destroy(self.ptr) }; } } pub struct OwnedMessage { payload: Option<Vec<u8>>, key: Option<Vec<u8>>, topic: String, timestamp: Timestamp, partition: i32, offset: i64 } impl OwnedMessage { pub fn new( payload: Option<Vec<u8>>, key: Option<Vec<u8>>, topic: String, timestamp: Timestamp, partition: i32, offset: i64 ) -> OwnedMessage { OwnedMessage { payload: payload, key: key, topic: topic, timestamp: timestamp, partition: partition, offset: offset } } } impl Message for OwnedMessage { fn key(&self) -> Option<&[u8]> { match self.key { Some(ref k) => Some(k.as_slice()), None => None, } } fn payload(&self) -> Option<&[u8]> { match self.payload { Some(ref p) => Some(p.as_slice()), None => None, } } fn topic(&self) -> &str { self.topic.as_ref() } fn partition(&self) -> i32 { self.partition } fn offset(&self) -> i64 { self.offset } fn timestamp(&self) -> Timestamp { self.timestamp } } pub trait FromBytes { type Error; fn 
from_bytes(&[u8]) -> Result<&Self, Self::Error>; } impl FromBytes for [u8] { type Error = (); fn from_bytes(bytes: &[u8]) -> Result<&Self, Self::Error> { Ok(bytes) } } impl FromBytes for str { type Error = str::Utf8Error; fn from_bytes(bytes: &[u8]) -> Result<&Self, Self::Error> { str::from_utf8(bytes) } } pub trait ToBytes { fn to_bytes(&self) -> &[u8]; } impl ToBytes for [u8] { fn to_bytes(&self) -> &[u8] { self } } impl ToBytes for str { fn to_bytes(&self) -> &[u8] { self.as_bytes() } } impl ToBytes for Vec<u8> { fn to_bytes(&self) -> &[u8] { self.as_slice() } } impl ToBytes for String { fn to_bytes(&self) -> &[u8] { self.as_bytes() } } impl<'a, T: ToBytes> ToBytes for &'a T { fn to_bytes(&self) -> &[u8] { (*self).to_bytes() } } impl ToBytes for () { fn to_bytes(&self) -> &[u8] { &[] } }
ct BorrowedMessage<'a> { ptr: *mut RDKafkaMessage, _p: PhantomData<&'a u8>, } impl<'a> fmt::Debug for BorrowedMessage<'a> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "Message {{ ptr: {:?} }}", self.ptr()) } } impl<'a> BorrowedMessage<'a> { pub fn new<C, X>(ptr: *mut RDKafkaMessage, _consumer: &'a C) -> BorrowedMessage<'a> where X: ConsumerContext, C: Consumer<X> { BorrowedMessage { ptr: ptr, _p: PhantomData, } } pub fn ptr(&self) -> *mut RDKafkaMessage { self.ptr } pub fn topic_ptr(&self) -> *mut RDKafkaTopic { unsafe { (*self.ptr).rkt } } pub fn key_len(&self) -> usize { unsafe { (*self.ptr).key_len } } pub fn payload_len(&self) -> usize { unsafe { (*self.ptr).len } } pub fn detach(&self) -> OwnedMessage { OwnedMessage { key: self.key().map(|k| k.to_vec()), payload: self.payload().map(|p| p.to_vec()), topic: self.topic().to_owned(), timestamp: self.timestamp(), partition: self.partition(), offset: self.offset(), } } } impl<'a> Message for BorrowedMessage<'a> { fn key(&self) -> Option<&[u8]> { unsafe { if (*self.ptr).key.is_null() { None } else { Some(slice::from_raw_parts::<u8>((*self.ptr).key as *const u8, (*self.ptr).key_len)) } } } fn payload(&self) -> Option<&[u8]> { unsafe { if (*self.ptr).payload.is_null() { N
random
[ { "content": "pub fn value_fn(id: i32) -> String {\n\n format!(\"Message {}\", id)\n\n}\n\n\n", "file_path": "tests/test_utils.rs", "rank": 3, "score": 121805.88676946462 }, { "content": "pub fn key_fn(id: i32) -> String {\n\n format!(\"Key {}\", id)\n\n}\n", "file_path": "tests/test_utils.rs", "rank": 4, "score": 121805.88676946462 }, { "content": "pub fn produce_messages<P, K, J, Q>(topic_name: &str, count: i32, value_fn: &P, key_fn: &K,\n\n partition: Option<i32>, timestamp: Option<i64>)\n\n -> HashMap<(i32, i64), i32>\n\n where P: Fn(i32) -> J,\n\n K: Fn(i32) -> Q,\n\n J: ToBytes,\n\n Q: ToBytes {\n\n\n\n let prod_context = TestContext { _some_data: 1234 };\n\n\n\n // Produce some messages\n\n let producer = ClientConfig::new()\n\n .set(\"bootstrap.servers\", get_bootstrap_server().as_str())\n\n .set(\"statistics.interval.ms\", \"500\")\n\n .set(\"api.version.request\", \"true\")\n\n .set_default_topic_config(TopicConfig::new()\n\n .set(\"produce.offset.report\", \"true\")\n\n .set(\"message.timeout.ms\", \"30000\")\n\n .finalize())\n\n .create_with_context::<TestContext, FutureProducer<_>>(prod_context)\n", "file_path": "tests/test_utils.rs", "rank": 5, "score": 121367.6656240116 }, { "content": "/// Create a new client based on the provided configuration.\n\npub trait FromClientConfig: Sized {\n\n fn from_config(&ClientConfig) -> KafkaResult<Self>;\n\n}\n\n\n", "file_path": "src/config.rs", "rank": 6, "score": 119390.72728800488 }, { "content": "pub fn millis_to_epoch(time: SystemTime) -> i64 {\n\n duration_to_millis(time.duration_since(UNIX_EPOCH).unwrap_or(Duration::from_secs(0))) as i64\n\n}\n\n\n\n\n\n// TODO: check if the implementation returns a copy of the data and update the documentation\n\n/// Converts a byte array representing a C string into a String.\n\npub unsafe fn bytes_cstr_to_owned(bytes_cstr: &[i8]) -> String {\n\n CStr::from_ptr(bytes_cstr.as_ptr()).to_string_lossy().into_owned()\n\n}\n\n\n\n/// Converts a C string into a String.\n\npub 
unsafe fn cstr_to_owned(cstr: *const i8) -> String {\n\n CStr::from_ptr(cstr).to_string_lossy().into_owned()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use std::time::Duration;\n\n\n\n #[test]\n\n fn test_duration_to_millis() {\n\n assert_eq!(duration_to_millis(Duration::from_secs(1)), 1000);\n\n assert_eq!(duration_to_millis(Duration::from_millis(1500)), 1500);\n\n assert_eq!(duration_to_millis(Duration::new(5, 123_000_000)), 5123);\n\n }\n\n}\n\n\n", "file_path": "src/util.rs", "rank": 7, "score": 110910.40345464874 }, { "content": "/// Consumer specific Context. This user-defined object can be used to provide custom callbacks to\n\n/// consumer events. Refer to the list of methods to check which callbacks can be specified.\n\npub trait ConsumerContext: Context {\n\n /// Implements the default rebalancing strategy and calls the `pre_rebalance` and\n\n /// `post_rebalance` methods. If this method is overridden, it will be responsibility\n\n /// of the user to call them if needed.\n\n fn rebalance(\n\n &self,\n\n native_client: &NativeClient,\n\n err: RDKafkaRespErr,\n\n tpl: &TopicPartitionList,\n\n ) {\n\n\n\n let rebalance = match err {\n\n RDKafkaRespErr::RD_KAFKA_RESP_ERR__ASSIGN_PARTITIONS => {\n\n Rebalance::Assign(tpl)\n\n }\n\n RDKafkaRespErr::RD_KAFKA_RESP_ERR__REVOKE_PARTITIONS => Rebalance::Revoke,\n\n _ => {\n\n let error = unsafe { cstr_to_owned(rdsys::rd_kafka_err2str(err)) };\n\n error!(\"Error rebalancing: {}\", error);\n\n Rebalance::Error(error)\n", "file_path": "src/consumer/mod.rs", "rank": 8, "score": 102715.61157791971 }, { "content": "/// Verify if the value represents an error condition.\n\npub trait IsError {\n\n /// Return true if the value represents an error.\n\n fn is_error(self) -> bool;\n\n}\n\n\n\nimpl IsError for RDKafkaRespErr {\n\n fn is_error(self) -> bool {\n\n self as i32 != RDKafkaRespErr::RD_KAFKA_RESP_ERR_NO_ERROR as i32\n\n }\n\n}\n\n\n\nimpl IsError for RDKafkaConfRes {\n\n fn is_error(self) -> bool 
{\n\n self as i32 != RDKafkaConfRes::RD_KAFKA_CONF_OK as i32\n\n }\n\n}\n\n\n\n/// Represents all Kafka errors. Check the underlying `RDKafkaError` to get details.\n\npub enum KafkaError {\n\n ClientConfig(RDKafkaConfRes, String, String, String),\n", "file_path": "src/error.rs", "rank": 9, "score": 102387.95765623404 }, { "content": "/// Create a new client based on the provided configuration and context.\n\npub trait FromClientConfigAndContext<C: Context>: Sized {\n\n fn from_config_and_context(&ClientConfig, C) -> KafkaResult<Self>;\n\n}\n\n\n\n//\n\n// ********** TOPIC CONFIG **********\n\n//\n\n\n\n/// A native rdkafka-sys client config.\n\npub struct NativeTopicConfig {\n\n ptr: *mut RDKafkaTopicConf,\n\n}\n\n\n\nimpl NativeTopicConfig {\n\n /// Wraps a pointer to an `RDKafkaTopicConf` object and returns a new `NativeTopicConfig`.\n\n pub fn from_ptr(ptr: *mut RDKafkaTopicConf) -> NativeTopicConfig {\n\n NativeTopicConfig {ptr: ptr}\n\n }\n\n\n\n /// Returns the pointer to the librdkafka RDKafkaTopicConf structure.\n", "file_path": "src/config.rs", "rank": 10, "score": 100178.19675695992 }, { "content": "fn print_metadata(brokers: &str, topic: Option<&str>, timeout_ms: i32, fetch_offsets: bool) {\n\n let consumer = ClientConfig::new()\n\n .set(\"bootstrap.servers\", brokers)\n\n .create::<BaseConsumer<_>>()\n\n .expect(\"Consumer creation failed\");\n\n\n\n trace!(\"Consumer created\");\n\n\n\n let metadata = consumer.fetch_metadata(topic, timeout_ms)\n\n .expect(\"Failed to fetch metadata\");\n\n\n\n println!(\"Cluster information:\");\n\n println!(\" Broker count: {}\", metadata.brokers().len());\n\n println!(\" Topics count: {}\", metadata.topics().len());\n\n println!(\" Metadata broker name: {}\", metadata.orig_broker_name());\n\n println!(\" Metadata broker id: {}\\n\", metadata.orig_broker_id());\n\n\n\n println!(\"Brokers:\");\n\n for broker in metadata.brokers() {\n\n println!(\" Id: {} Host: {}:{} \", broker.id(), broker.host(), broker.port());\n", 
"file_path": "examples/metadata.rs", "rank": 11, "score": 98197.70976160199 }, { "content": "/// Common trait for all consumers.\n\npub trait Consumer<C: ConsumerContext> {\n\n /// Returns a reference to the BaseConsumer.\n\n fn get_base_consumer(&self) -> &BaseConsumer<C>;\n\n\n\n // Default implementations\n\n\n\n /// Subscribe the consumer to a list of topics.\n\n fn subscribe(&self, topics: &Vec<&str>) -> KafkaResult<()> {\n\n self.get_base_consumer().subscribe(topics)\n\n }\n\n\n\n /// Manually assign topics and partitions to the consumer.\n\n fn assign(&self, assignment: &TopicPartitionList) -> KafkaResult<()> {\n\n self.get_base_consumer().assign(assignment)\n\n }\n\n\n\n /// Commit offsets on broker for the provided list of partitions, or the underlying consumers state if `None`.\n\n /// If mode is set to CommitMode::Sync, the call will block until\n\n /// the message has been successfully committed.\n\n fn commit(&self, topic_partition_list: Option<&TopicPartitionList>, mode: CommitMode) -> KafkaResult<()> {\n", "file_path": "src/consumer/mod.rs", "rank": 12, "score": 93789.70567776145 }, { "content": "fn remove_pre(version: &str) -> &str {\n\n version.split('-')\n\n .next()\n\n .expect(\"Crate version is not valid\")\n\n}\n\n\n", "file_path": "rdkafka-sys/tests/version_check.rs", "rank": 13, "score": 93669.12603999044 }, { "content": "pub fn setup_logger(log_thread: bool, rust_log: Option<&str>) {\n\n let output_format = move |record: &LogRecord| {\n\n let thread_name = if log_thread {\n\n format!(\"(t: {}) \", thread::current().name().unwrap_or(\"unknown\"))\n\n } else {\n\n \"\".to_string()\n\n };\n\n\n\n let local_time: DateTime<Local> = Local::now();\n\n let time_str = local_time.format(\"%H:%M:%S%.3f\").to_string();\n\n format!(\"{} {}{} - {} - {}\", time_str, thread_name, record.level(), record.target(), record.args())\n\n };\n\n\n\n let mut builder = LogBuilder::new();\n\n builder.format(output_format).filter(None, LogLevelFilter::Info);\n\n\n\n 
rust_log.map(|conf| builder.parse(conf));\n\n\n\n builder.init().unwrap();\n\n}\n\n\n", "file_path": "examples/example_utils.rs", "rank": 14, "score": 92564.05373304326 }, { "content": "fn produce(brokers: &str, topic_name: &str) {\n\n let producer = ClientConfig::new()\n\n .set(\"bootstrap.servers\", brokers)\n\n .set_default_topic_config(TopicConfig::new()\n\n .set(\"produce.offset.report\", \"true\")\n\n .finalize())\n\n .create::<FutureProducer<_>>()\n\n .expect(\"Producer creation error\");\n\n\n\n // This loop is non blocking: all messages will be sent one after the other, without waiting\n\n // for the results.\n\n let futures = (0..5)\n\n .map(|i| {\n\n let value = format!(\"Message {}\", i);\n\n // The send operation on the topic returns a future, that will be completed once the\n\n // result or failure from Kafka will be received.\n\n producer.send_copy(topic_name, None, Some(&value), Some(&vec![0, 1, 2, 3]), None)\n\n .expect(\"Production failed\")\n\n .map(move |delivery_status| { // This will be executed onw the result is received\n\n info!(\"Delivery status for message {} received\", i);\n", "file_path": "examples/simple_producer.rs", "rank": 15, "score": 92279.87764893274 }, { "content": "fn run_command_or_fail(dir: &str, cmd: &str, args: &[&str]) {\n\n println_stderr!(\"Running command: \\\"{} {}\\\" in dir: {}\", cmd, args.join(\" \"), dir);\n\n let ret = Command::new(cmd).current_dir(dir).args(args).status();\n\n match ret.map(|status| (status.success(), status.code())) {\n\n Ok((true, _)) => { return },\n\n Ok((false, Some(c))) => { panic!(\"Command failed with error code {}\", c) },\n\n Ok((false, None)) => { panic!(\"Command got killed\") },\n\n Err(e) => { panic!(\"Command failed with error: {}\", e) },\n\n }\n\n}\n\n\n", "file_path": "rdkafka-sys/build.rs", "rank": 16, "score": 92212.92726587167 }, { "content": "/// A `ProducerContext` is a `Context` specific for producers. 
It can be used to store user-specified\n\n/// callbacks, such as `delivery`.\n\npub trait ProducerContext: Context {\n\n /// A DeliveryContext is a user-defined structure that will be passed to the producer when\n\n /// producing a message, and returned to the `delivery` method once the message has been\n\n /// delivered, or failed to.\n\n type DeliveryContext: Send + Sync;\n\n\n\n /// This method will be called once the message has been delivered (or failed to). The\n\n /// `DeliveryContext` will be the one provided by the user when calling send.\n\n fn delivery(&self, DeliveryReport, Self::DeliveryContext);\n\n}\n\n\n\n/// Simple empty producer context that can be use when the producer context is not required.\n\n#[derive(Clone)]\n\npub struct EmptyProducerContext;\n\n\n\nimpl Context for EmptyProducerContext { }\n\nimpl ProducerContext for EmptyProducerContext {\n\n type DeliveryContext = ();\n\n\n\n fn delivery(&self, _: DeliveryReport, _: Self::DeliveryContext) { }\n", "file_path": "src/producer.rs", "rank": 17, "score": 89891.03470934488 }, { "content": "// Create consumer\n\npub fn create_stream_consumer(group_id: &str, config_overrides: Option<HashMap<&'static str, &'static str>>) -> StreamConsumer<TestContext> {\n\n let cons_context = TestContext { _some_data: 64 };\n\n let mut config = ClientConfig::new();\n\n\n\n config.set(\"group.id\", group_id);\n\n config.set(\"client.id\", \"rdkafka_integration_test_client\");\n\n config.set(\"bootstrap.servers\", get_bootstrap_server().as_str());\n\n config.set(\"enable.partition.eof\", \"false\");\n\n config.set(\"session.timeout.ms\", \"6000\");\n\n config.set(\"enable.auto.commit\", \"false\");\n\n config.set(\"statistics.interval.ms\", \"500\");\n\n config.set(\"api.version.request\", \"true\");\n\n config.set_default_topic_config(\n\n TopicConfig::new()\n\n .set(\"auto.offset.reset\", \"earliest\")\n\n .finalize()\n\n );\n\n\n\n match config_overrides {\n\n Some(overrides) => {\n", "file_path": 
"tests/test_utils.rs", "rank": 18, "score": 89481.91263190439 }, { "content": "// Creates all the resources and runs the event loop. The event loop will:\n\n// 1) receive a stream of messages from the `StreamConsumer`.\n\n// 2) filter out eventual Kafka errors.\n\n// 3) send the message to a thread pool for processing.\n\n// 4) produce the result to the output topic.\n\n// Moving each message from one stage of the pipeline to next one is handled by the event loop,\n\n// that runs on a single thread. The expensive CPU-bound computation is handled by the `CpuPool`,\n\n// without blocking the event loop.\n\nfn run_async_processor(brokers: &str, group_id: &str, input_topic: &str, output_topic: &str) {\n\n // Create the event loop. The event loop will run on a single thread and drive the pipeline.\n\n let mut core = Core::new().unwrap();\n\n\n\n // Create the CPU pool, for CPU-intensive message processing.\n\n let cpu_pool = Builder::new().pool_size(4).create();\n\n\n\n // Create the `StreamConsumer`, to receive the messages from the topic in form of a `Stream`.\n\n let consumer = ClientConfig::new()\n\n .set(\"group.id\", group_id)\n\n .set(\"bootstrap.servers\", brokers)\n\n .set(\"enable.partition.eof\", \"false\")\n\n .set(\"session.timeout.ms\", \"6000\")\n\n .set(\"enable.auto.commit\", \"false\")\n\n .set_default_topic_config(TopicConfig::new()\n\n // .set(\"auto.offset.reset\", \"smallest\")\n\n .finalize())\n\n .create::<StreamConsumer<_>>()\n\n .expect(\"Consumer creation failed\");\n\n\n", "file_path": "examples/asynchronous_processing.rs", "rank": 19, "score": 88531.24747362381 }, { "content": "fn consume_and_print(brokers: &str, group_id: &str, topics: &Vec<&str>) {\n\n let context = ConsumerContextExample;\n\n\n\n let consumer = ClientConfig::new()\n\n .set(\"group.id\", group_id)\n\n .set(\"bootstrap.servers\", brokers)\n\n .set(\"enable.partition.eof\", \"false\")\n\n .set(\"session.timeout.ms\", \"6000\")\n\n .set(\"enable.auto.commit\", \"true\")\n\n 
.set(\"statistics.interval.ms\", \"5000\")\n\n .set_default_topic_config(TopicConfig::new()\n\n //.set(\"auto.offset.reset\", \"smallest\")\n\n .finalize())\n\n .set_log_level(RDKafkaLogLevel::Debug)\n\n .create_with_context::<_, LoggingConsumer>(context)\n\n .expect(\"Consumer creation failed\");\n\n\n\n consumer.subscribe(topics).expect(\"Can't subscribe to specified topics\");\n\n\n\n // consumer.start() returns a stream. The stream can be used ot chain together expensive steps,\n", "file_path": "examples/simple_consumer.rs", "rank": 20, "score": 88278.03193611946 }, { "content": "fn create_consumer(brokers: &str, group_id: &str, topic: &str) -> LoggingConsumer {\n\n let context = LoggingConsumerContext;\n\n\n\n let consumer = ClientConfig::new()\n\n .set(\"group.id\", group_id)\n\n .set(\"bootstrap.servers\", brokers)\n\n .set(\"enable.partition.eof\", \"false\")\n\n .set(\"session.timeout.ms\", \"6000\")\n\n // Commit automatically every 5 seconds.\n\n .set(\"enable.auto.commit\", \"true\")\n\n .set(\"auto.commit.interval.ms\", \"5000\")\n\n // but only commit the offsets explicitly stored via `consumer.store_offset`.\n\n .set(\"enable.auto.offset.store\", \"false\")\n\n .set_default_topic_config(TopicConfig::new()\n\n .finalize())\n\n .set_log_level(RDKafkaLogLevel::Debug)\n\n .create_with_context::<_, LoggingConsumer>(context)\n\n .expect(\"Consumer creation failed\");\n\n\n\n consumer.subscribe(&vec![topic]).expect(\"Can't subscribe to specified topic\");\n\n\n\n consumer\n\n}\n\n\n\n\n", "file_path": "examples/at_least_once.rs", "rank": 21, "score": 88278.03193611946 }, { "content": "/// This is not great. For legacy reasons some usage of rd_kafka_resp_err_t is\n\n/// passed as an integer in some places. 
There seems to be no easy way in Rust to\n\n/// match this the other way around so we do it manually.\n\npub fn primitive_to_rd_kafka_resp_err_t(error: i32) -> Option<RDKafkaRespErr> {\n\n match error {\n\n -200 => Some(RD_KAFKA_RESP_ERR__BEGIN),\n\n -199 => Some(RD_KAFKA_RESP_ERR__BAD_MSG),\n\n -198 => Some(RD_KAFKA_RESP_ERR__BAD_COMPRESSION),\n\n -197 => Some(RD_KAFKA_RESP_ERR__DESTROY),\n\n -196 => Some(RD_KAFKA_RESP_ERR__FAIL),\n\n -195 => Some(RD_KAFKA_RESP_ERR__TRANSPORT),\n\n -194 => Some(RD_KAFKA_RESP_ERR__CRIT_SYS_RESOURCE),\n\n -193 => Some(RD_KAFKA_RESP_ERR__RESOLVE),\n\n -192 => Some(RD_KAFKA_RESP_ERR__MSG_TIMED_OUT),\n\n -191 => Some(RD_KAFKA_RESP_ERR__PARTITION_EOF),\n\n -190 => Some(RD_KAFKA_RESP_ERR__UNKNOWN_PARTITION),\n\n -189 => Some(RD_KAFKA_RESP_ERR__FS),\n\n -188 => Some(RD_KAFKA_RESP_ERR__UNKNOWN_TOPIC),\n\n -187 => Some(RD_KAFKA_RESP_ERR__ALL_BROKERS_DOWN),\n\n -186 => Some(RD_KAFKA_RESP_ERR__INVALID_ARG),\n\n -185 => Some(RD_KAFKA_RESP_ERR__TIMED_OUT),\n\n -184 => Some(RD_KAFKA_RESP_ERR__QUEUE_FULL),\n\n -183 => Some(RD_KAFKA_RESP_ERR__ISR_INSUFF),\n", "file_path": "rdkafka-sys/src/helpers.rs", "rank": 22, "score": 87146.06383069967 }, { "content": "/// A Context is an object that can store user-defined data and on which callbacks can be\n\n/// defined. 
Refer to the list of methods to see which callbacks can currently be overridden.\n\n/// The context must be thread safe, and might be owned by multiple threads.\n\npub trait Context: Send + Sync {\n\n /// Receives log lines from librdkafka.\n\n fn log(&self, level: RDKafkaLogLevel, fac: &str, log_message: &str) {\n\n match level {\n\n RDKafkaLogLevel::Emerg => error!(\"librdkafka: {} {}\", fac, log_message),\n\n RDKafkaLogLevel::Alert => error!(\"librdkafka: {} {}\", fac, log_message),\n\n RDKafkaLogLevel::Critical => error!(\"librdkafka: {} {}\", fac, log_message),\n\n RDKafkaLogLevel::Error => error!(\"librdkafka: {} {}\", fac, log_message),\n\n RDKafkaLogLevel::Warning => warn!(\"librdkafka: {} {}\", fac, log_message),\n\n RDKafkaLogLevel::Notice => info!(\"librdkafka: {} {}\", fac, log_message),\n\n RDKafkaLogLevel::Info => info!(\"librdkafka: {} {}\", fac, log_message),\n\n RDKafkaLogLevel::Debug => debug!(\"librdkafka: {} {}\", fac, log_message),\n\n }\n\n }\n\n\n\n /// Receives the statistics of the librdkafka client. 
To enable, the\n\n /// \"statistics.interval.ms\" configuration parameter must be specified.\n\n fn stats(&self, statistics: Statistics) {\n\n info!(\"Client stats: {:?}\", statistics);\n\n }\n", "file_path": "src/client.rs", "rank": 23, "score": 85692.49137185767 }, { "content": "pub fn rand_test_group() -> String {\n\n let id = rand::thread_rng()\n\n .gen_ascii_chars()\n\n .take(10)\n\n .collect::<String>();\n\n format!(\"__test_{}\", id)\n\n}\n\n\n", "file_path": "tests/test_utils.rs", "rank": 24, "score": 82847.62348519586 }, { "content": "pub fn rand_test_topic() -> String {\n\n let id = rand::thread_rng()\n\n .gen_ascii_chars()\n\n .take(10)\n\n .collect::<String>();\n\n format!(\"__test_{}\", id)\n\n}\n\n\n", "file_path": "tests/test_utils.rs", "rank": 25, "score": 82847.62348519586 }, { "content": "/// Return a tuple representing the version of `librdkafka` in\n\n/// hexadecimal and string format.\n\npub fn get_rdkafka_version() -> (u16, String) {\n\n let version_number = unsafe { rdsys::rd_kafka_version() } as u16;\n\n let c_str = unsafe { CStr::from_ptr(rdsys::rd_kafka_version_str()) };\n\n (version_number, c_str.to_string_lossy().into_owned())\n\n}\n\n\n", "file_path": "src/util.rs", "rank": 26, "score": 78801.08247850352 }, { "content": "/// Converts a Duration into milliseconds\n\npub fn duration_to_millis(duration: Duration) -> u64 {\n\n let nanos = duration.subsec_nanos() as u64;\n\n duration.as_secs() * 1000 + nanos/1_000_000\n\n}\n\n\n", "file_path": "src/util.rs", "rank": 27, "score": 75717.15475093624 }, { "content": "#[test]\n\nfn test_produce_consume_with_timestamp() {\n\n let _r = env_logger::init();\n\n\n\n let topic_name = rand_test_topic();\n\n let message_map = produce_messages(&topic_name, 100, &value_fn, &key_fn, Some(0), Some(1111));\n\n let consumer = create_stream_consumer(&rand_test_group(), None);\n\n consumer.subscribe(&vec![topic_name.as_str()]).unwrap();\n\n\n\n let _consumer_future = consumer.start()\n\n .take(100)\n\n 
.for_each(|message| {\n\n match message {\n\n Ok(m) => {\n\n let id = message_map.get(&(m.partition(), m.offset())).unwrap();\n\n assert_eq!(m.timestamp(), Timestamp::CreateTime(1111));\n\n assert_eq!(m.payload_view::<str>().unwrap().unwrap(), value_fn(*id));\n\n assert_eq!(m.key_view::<str>().unwrap().unwrap(), key_fn(*id));\n\n },\n\n Err(e) => panic!(\"Error receiving message: {:?}\", e)\n\n };\n", "file_path": "tests/produce_consume_base_test.rs", "rank": 28, "score": 71747.47807305872 }, { "content": "#[test]\n\nfn test_consumer_commit_message() {\n\n let _r = env_logger::init();\n\n\n\n let topic_name = rand_test_topic();\n\n produce_messages(&topic_name, 10, &value_fn, &key_fn, Some(0), None);\n\n produce_messages(&topic_name, 11, &value_fn, &key_fn, Some(1), None);\n\n produce_messages(&topic_name, 12, &value_fn, &key_fn, Some(2), None);\n\n let consumer = create_stream_consumer(&rand_test_group(), None);\n\n consumer.subscribe(&vec![topic_name.as_str()]).unwrap();\n\n\n\n let _consumer_future = consumer.start()\n\n .take(33)\n\n .for_each(|message| {\n\n match message {\n\n Ok(m) => {\n\n if m.partition() == 1 {\n\n consumer.commit_message(&m, CommitMode::Async).unwrap();\n\n }\n\n },\n\n Err(e) => panic!(\"Error receiving message: {:?}\", e)\n", "file_path": "tests/produce_consume_base_test.rs", "rank": 29, "score": 71623.18405941201 }, { "content": "#[test]\n\nfn test_consume_with_no_message_error() {\n\n let _r = env_logger::init();\n\n\n\n let consumer = create_stream_consumer(&rand_test_group(), None);\n\n\n\n let message_stream = consumer.start_with(Duration::from_millis(200), true);\n\n\n\n let mut first_poll_time = None;\n\n let mut timeouts_count = 0;\n\n for message in message_stream.wait() {\n\n match message {\n\n Ok(Err(KafkaError::NoMessageReceived)) => {\n\n // TODO: use entry interface for Options once available\n\n if first_poll_time.is_none() {\n\n first_poll_time = Some(Instant::now());\n\n }\n\n timeouts_count += 1;\n\n if 
timeouts_count == 5 {\n\n break;\n\n }\n", "file_path": "tests/produce_consume_base_test.rs", "rank": 30, "score": 71623.18405941201 }, { "content": "fn create_producer(brokers: &str) -> FutureProducer<EmptyContext> {\n\n ClientConfig::new()\n\n .set(\"bootstrap.servers\", brokers)\n\n .set(\"queue.buffering.max.ms\", \"0\") // Do not buffer\n\n .set_default_topic_config(TopicConfig::new()\n\n .finalize())\n\n .create::<FutureProducer<EmptyContext>>()\n\n .expect(\"Producer creation failed\")\n\n}\n\n\n", "file_path": "examples/at_least_once.rs", "rank": 31, "score": 71375.3712675023 }, { "content": "#[test]\n\nfn bindgen_test_layout_rd_kafka_message_s() {\n\n assert_eq!(::std::mem::size_of::<rd_kafka_message_s>() , 72usize , concat\n\n ! ( \"Size of: \" , stringify ! ( rd_kafka_message_s ) ));\n\n assert_eq! (::std::mem::align_of::<rd_kafka_message_s>() , 8usize , concat\n\n ! ( \"Alignment of \" , stringify ! ( rd_kafka_message_s ) ));\n\n assert_eq! (unsafe {\n\n & ( * ( 0 as * const rd_kafka_message_s ) ) . err as * const _\n\n as usize } , 0usize , concat ! (\n\n \"Alignment of field: \" , stringify ! ( rd_kafka_message_s ) ,\n\n \"::\" , stringify ! ( err ) ));\n\n assert_eq! (unsafe {\n\n & ( * ( 0 as * const rd_kafka_message_s ) ) . rkt as * const _\n\n as usize } , 8usize , concat ! (\n\n \"Alignment of field: \" , stringify ! ( rd_kafka_message_s ) ,\n\n \"::\" , stringify ! ( rkt ) ));\n\n assert_eq! (unsafe {\n\n & ( * ( 0 as * const rd_kafka_message_s ) ) . partition as *\n\n const _ as usize } , 16usize , concat ! (\n\n \"Alignment of field: \" , stringify ! ( rd_kafka_message_s ) ,\n\n \"::\" , stringify ! ( partition ) ));\n", "file_path": "rdkafka-sys/src/bindings/macos.rs", "rank": 32, "score": 67484.98553599376 }, { "content": "// Emulates an expensive, synchronous computation. 
This function returns a string with the length\n\n// of the message payload, if any.\n\nfn _expensive_computation(msg: BorrowedMessage) -> String {\n\n info!(\"Starting expensive computation on message\");\n\n thread::sleep(Duration::from_millis(rand::random::<u64>() % 5000));\n\n info!(\"Expensive computation completed\");\n\n match msg.payload_view::<str>() {\n\n Some(Ok(payload)) => format!(\"Payload len for {} is {}\", payload, payload.len()),\n\n Some(Err(_)) => format!(\"Error processing message payload\"),\n\n None => format!(\"No payload\"),\n\n }\n\n}\n\n\n", "file_path": "examples/asynchronous_processing.rs", "rank": 33, "score": 63078.307862622794 }, { "content": "pub fn rd_kafka_resp_err_t_to_rdkafka_error(err: RDKafkaRespErr) -> RDKafkaError {\n\n match err {\n\n RD_KAFKA_RESP_ERR__BEGIN => Begin,\n\n RD_KAFKA_RESP_ERR__BAD_MSG => BadMessage,\n\n RD_KAFKA_RESP_ERR__BAD_COMPRESSION => BadCompression,\n\n RD_KAFKA_RESP_ERR__DESTROY => BrokerDestroy,\n\n RD_KAFKA_RESP_ERR__FAIL => Fail,\n\n RD_KAFKA_RESP_ERR__TRANSPORT => BrokerTransportFailure,\n\n RD_KAFKA_RESP_ERR__CRIT_SYS_RESOURCE => CriticalSystemResource,\n\n RD_KAFKA_RESP_ERR__RESOLVE => Resolve,\n\n RD_KAFKA_RESP_ERR__MSG_TIMED_OUT => MessageTimedOut,\n\n RD_KAFKA_RESP_ERR__PARTITION_EOF => PartitionEOF,\n\n RD_KAFKA_RESP_ERR__UNKNOWN_PARTITION => UnknownPartition,\n\n RD_KAFKA_RESP_ERR__FS => FileSystem,\n\n RD_KAFKA_RESP_ERR__UNKNOWN_TOPIC => UnknownTopic,\n\n RD_KAFKA_RESP_ERR__ALL_BROKERS_DOWN => AllBrokersDown,\n\n RD_KAFKA_RESP_ERR__INVALID_ARG => InvalidArgument,\n\n RD_KAFKA_RESP_ERR__TIMED_OUT => MessageTimedOut,\n\n RD_KAFKA_RESP_ERR__QUEUE_FULL => QueueFull,\n\n RD_KAFKA_RESP_ERR__ISR_INSUFF => ISRInsufficient,\n", "file_path": "rdkafka-sys/src/helpers.rs", "rank": 34, "score": 57773.92236006193 }, { "content": "fn main() {\n\n let matches = App::new(\"at-least-once\")\n\n .version(option_env!(\"CARGO_PKG_VERSION\").unwrap_or(\"\"))\n\n .about(\"At-least-once delivery example\")\n\n 
.arg(Arg::with_name(\"brokers\")\n\n .short(\"b\")\n\n .long(\"brokers\")\n\n .help(\"Broker list in kafka format\")\n\n .takes_value(true)\n\n .default_value(\"localhost:9092\"))\n\n .arg(Arg::with_name(\"group-id\")\n\n .short(\"g\")\n\n .long(\"group-id\")\n\n .help(\"Consumer group id\")\n\n .takes_value(true)\n\n .default_value(\"example_consumer_group_id\"))\n\n .arg(Arg::with_name(\"log-conf\")\n\n .long(\"log-conf\")\n\n .help(\"Configure the logging format (example: 'rdkafka=trace')\")\n\n .takes_value(true))\n", "file_path": "examples/at_least_once.rs", "rank": 35, "score": 55353.096607438936 }, { "content": "fn main() {\n\n let matches = App::new(\"metadata fetch example\")\n\n .version(option_env!(\"CARGO_PKG_VERSION\").unwrap_or(\"\"))\n\n .about(\"Fetch and print the cluster metadata\")\n\n .arg(Arg::with_name(\"brokers\")\n\n .short(\"b\")\n\n .long(\"brokers\")\n\n .help(\"Broker list in kafka format\")\n\n .takes_value(true)\n\n .default_value(\"localhost:9092\"))\n\n .arg(Arg::with_name(\"offsets\")\n\n .long(\"offsets\")\n\n .help(\"Enables offset fetching\"))\n\n .arg(Arg::with_name(\"topic\")\n\n .long(\"topic\")\n\n .help(\"Only fetch the metadata of the specified topic\")\n\n .takes_value(true))\n\n .arg(Arg::with_name(\"log-conf\")\n\n .long(\"log-conf\")\n\n .help(\"Configure the logging format (example: 'rdkafka=trace')\")\n", "file_path": "examples/metadata.rs", "rank": 36, "score": 55353.096607438936 }, { "content": "/// Internal consumer loop. 
This is the main body of the thread that will drive the\n\n/// stream consumer.\n\nfn poll_loop<C: ConsumerContext>(\n\n consumer: Arc<BaseConsumer<C>>,\n\n sender: mpsc::Sender<KafkaResult<PolledMessagePtr>>,\n\n should_stop: Arc<AtomicBool>,\n\n poll_interval: Duration,\n\n no_message_error: bool,\n\n) {\n\n trace!(\"Polling thread loop started\");\n\n let mut curr_sender = sender;\n\n let poll_interval_ms = duration_to_millis(poll_interval) as i32;\n\n while !should_stop.load(Ordering::Relaxed) {\n\n trace!(\"Polling base consumer\");\n\n let future_sender = match consumer.poll_raw(poll_interval_ms) {\n\n Ok(None) => {\n\n if no_message_error {\n\n curr_sender.send(Err(KafkaError::NoMessageReceived))\n\n } else {\n\n continue // TODO: check stream closed\n\n }\n\n },\n", "file_path": "src/consumer/stream_consumer.rs", "rank": 37, "score": 55303.426624216416 }, { "content": "fn main() {\n\n let matches = App::new(\"producer example\")\n\n .version(option_env!(\"CARGO_PKG_VERSION\").unwrap_or(\"\"))\n\n .about(\"Simple command line producer\")\n\n .arg(Arg::with_name(\"brokers\")\n\n .short(\"b\")\n\n .long(\"brokers\")\n\n .help(\"Broker list in kafka format\")\n\n .takes_value(true)\n\n .default_value(\"localhost:9092\"))\n\n .arg(Arg::with_name(\"log-conf\")\n\n .long(\"log-conf\")\n\n .help(\"Configure the logging format (example: 'rdkafka=trace')\")\n\n .takes_value(true))\n\n .arg(Arg::with_name(\"topic\")\n\n .short(\"t\")\n\n .long(\"topic\")\n\n .help(\"Destination topic\")\n\n .takes_value(true)\n\n .required(true))\n", "file_path": "examples/simple_producer.rs", "rank": 38, "score": 53386.58309585019 }, { "content": "#[allow(dead_code)]\n\nfn main() {\n\n println!(\"This is not an example\");\n\n}\n", "file_path": "examples/example_utils.rs", "rank": 39, "score": 53386.58309585019 }, { "content": "fn main() {\n\n let matches = App::new(\"Async example\")\n\n .version(option_env!(\"CARGO_PKG_VERSION\").unwrap_or(\"\"))\n\n .about(\"Asynchronous 
computation example\")\n\n .arg(Arg::with_name(\"brokers\")\n\n .short(\"b\")\n\n .long(\"brokers\")\n\n .help(\"Broker list in kafka format\")\n\n .takes_value(true)\n\n .default_value(\"localhost:9092\"))\n\n .arg(Arg::with_name(\"group-id\")\n\n .short(\"g\")\n\n .long(\"group-id\")\n\n .help(\"Consumer group id\")\n\n .takes_value(true)\n\n .default_value(\"example_consumer_group_id\"))\n\n .arg(Arg::with_name(\"log-conf\")\n\n .long(\"log-conf\")\n\n .help(\"Configure the logging format (example: 'rdkafka=trace')\")\n\n .takes_value(true))\n", "file_path": "examples/asynchronous_processing.rs", "rank": 40, "score": 53386.58309585019 }, { "content": "fn main() {\n\n let librdkafka_version = env!(\"CARGO_PKG_VERSION\")\n\n .split('-')\n\n .next()\n\n .expect(\"Crate version is not valid\");\n\n\n\n let pkg_probe = pkg_config::Config::new()\n\n .cargo_metadata(true)\n\n .atleast_version(librdkafka_version)\n\n .probe(\"rdkafka\");\n\n\n\n match pkg_probe {\n\n Ok(library) => {\n\n println_stderr!(\"librdkafka found on the system:\");\n\n println_stderr!(\" Name: {:?}\", library.libs);\n\n println_stderr!(\" Path: {:?}\", library.link_paths);\n\n println_stderr!(\" Version: {}\", library.version);\n\n }\n\n Err(_) => {\n\n println_stderr!(\"librdkafka not found, building\");\n\n build_librdkafka();\n\n }\n\n }\n\n}\n\n\n", "file_path": "rdkafka-sys/build.rs", "rank": 41, "score": 53386.58309585019 }, { "content": "fn main() {\n\n let matches = App::new(\"consumer example\")\n\n .version(option_env!(\"CARGO_PKG_VERSION\").unwrap_or(\"\"))\n\n .about(\"Simple command line consumer\")\n\n .arg(Arg::with_name(\"brokers\")\n\n .short(\"b\")\n\n .long(\"brokers\")\n\n .help(\"Broker list in kafka format\")\n\n .takes_value(true)\n\n .default_value(\"localhost:9092\"))\n\n .arg(Arg::with_name(\"group-id\")\n\n .short(\"g\")\n\n .long(\"group-id\")\n\n .help(\"Consumer group id\")\n\n .takes_value(true)\n\n .default_value(\"example_consumer_group_id\"))\n\n 
.arg(Arg::with_name(\"log-conf\")\n\n .long(\"log-conf\")\n\n .help(\"Configure the logging format (example: 'rdkafka=trace')\")\n\n .takes_value(true))\n", "file_path": "examples/simple_consumer.rs", "rank": 42, "score": 53386.58309585019 }, { "content": "fn build_librdkafka() {\n\n let mut configure_flags = Vec::new();\n\n\n\n if env::var(\"CARGO_FEATURE_SASL\").is_ok() {\n\n configure_flags.push(\"--enable-sasl\");\n\n } else {\n\n configure_flags.push(\"--disable-sasl\");\n\n }\n\n\n\n if env::var(\"CARGO_FEATURE_SSL\").is_ok() {\n\n configure_flags.push(\"--enable-ssl\");\n\n } else {\n\n configure_flags.push(\"--disable-ssl\");\n\n }\n\n\n\n configure_flags.push(\"--enable-static\");\n\n\n\n if !Path::new(\"librdkafka/LICENSE\").exists() {\n\n println_stderr!(\"Setting up submodules\");\n\n run_command_or_fail(\"../\", \"git\", &[\"submodule\", \"update\", \"--init\"]);\n", "file_path": "rdkafka-sys/build.rs", "rank": 43, "score": 51633.66256859439 }, { "content": "#[test]\n\nfn test_subscription() {\n\n let _r = env_logger::init();\n\n\n\n let topic_name = rand_test_topic();\n\n produce_messages(&topic_name, 10, &value_fn, &key_fn, None, None);\n\n let consumer = create_stream_consumer(&rand_test_group(), None);\n\n consumer.subscribe(&vec![topic_name.as_str()]).unwrap();\n\n\n\n let _consumer_future = consumer.start().take(10).wait();\n\n\n\n let mut tpl = TopicPartitionList::new();\n\n tpl.add_topic_unassigned(&topic_name);\n\n assert_eq!(tpl, consumer.subscription().unwrap());\n\n}\n\n\n", "file_path": "tests/produce_consume_base_test.rs", "rank": 44, "score": 48643.04156514287 }, { "content": "#[test]\n\nfn test_metadata() {\n\n let _r = env_logger::init();\n\n\n\n let topic_name = rand_test_topic();\n\n produce_messages(&topic_name, 1, &value_fn, &key_fn, Some(0), None);\n\n produce_messages(&topic_name, 1, &value_fn, &key_fn, Some(1), None);\n\n produce_messages(&topic_name, 1, &value_fn, &key_fn, Some(2), None);\n\n let consumer = 
create_stream_consumer(&rand_test_group(), None);\n\n\n\n let metadata = consumer.fetch_metadata(None, 5000).unwrap();\n\n\n\n let topic_metadata = metadata.topics().iter()\n\n .find(|m| m.name() == topic_name).unwrap();\n\n\n\n let mut ids = topic_metadata.partitions().iter().map(|p| p.id()).collect::<Vec<_>>();\n\n ids.sort();\n\n\n\n assert_eq!(ids, vec![0, 1, 2]);\n\n // assert_eq!(topic_metadata.error(), None);\n\n assert_eq!(topic_metadata.partitions().len(), 3);\n", "file_path": "tests/produce_consume_base_test.rs", "rank": 45, "score": 48643.04156514287 }, { "content": "#[test]\n\nfn check_version() {\n\n let version_str_c = unsafe { CStr::from_ptr(rdsys::rd_kafka_version_str()) };\n\n let rdsys_version = version_str_c.to_string_lossy();\n\n\n\n assert_eq!(remove_pre(&rdsys_version), remove_pre(PKG_VERSION))\n\n}\n", "file_path": "rdkafka-sys/tests/version_check.rs", "rank": 46, "score": 48643.04156514287 }, { "content": "#[test]\n\nfn test_group_membership() {\n\n let _r = env_logger::init();\n\n\n\n let topic_name = rand_test_topic();\n\n let group_name = rand_test_group();\n\n produce_messages(&topic_name, 1, &value_fn, &key_fn, Some(0), None);\n\n produce_messages(&topic_name, 1, &value_fn, &key_fn, Some(1), None);\n\n produce_messages(&topic_name, 1, &value_fn, &key_fn, Some(2), None);\n\n let consumer = create_stream_consumer(&group_name, None);\n\n consumer.subscribe(&vec![topic_name.as_str()]).unwrap();\n\n\n\n // Make sure the consumer joins the group\n\n let _consumer_future = consumer.start()\n\n .take(1)\n\n .for_each(|_| Ok(()))\n\n .wait();\n\n\n\n let group_list = consumer.fetch_group_list(None, 5000).unwrap();\n\n\n\n // Print all the data, valgrind will check memory access\n", "file_path": "tests/produce_consume_base_test.rs", "rank": 47, "score": 47357.22038209281 }, { "content": "#[test]\n\nfn test_produce_partition() {\n\n let _r = env_logger::init();\n\n\n\n let topic_name = rand_test_topic();\n\n let message_map = 
produce_messages(&topic_name, 100, &value_fn, &key_fn, Some(0), None);\n\n\n\n let res = message_map.iter()\n\n .filter(|&(&(partition, _), _)| partition == 0)\n\n .count();\n\n\n\n assert_eq!(res, 100);\n\n}\n\n\n\n// All produced messages should be consumed.\n", "file_path": "tests/produce_consume_base_test.rs", "rank": 48, "score": 47357.22038209281 }, { "content": "#[test]\n\nfn bindgen_test_layout___sFILE() {\n\n assert_eq!(::std::mem::size_of::<__sFILE>() , 152usize , concat ! (\n\n \"Size of: \" , stringify ! ( __sFILE ) ));\n\n assert_eq! (::std::mem::align_of::<__sFILE>() , 8usize , concat ! (\n\n \"Alignment of \" , stringify ! ( __sFILE ) ));\n\n assert_eq! (unsafe {\n\n & ( * ( 0 as * const __sFILE ) ) . _p as * const _ as usize }\n\n , 0usize , concat ! (\n\n \"Alignment of field: \" , stringify ! ( __sFILE ) , \"::\" ,\n\n stringify ! ( _p ) ));\n\n assert_eq! (unsafe {\n\n & ( * ( 0 as * const __sFILE ) ) . _r as * const _ as usize }\n\n , 8usize , concat ! (\n\n \"Alignment of field: \" , stringify ! ( __sFILE ) , \"::\" ,\n\n stringify ! ( _r ) ));\n\n assert_eq! (unsafe {\n\n & ( * ( 0 as * const __sFILE ) ) . _w as * const _ as usize }\n\n , 12usize , concat ! (\n\n \"Alignment of field: \" , stringify ! ( __sFILE ) , \"::\" ,\n\n stringify ! ( _w ) ));\n", "file_path": "rdkafka-sys/src/bindings/macos.rs", "rank": 49, "score": 46186.13473124789 }, { "content": "#[test]\n\nfn bindgen_test_layout___mbstate_t() {\n\n assert_eq!(::std::mem::size_of::<__mbstate_t>() , 128usize , concat ! (\n\n \"Size of: \" , stringify ! ( __mbstate_t ) ));\n\n assert_eq! (::std::mem::align_of::<__mbstate_t>() , 8usize , concat ! (\n\n \"Alignment of \" , stringify ! ( __mbstate_t ) ));\n\n assert_eq! (unsafe {\n\n & ( * ( 0 as * const __mbstate_t ) ) . __mbstate8 as * const _\n\n as usize } , 0usize , concat ! (\n\n \"Alignment of field: \" , stringify ! ( __mbstate_t ) , \"::\" ,\n\n stringify ! ( __mbstate8 ) ));\n\n assert_eq! 
(unsafe {\n\n & ( * ( 0 as * const __mbstate_t ) ) . _mbstateL as * const _\n\n as usize } , 0usize , concat ! (\n\n \"Alignment of field: \" , stringify ! ( __mbstate_t ) , \"::\" ,\n\n stringify ! ( _mbstateL ) ));\n\n}\n\nimpl Clone for __mbstate_t {\n\n fn clone(&self) -> Self { *self }\n\n}\n\npub type __darwin_mbstate_t = __mbstate_t;\n", "file_path": "rdkafka-sys/src/bindings/macos.rs", "rank": 50, "score": 46186.13473124789 }, { "content": "#[test]\n\nfn bindgen_test_layout_msghdr() {\n\n assert_eq!(::std::mem::size_of::<msghdr>() , 48usize , concat ! (\n\n \"Size of: \" , stringify ! ( msghdr ) ));\n\n assert_eq! (::std::mem::align_of::<msghdr>() , 8usize , concat ! (\n\n \"Alignment of \" , stringify ! ( msghdr ) ));\n\n assert_eq! (unsafe {\n\n & ( * ( 0 as * const msghdr ) ) . msg_name as * const _ as\n\n usize } , 0usize , concat ! (\n\n \"Alignment of field: \" , stringify ! ( msghdr ) , \"::\" ,\n\n stringify ! ( msg_name ) ));\n\n assert_eq! (unsafe {\n\n & ( * ( 0 as * const msghdr ) ) . msg_namelen as * const _ as\n\n usize } , 8usize , concat ! (\n\n \"Alignment of field: \" , stringify ! ( msghdr ) , \"::\" ,\n\n stringify ! ( msg_namelen ) ));\n\n assert_eq! (unsafe {\n\n & ( * ( 0 as * const msghdr ) ) . msg_iov as * const _ as\n\n usize } , 16usize , concat ! (\n\n \"Alignment of field: \" , stringify ! ( msghdr ) , \"::\" ,\n\n stringify ! ( msg_iov ) ));\n", "file_path": "rdkafka-sys/src/bindings/macos.rs", "rank": 51, "score": 46186.13473124789 }, { "content": "#[test]\n\nfn bindgen_test_layout_sockproto() {\n\n assert_eq!(::std::mem::size_of::<sockproto>() , 4usize , concat ! (\n\n \"Size of: \" , stringify ! ( sockproto ) ));\n\n assert_eq! (::std::mem::align_of::<sockproto>() , 2usize , concat ! (\n\n \"Alignment of \" , stringify ! ( sockproto ) ));\n\n assert_eq! (unsafe {\n\n & ( * ( 0 as * const sockproto ) ) . sp_family as * const _ as\n\n usize } , 0usize , concat ! (\n\n \"Alignment of field: \" , stringify ! 
( sockproto ) , \"::\" ,\n\n stringify ! ( sp_family ) ));\n\n assert_eq! (unsafe {\n\n & ( * ( 0 as * const sockproto ) ) . sp_protocol as * const _\n\n as usize } , 2usize , concat ! (\n\n \"Alignment of field: \" , stringify ! ( sockproto ) , \"::\" ,\n\n stringify ! ( sp_protocol ) ));\n\n}\n\nimpl Clone for sockproto {\n\n fn clone(&self) -> Self { *self }\n\n}\n\n#[repr(C)]\n\npub struct sockaddr_storage {\n\n pub ss_len: __uint8_t,\n\n pub ss_family: sa_family_t,\n\n pub __ss_pad1: [::std::os::raw::c_char; 6usize],\n\n pub __ss_align: __int64_t,\n\n pub __ss_pad2: [::std::os::raw::c_char; 112usize],\n\n}\n", "file_path": "rdkafka-sys/src/bindings/macos.rs", "rank": 52, "score": 46186.13473124789 }, { "content": "#[test]\n\nfn bindgen_test_layout_imaxdiv_t() {\n\n assert_eq!(::std::mem::size_of::<imaxdiv_t>() , 16usize , concat ! (\n\n \"Size of: \" , stringify ! ( imaxdiv_t ) ));\n\n assert_eq! (::std::mem::align_of::<imaxdiv_t>() , 8usize , concat ! (\n\n \"Alignment of \" , stringify ! ( imaxdiv_t ) ));\n\n assert_eq! (unsafe {\n\n & ( * ( 0 as * const imaxdiv_t ) ) . quot as * const _ as\n\n usize } , 0usize , concat ! (\n\n \"Alignment of field: \" , stringify ! ( imaxdiv_t ) , \"::\" ,\n\n stringify ! ( quot ) ));\n\n assert_eq! (unsafe {\n\n & ( * ( 0 as * const imaxdiv_t ) ) . rem as * const _ as usize\n\n } , 8usize , concat ! (\n\n \"Alignment of field: \" , stringify ! ( imaxdiv_t ) , \"::\" ,\n\n stringify ! ( rem ) ));\n\n}\n\nimpl Clone for imaxdiv_t {\n\n fn clone(&self) -> Self { *self }\n\n}\n\nextern \"C\" {\n", "file_path": "rdkafka-sys/src/bindings/macos.rs", "rank": 53, "score": 46186.13473124789 }, { "content": "#[test]\n\nfn bindgen_test_layout_cmsghdr() {\n\n assert_eq!(::std::mem::size_of::<cmsghdr>() , 12usize , concat ! (\n\n \"Size of: \" , stringify ! ( cmsghdr ) ));\n\n assert_eq! (::std::mem::align_of::<cmsghdr>() , 4usize , concat ! (\n\n \"Alignment of \" , stringify ! ( cmsghdr ) ));\n\n assert_eq! 
(unsafe {\n\n & ( * ( 0 as * const cmsghdr ) ) . cmsg_len as * const _ as\n\n usize } , 0usize , concat ! (\n\n \"Alignment of field: \" , stringify ! ( cmsghdr ) , \"::\" ,\n\n stringify ! ( cmsg_len ) ));\n\n assert_eq! (unsafe {\n\n & ( * ( 0 as * const cmsghdr ) ) . cmsg_level as * const _ as\n\n usize } , 4usize , concat ! (\n\n \"Alignment of field: \" , stringify ! ( cmsghdr ) , \"::\" ,\n\n stringify ! ( cmsg_level ) ));\n\n assert_eq! (unsafe {\n\n & ( * ( 0 as * const cmsghdr ) ) . cmsg_type as * const _ as\n\n usize } , 8usize , concat ! (\n\n \"Alignment of field: \" , stringify ! ( cmsghdr ) , \"::\" ,\n\n stringify ! ( cmsg_type ) ));\n", "file_path": "rdkafka-sys/src/bindings/macos.rs", "rank": 54, "score": 46186.13473124789 }, { "content": "#[test]\n\nfn test_produce_consume_base() {\n\n let _r = env_logger::init();\n\n\n\n let topic_name = rand_test_topic();\n\n let message_map = produce_messages(&topic_name, 100, &value_fn, &key_fn, None, None);\n\n let consumer = create_stream_consumer(&rand_test_group(), None);\n\n consumer.subscribe(&vec![topic_name.as_str()]).unwrap();\n\n\n\n let _consumer_future = consumer.start()\n\n .take(100)\n\n .for_each(|message| {\n\n match message {\n\n Ok(m) => {\n\n let id = message_map.get(&(m.partition(), m.offset())).unwrap();\n\n match m.timestamp() {\n\n Timestamp::CreateTime(timestamp) => assert!(timestamp > 1489495183000),\n\n _ => panic!(\"Expected createtime for message timestamp\")\n\n };\n\n assert_eq!(m.payload_view::<str>().unwrap().unwrap(), value_fn(*id));\n\n assert_eq!(m.key_view::<str>().unwrap().unwrap(), key_fn(*id));\n\n assert_eq!(m.topic(), topic_name.as_str());\n\n },\n\n Err(e) => panic!(\"Error receiving message: {:?}\", e)\n\n };\n\n Ok(())\n\n })\n\n .wait();\n\n}\n\n\n\n// All produced messages should be consumed.\n", "file_path": "tests/produce_consume_base_test.rs", "rank": 55, "score": 46186.13473124789 }, { "content": "#[test]\n\nfn bindgen_test_layout_sockaddr() {\n\n 
assert_eq!(::std::mem::size_of::<sockaddr>() , 16usize , concat ! (\n\n \"Size of: \" , stringify ! ( sockaddr ) ));\n\n assert_eq! (::std::mem::align_of::<sockaddr>() , 1usize , concat ! (\n\n \"Alignment of \" , stringify ! ( sockaddr ) ));\n\n assert_eq! (unsafe {\n\n & ( * ( 0 as * const sockaddr ) ) . sa_len as * const _ as\n\n usize } , 0usize , concat ! (\n\n \"Alignment of field: \" , stringify ! ( sockaddr ) , \"::\" ,\n\n stringify ! ( sa_len ) ));\n\n assert_eq! (unsafe {\n\n & ( * ( 0 as * const sockaddr ) ) . sa_family as * const _ as\n\n usize } , 1usize , concat ! (\n\n \"Alignment of field: \" , stringify ! ( sockaddr ) , \"::\" ,\n\n stringify ! ( sa_family ) ));\n\n assert_eq! (unsafe {\n\n & ( * ( 0 as * const sockaddr ) ) . sa_data as * const _ as\n\n usize } , 2usize , concat ! (\n\n \"Alignment of field: \" , stringify ! ( sockaddr ) , \"::\" ,\n\n stringify ! ( sa_data ) ));\n\n}\n\nimpl Clone for sockaddr {\n\n fn clone(&self) -> Self { *self }\n\n}\n\n#[repr(C)]\n\n#[derive(Debug, Copy)]\n\npub struct sockproto {\n\n pub sp_family: __uint16_t,\n\n pub sp_protocol: __uint16_t,\n\n}\n", "file_path": "rdkafka-sys/src/bindings/macos.rs", "rank": 56, "score": 46186.13473124789 }, { "content": "#[test]\n\nfn bindgen_test_layout_linger() {\n\n assert_eq!(::std::mem::size_of::<linger>() , 8usize , concat ! (\n\n \"Size of: \" , stringify ! ( linger ) ));\n\n assert_eq! (::std::mem::align_of::<linger>() , 4usize , concat ! (\n\n \"Alignment of \" , stringify ! ( linger ) ));\n\n assert_eq! (unsafe {\n\n & ( * ( 0 as * const linger ) ) . l_onoff as * const _ as\n\n usize } , 0usize , concat ! (\n\n \"Alignment of field: \" , stringify ! ( linger ) , \"::\" ,\n\n stringify ! ( l_onoff ) ));\n\n assert_eq! (unsafe {\n\n & ( * ( 0 as * const linger ) ) . l_linger as * const _ as\n\n usize } , 4usize , concat ! (\n\n \"Alignment of field: \" , stringify ! ( linger ) , \"::\" ,\n\n stringify ! 
( l_linger ) ));\n\n}\n\nimpl Clone for linger {\n\n fn clone(&self) -> Self { *self }\n\n}\n\n#[repr(C)]\n\n#[derive(Debug, Copy)]\n\npub struct so_np_extensions {\n\n pub npx_flags: u_int32_t,\n\n pub npx_mask: u_int32_t,\n\n}\n", "file_path": "rdkafka-sys/src/bindings/macos.rs", "rank": 57, "score": 46186.13473124789 }, { "content": "#[test]\n\nfn bindgen_test_layout___sbuf() {\n\n assert_eq!(::std::mem::size_of::<__sbuf>() , 16usize , concat ! (\n\n \"Size of: \" , stringify ! ( __sbuf ) ));\n\n assert_eq! (::std::mem::align_of::<__sbuf>() , 8usize , concat ! (\n\n \"Alignment of \" , stringify ! ( __sbuf ) ));\n\n assert_eq! (unsafe {\n\n & ( * ( 0 as * const __sbuf ) ) . _base as * const _ as usize\n\n } , 0usize , concat ! (\n\n \"Alignment of field: \" , stringify ! ( __sbuf ) , \"::\" ,\n\n stringify ! ( _base ) ));\n\n assert_eq! (unsafe {\n\n & ( * ( 0 as * const __sbuf ) ) . _size as * const _ as usize\n\n } , 8usize , concat ! (\n\n \"Alignment of field: \" , stringify ! ( __sbuf ) , \"::\" ,\n\n stringify ! ( _size ) ));\n\n}\n\nimpl Clone for __sbuf {\n\n fn clone(&self) -> Self { *self }\n\n}\n\n#[repr(C)]\n", "file_path": "rdkafka-sys/src/bindings/macos.rs", "rank": 58, "score": 46186.13473124789 }, { "content": "#[test]\n\nfn bindgen_test_layout_iovec() {\n\n assert_eq!(::std::mem::size_of::<iovec>() , 16usize , concat ! (\n\n \"Size of: \" , stringify ! ( iovec ) ));\n\n assert_eq! (::std::mem::align_of::<iovec>() , 8usize , concat ! (\n\n \"Alignment of \" , stringify ! ( iovec ) ));\n\n assert_eq! (unsafe {\n\n & ( * ( 0 as * const iovec ) ) . iov_base as * const _ as\n\n usize } , 0usize , concat ! (\n\n \"Alignment of field: \" , stringify ! ( iovec ) , \"::\" ,\n\n stringify ! ( iov_base ) ));\n\n assert_eq! (unsafe {\n\n & ( * ( 0 as * const iovec ) ) . iov_len as * const _ as usize\n\n } , 8usize , concat ! (\n\n \"Alignment of field: \" , stringify ! ( iovec ) , \"::\" ,\n\n stringify ! 
( iov_len ) ));\n\n}\n\nimpl Clone for iovec {\n\n fn clone(&self) -> Self { *self }\n\n}\n\npub type sae_associd_t = __uint32_t;\n\npub type sae_connid_t = __uint32_t;\n\n#[repr(C)]\n\n#[derive(Debug, Copy)]\n\npub struct sa_endpoints {\n\n pub sae_srcif: ::std::os::raw::c_uint,\n\n pub sae_srcaddr: *const sockaddr,\n\n pub sae_srcaddrlen: socklen_t,\n\n pub sae_dstaddr: *const sockaddr,\n\n pub sae_dstaddrlen: socklen_t,\n\n}\n", "file_path": "rdkafka-sys/src/bindings/macos.rs", "rank": 59, "score": 46186.13473124789 }, { "content": "#[test]\n\nfn bindgen_test_layout__opaque_pthread_t() {\n\n assert_eq!(::std::mem::size_of::<_opaque_pthread_t>() , 8192usize , concat\n\n ! ( \"Size of: \" , stringify ! ( _opaque_pthread_t ) ));\n\n assert_eq! (::std::mem::align_of::<_opaque_pthread_t>() , 8usize , concat\n\n ! ( \"Alignment of \" , stringify ! ( _opaque_pthread_t ) ));\n\n assert_eq! (unsafe {\n\n & ( * ( 0 as * const _opaque_pthread_t ) ) . __sig as * const\n\n _ as usize } , 0usize , concat ! (\n\n \"Alignment of field: \" , stringify ! ( _opaque_pthread_t ) ,\n\n \"::\" , stringify ! ( __sig ) ));\n\n assert_eq! (unsafe {\n\n & ( * ( 0 as * const _opaque_pthread_t ) ) . __cleanup_stack\n\n as * const _ as usize } , 8usize , concat ! (\n\n \"Alignment of field: \" , stringify ! ( _opaque_pthread_t ) ,\n\n \"::\" , stringify ! ( __cleanup_stack ) ));\n\n assert_eq! (unsafe {\n\n & ( * ( 0 as * const _opaque_pthread_t ) ) . __opaque as *\n\n const _ as usize } , 16usize , concat ! (\n\n \"Alignment of field: \" , stringify ! ( _opaque_pthread_t ) ,\n\n \"::\" , stringify ! ( __opaque ) ));\n", "file_path": "rdkafka-sys/src/bindings/macos.rs", "rank": 60, "score": 45115.0835063058 }, { "content": "#[test]\n\nfn bindgen_test_layout_sockaddr_storage() {\n\n assert_eq!(::std::mem::size_of::<sockaddr_storage>() , 128usize , concat !\n\n ( \"Size of: \" , stringify ! ( sockaddr_storage ) ));\n\n assert_eq! 
(::std::mem::align_of::<sockaddr_storage>() , 8usize , concat !\n\n ( \"Alignment of \" , stringify ! ( sockaddr_storage ) ));\n\n assert_eq! (unsafe {\n\n & ( * ( 0 as * const sockaddr_storage ) ) . ss_len as * const\n\n _ as usize } , 0usize , concat ! (\n\n \"Alignment of field: \" , stringify ! ( sockaddr_storage ) ,\n\n \"::\" , stringify ! ( ss_len ) ));\n\n assert_eq! (unsafe {\n\n & ( * ( 0 as * const sockaddr_storage ) ) . ss_family as *\n\n const _ as usize } , 1usize , concat ! (\n\n \"Alignment of field: \" , stringify ! ( sockaddr_storage ) ,\n\n \"::\" , stringify ! ( ss_family ) ));\n\n assert_eq! (unsafe {\n\n & ( * ( 0 as * const sockaddr_storage ) ) . __ss_pad1 as *\n\n const _ as usize } , 2usize , concat ! (\n\n \"Alignment of field: \" , stringify ! ( sockaddr_storage ) ,\n\n \"::\" , stringify ! ( __ss_pad1 ) ));\n", "file_path": "rdkafka-sys/src/bindings/macos.rs", "rank": 61, "score": 45115.0835063058 }, { "content": "#[test]\n\nfn bindgen_test_layout_sa_endpoints() {\n\n assert_eq!(::std::mem::size_of::<sa_endpoints>() , 40usize , concat ! (\n\n \"Size of: \" , stringify ! ( sa_endpoints ) ));\n\n assert_eq! (::std::mem::align_of::<sa_endpoints>() , 8usize , concat ! (\n\n \"Alignment of \" , stringify ! ( sa_endpoints ) ));\n\n assert_eq! (unsafe {\n\n & ( * ( 0 as * const sa_endpoints ) ) . sae_srcif as * const _\n\n as usize } , 0usize , concat ! (\n\n \"Alignment of field: \" , stringify ! ( sa_endpoints ) , \"::\" ,\n\n stringify ! ( sae_srcif ) ));\n\n assert_eq! (unsafe {\n\n & ( * ( 0 as * const sa_endpoints ) ) . sae_srcaddr as * const\n\n _ as usize } , 8usize , concat ! (\n\n \"Alignment of field: \" , stringify ! ( sa_endpoints ) , \"::\" ,\n\n stringify ! ( sae_srcaddr ) ));\n\n assert_eq! (unsafe {\n\n & ( * ( 0 as * const sa_endpoints ) ) . sae_srcaddrlen as *\n\n const _ as usize } , 16usize , concat ! (\n\n \"Alignment of field: \" , stringify ! ( sa_endpoints ) , \"::\" ,\n\n stringify ! 
( sae_srcaddrlen ) ));\n", "file_path": "rdkafka-sys/src/bindings/macos.rs", "rank": 62, "score": 45115.0835063058 }, { "content": "#[test]\n\nfn test_consumer_store_offset_commit() {\n\n let _r = env_logger::init();\n\n\n\n let topic_name = rand_test_topic();\n\n produce_messages(&topic_name, 10, &value_fn, &key_fn, Some(0), None);\n\n produce_messages(&topic_name, 11, &value_fn, &key_fn, Some(1), None);\n\n produce_messages(&topic_name, 12, &value_fn, &key_fn, Some(2), None);\n\n let mut config = HashMap::new();\n\n config.insert(\"enable.auto.offset.store\", \"false\");\n\n let consumer = create_stream_consumer(&rand_test_group(), Some(config));\n\n consumer.subscribe(&vec![topic_name.as_str()]).unwrap();\n\n\n\n let _consumer_future = consumer.start()\n\n .take(33)\n\n .for_each(|message| {\n\n match message {\n\n Ok(m) => {\n\n if m.partition() == 1 {\n\n consumer.store_offset(&m).unwrap();\n\n }\n", "file_path": "tests/produce_consume_base_test.rs", "rank": 63, "score": 45115.0835063058 }, { "content": "#[test]\n\nfn bindgen_test_layout_fd_set() {\n\n assert_eq!(::std::mem::size_of::<fd_set>() , 128usize , concat ! (\n\n \"Size of: \" , stringify ! ( fd_set ) ));\n\n assert_eq! (::std::mem::align_of::<fd_set>() , 4usize , concat ! (\n\n \"Alignment of \" , stringify ! ( fd_set ) ));\n\n assert_eq! (unsafe {\n\n & ( * ( 0 as * const fd_set ) ) . fds_bits as * const _ as\n\n usize } , 0usize , concat ! (\n\n \"Alignment of field: \" , stringify ! ( fd_set ) , \"::\" ,\n\n stringify ! 
( fds_bits ) ));\n\n}\n\nimpl Clone for fd_set {\n\n fn clone(&self) -> Self { *self }\n\n}\n\npub type fd_mask = __int32_t;\n\npub type pthread_attr_t = __darwin_pthread_attr_t;\n\npub type pthread_cond_t = __darwin_pthread_cond_t;\n\npub type pthread_condattr_t = __darwin_pthread_condattr_t;\n\npub type pthread_mutex_t = __darwin_pthread_mutex_t;\n\npub type pthread_mutexattr_t = __darwin_pthread_mutexattr_t;\n", "file_path": "rdkafka-sys/src/bindings/macos.rs", "rank": 64, "score": 45115.0835063058 }, { "content": "#[test]\n\nfn bindgen_test_layout_sf_hdtr() {\n\n assert_eq!(::std::mem::size_of::<sf_hdtr>() , 32usize , concat ! (\n\n \"Size of: \" , stringify ! ( sf_hdtr ) ));\n\n assert_eq! (::std::mem::align_of::<sf_hdtr>() , 8usize , concat ! (\n\n \"Alignment of \" , stringify ! ( sf_hdtr ) ));\n\n assert_eq! (unsafe {\n\n & ( * ( 0 as * const sf_hdtr ) ) . headers as * const _ as\n\n usize } , 0usize , concat ! (\n\n \"Alignment of field: \" , stringify ! ( sf_hdtr ) , \"::\" ,\n\n stringify ! ( headers ) ));\n\n assert_eq! (unsafe {\n\n & ( * ( 0 as * const sf_hdtr ) ) . hdr_cnt as * const _ as\n\n usize } , 8usize , concat ! (\n\n \"Alignment of field: \" , stringify ! ( sf_hdtr ) , \"::\" ,\n\n stringify ! ( hdr_cnt ) ));\n\n assert_eq! (unsafe {\n\n & ( * ( 0 as * const sf_hdtr ) ) . trailers as * const _ as\n\n usize } , 16usize , concat ! (\n\n \"Alignment of field: \" , stringify ! ( sf_hdtr ) , \"::\" ,\n\n stringify ! ( trailers ) ));\n", "file_path": "rdkafka-sys/src/bindings/macos.rs", "rank": 65, "score": 45115.0835063058 }, { "content": "#[test]\n\nfn bindgen_test_layout__opaque_pthread_once_t() {\n\n assert_eq!(::std::mem::size_of::<_opaque_pthread_once_t>() , 16usize ,\n\n concat ! ( \"Size of: \" , stringify ! ( _opaque_pthread_once_t )\n\n ));\n\n assert_eq! (::std::mem::align_of::<_opaque_pthread_once_t>() , 8usize ,\n\n concat ! (\n\n \"Alignment of \" , stringify ! ( _opaque_pthread_once_t ) ));\n\n assert_eq! 
(unsafe {\n\n & ( * ( 0 as * const _opaque_pthread_once_t ) ) . __sig as *\n\n const _ as usize } , 0usize , concat ! (\n\n \"Alignment of field: \" , stringify ! ( _opaque_pthread_once_t\n\n ) , \"::\" , stringify ! ( __sig ) ));\n\n assert_eq! (unsafe {\n\n & ( * ( 0 as * const _opaque_pthread_once_t ) ) . __opaque as\n\n * const _ as usize } , 8usize , concat ! (\n\n \"Alignment of field: \" , stringify ! ( _opaque_pthread_once_t\n\n ) , \"::\" , stringify ! ( __opaque ) ));\n\n}\n\nimpl Clone for _opaque_pthread_once_t {\n\n fn clone(&self) -> Self { *self }\n\n}\n\n#[repr(C)]\n\npub struct _opaque_pthread_rwlock_t {\n\n pub __sig: ::std::os::raw::c_long,\n\n pub __opaque: [::std::os::raw::c_char; 192usize],\n\n}\n", "file_path": "rdkafka-sys/src/bindings/macos.rs", "rank": 66, "score": 45115.0835063058 }, { "content": "#[test]\n\nfn test_produce_consume_base_assign() {\n\n let _r = env_logger::init();\n\n\n\n let topic_name = rand_test_topic();\n\n produce_messages(&topic_name, 10, &value_fn, &key_fn, Some(0), None);\n\n produce_messages(&topic_name, 10, &value_fn, &key_fn, Some(1), None);\n\n produce_messages(&topic_name, 10, &value_fn, &key_fn, Some(2), None);\n\n let consumer = create_stream_consumer(&rand_test_group(), None);\n\n let mut tpl = TopicPartitionList::new();\n\n tpl.add_partition_offset(&topic_name, 0, Offset::Beginning);\n\n tpl.add_partition_offset(&topic_name, 1, Offset::Offset(2));\n\n tpl.add_partition_offset(&topic_name, 2, Offset::Offset(9));\n\n consumer.assign(&tpl).unwrap();\n\n\n\n let mut partition_count = vec![0, 0, 0];\n\n\n\n let _consumer_future = consumer.start()\n\n .take(19)\n\n .for_each(|message| {\n\n match message {\n", "file_path": "tests/produce_consume_base_test.rs", "rank": 67, "score": 45115.0835063058 }, { "content": "#[test]\n\nfn bindgen_test_layout_so_np_extensions() {\n\n assert_eq!(::std::mem::size_of::<so_np_extensions>() , 8usize , concat ! (\n\n \"Size of: \" , stringify ! 
( so_np_extensions ) ));\n\n assert_eq! (::std::mem::align_of::<so_np_extensions>() , 4usize , concat !\n\n ( \"Alignment of \" , stringify ! ( so_np_extensions ) ));\n\n assert_eq! (unsafe {\n\n & ( * ( 0 as * const so_np_extensions ) ) . npx_flags as *\n\n const _ as usize } , 0usize , concat ! (\n\n \"Alignment of field: \" , stringify ! ( so_np_extensions ) ,\n\n \"::\" , stringify ! ( npx_flags ) ));\n\n assert_eq! (unsafe {\n\n & ( * ( 0 as * const so_np_extensions ) ) . npx_mask as *\n\n const _ as usize } , 4usize , concat ! (\n\n \"Alignment of field: \" , stringify ! ( so_np_extensions ) ,\n\n \"::\" , stringify ! ( npx_mask ) ));\n\n}\n\nimpl Clone for so_np_extensions {\n\n fn clone(&self) -> Self { *self }\n\n}\n\n#[repr(C)]\n\n#[derive(Debug, Copy)]\n\npub struct sockaddr {\n\n pub sa_len: __uint8_t,\n\n pub sa_family: sa_family_t,\n\n pub sa_data: [::std::os::raw::c_char; 14usize],\n\n}\n", "file_path": "rdkafka-sys/src/bindings/macos.rs", "rank": 68, "score": 45115.0835063058 }, { "content": "fn get_bootstrap_server() -> String {\n\n env::var(\"KAFKA_HOST\").unwrap_or(\"localhost:9092\".to_owned())\n\n}\n\n\n\npub struct TestContext {\n\n _some_data: i64, // Add some data so that valgrind can check proper allocation\n\n}\n\n\n\nimpl Context for TestContext {\n\n fn log(&self, _level: RDKafkaLogLevel, fac: &str, log_message: &str) {\n\n // log line received, calculate length\n\n let _n = fac.len() + log_message.len();\n\n }\n\n\n\n fn stats(&self, _: Statistics) { }\n\n}\n\n\n\nimpl ConsumerContext for TestContext {\n\n fn commit_callback(&self, result: KafkaResult<()>, _offsets: *mut rdkafka_sys::RDKafkaTopicPartitionList) {\n\n println!(\"Committing offsets: {:?}\", result);\n\n }\n\n}\n\n\n", "file_path": "tests/test_utils.rs", "rank": 69, "score": 44739.33840691677 }, { "content": "#[test]\n\nfn bindgen_test_layout__opaque_pthread_mutexattr_t() {\n\n assert_eq!(::std::mem::size_of::<_opaque_pthread_mutexattr_t>() , 16usize\n\n , concat ! 
(\n\n \"Size of: \" , stringify ! ( _opaque_pthread_mutexattr_t ) ));\n\n assert_eq! (::std::mem::align_of::<_opaque_pthread_mutexattr_t>() , 8usize\n\n , concat ! (\n\n \"Alignment of \" , stringify ! ( _opaque_pthread_mutexattr_t )\n\n ));\n\n assert_eq! (unsafe {\n\n & ( * ( 0 as * const _opaque_pthread_mutexattr_t ) ) . __sig\n\n as * const _ as usize } , 0usize , concat ! (\n\n \"Alignment of field: \" , stringify ! (\n\n _opaque_pthread_mutexattr_t ) , \"::\" , stringify ! ( __sig )\n\n ));\n\n assert_eq! (unsafe {\n\n & ( * ( 0 as * const _opaque_pthread_mutexattr_t ) ) .\n\n __opaque as * const _ as usize } , 8usize , concat ! (\n\n \"Alignment of field: \" , stringify ! (\n\n _opaque_pthread_mutexattr_t ) , \"::\" , stringify ! ( __opaque\n\n ) ));\n\n}\n\nimpl Clone for _opaque_pthread_mutexattr_t {\n\n fn clone(&self) -> Self { *self }\n\n}\n\n#[repr(C)]\n\n#[derive(Debug, Copy)]\n\npub struct _opaque_pthread_once_t {\n\n pub __sig: ::std::os::raw::c_long,\n\n pub __opaque: [::std::os::raw::c_char; 8usize],\n\n}\n", "file_path": "rdkafka-sys/src/bindings/macos.rs", "rank": 70, "score": 44131.77427065566 }, { "content": "#[test]\n\nfn bindgen_test_layout__opaque_pthread_rwlockattr_t() {\n\n assert_eq!(::std::mem::size_of::<_opaque_pthread_rwlockattr_t>() , 24usize\n\n , concat ! (\n\n \"Size of: \" , stringify ! ( _opaque_pthread_rwlockattr_t ) ));\n\n assert_eq! (::std::mem::align_of::<_opaque_pthread_rwlockattr_t>() ,\n\n 8usize , concat ! (\n\n \"Alignment of \" , stringify ! ( _opaque_pthread_rwlockattr_t )\n\n ));\n\n assert_eq! (unsafe {\n\n & ( * ( 0 as * const _opaque_pthread_rwlockattr_t ) ) . __sig\n\n as * const _ as usize } , 0usize , concat ! (\n\n \"Alignment of field: \" , stringify ! (\n\n _opaque_pthread_rwlockattr_t ) , \"::\" , stringify ! ( __sig )\n\n ));\n\n assert_eq! (unsafe {\n\n & ( * ( 0 as * const _opaque_pthread_rwlockattr_t ) ) .\n\n __opaque as * const _ as usize } , 8usize , concat ! 
(\n\n \"Alignment of field: \" , stringify ! (\n\n _opaque_pthread_rwlockattr_t ) , \"::\" , stringify ! ( __opaque\n\n ) ));\n\n}\n\nimpl Clone for _opaque_pthread_rwlockattr_t {\n\n fn clone(&self) -> Self { *self }\n\n}\n\n#[repr(C)]\n\npub struct _opaque_pthread_t {\n\n pub __sig: ::std::os::raw::c_long,\n\n pub __cleanup_stack: *mut __darwin_pthread_handler_rec,\n\n pub __opaque: [::std::os::raw::c_char; 8176usize],\n\n}\n", "file_path": "rdkafka-sys/src/bindings/macos.rs", "rank": 71, "score": 44131.77427065566 }, { "content": "#[test]\n\nfn bindgen_test_layout__opaque_pthread_attr_t() {\n\n assert_eq!(::std::mem::size_of::<_opaque_pthread_attr_t>() , 64usize ,\n\n concat ! ( \"Size of: \" , stringify ! ( _opaque_pthread_attr_t )\n\n ));\n\n assert_eq! (::std::mem::align_of::<_opaque_pthread_attr_t>() , 8usize ,\n\n concat ! (\n\n \"Alignment of \" , stringify ! ( _opaque_pthread_attr_t ) ));\n\n assert_eq! (unsafe {\n\n & ( * ( 0 as * const _opaque_pthread_attr_t ) ) . __sig as *\n\n const _ as usize } , 0usize , concat ! (\n\n \"Alignment of field: \" , stringify ! ( _opaque_pthread_attr_t\n\n ) , \"::\" , stringify ! ( __sig ) ));\n\n assert_eq! (unsafe {\n\n & ( * ( 0 as * const _opaque_pthread_attr_t ) ) . __opaque as\n\n * const _ as usize } , 8usize , concat ! (\n\n \"Alignment of field: \" , stringify ! ( _opaque_pthread_attr_t\n\n ) , \"::\" , stringify ! ( __opaque ) ));\n\n}\n\n#[repr(C)]\n\npub struct _opaque_pthread_cond_t {\n\n pub __sig: ::std::os::raw::c_long,\n\n pub __opaque: [::std::os::raw::c_char; 40usize],\n\n}\n", "file_path": "rdkafka-sys/src/bindings/macos.rs", "rank": 72, "score": 44131.77427065566 }, { "content": "#[test]\n\nfn bindgen_test_layout__opaque_pthread_rwlock_t() {\n\n assert_eq!(::std::mem::size_of::<_opaque_pthread_rwlock_t>() , 200usize ,\n\n concat ! (\n\n \"Size of: \" , stringify ! ( _opaque_pthread_rwlock_t ) ));\n\n assert_eq! (::std::mem::align_of::<_opaque_pthread_rwlock_t>() , 8usize ,\n\n concat ! 
(\n\n \"Alignment of \" , stringify ! ( _opaque_pthread_rwlock_t ) ));\n\n assert_eq! (unsafe {\n\n & ( * ( 0 as * const _opaque_pthread_rwlock_t ) ) . __sig as *\n\n const _ as usize } , 0usize , concat ! (\n\n \"Alignment of field: \" , stringify ! (\n\n _opaque_pthread_rwlock_t ) , \"::\" , stringify ! ( __sig ) ));\n\n assert_eq! (unsafe {\n\n & ( * ( 0 as * const _opaque_pthread_rwlock_t ) ) . __opaque\n\n as * const _ as usize } , 8usize , concat ! (\n\n \"Alignment of field: \" , stringify ! (\n\n _opaque_pthread_rwlock_t ) , \"::\" , stringify ! ( __opaque )\n\n ));\n\n}\n\n#[repr(C)]\n\n#[derive(Debug, Copy)]\n\npub struct _opaque_pthread_rwlockattr_t {\n\n pub __sig: ::std::os::raw::c_long,\n\n pub __opaque: [::std::os::raw::c_char; 16usize],\n\n}\n", "file_path": "rdkafka-sys/src/bindings/macos.rs", "rank": 73, "score": 44131.77427065566 }, { "content": "#[test]\n\nfn bindgen_test_layout_rd_kafka_metadata() {\n\n assert_eq!(::std::mem::size_of::<rd_kafka_metadata>() , 48usize , concat !\n\n ( \"Size of: \" , stringify ! ( rd_kafka_metadata ) ));\n\n assert_eq! (::std::mem::align_of::<rd_kafka_metadata>() , 8usize , concat\n\n ! ( \"Alignment of \" , stringify ! ( rd_kafka_metadata ) ));\n\n assert_eq! (unsafe {\n\n & ( * ( 0 as * const rd_kafka_metadata ) ) . broker_cnt as *\n\n const _ as usize } , 0usize , concat ! (\n\n \"Alignment of field: \" , stringify ! ( rd_kafka_metadata ) ,\n\n \"::\" , stringify ! ( broker_cnt ) ));\n\n assert_eq! (unsafe {\n\n & ( * ( 0 as * const rd_kafka_metadata ) ) . brokers as *\n\n const _ as usize } , 8usize , concat ! (\n\n \"Alignment of field: \" , stringify ! ( rd_kafka_metadata ) ,\n\n \"::\" , stringify ! ( brokers ) ));\n\n assert_eq! (unsafe {\n\n & ( * ( 0 as * const rd_kafka_metadata ) ) . topic_cnt as *\n\n const _ as usize } , 16usize , concat ! (\n\n \"Alignment of field: \" , stringify ! ( rd_kafka_metadata ) ,\n\n \"::\" , stringify ! 
( topic_cnt ) ));\n", "file_path": "rdkafka-sys/src/bindings/macos.rs", "rank": 74, "score": 44131.77427065566 }, { "content": "#[test]\n\nfn bindgen_test_layout__opaque_pthread_condattr_t() {\n\n assert_eq!(::std::mem::size_of::<_opaque_pthread_condattr_t>() , 16usize ,\n\n concat ! (\n\n \"Size of: \" , stringify ! ( _opaque_pthread_condattr_t ) ));\n\n assert_eq! (::std::mem::align_of::<_opaque_pthread_condattr_t>() , 8usize\n\n , concat ! (\n\n \"Alignment of \" , stringify ! ( _opaque_pthread_condattr_t )\n\n ));\n\n assert_eq! (unsafe {\n\n & ( * ( 0 as * const _opaque_pthread_condattr_t ) ) . __sig as\n\n * const _ as usize } , 0usize , concat ! (\n\n \"Alignment of field: \" , stringify ! (\n\n _opaque_pthread_condattr_t ) , \"::\" , stringify ! ( __sig )\n\n ));\n\n assert_eq! (unsafe {\n\n & ( * ( 0 as * const _opaque_pthread_condattr_t ) ) . __opaque\n\n as * const _ as usize } , 8usize , concat ! (\n\n \"Alignment of field: \" , stringify ! (\n\n _opaque_pthread_condattr_t ) , \"::\" , stringify ! ( __opaque )\n\n ));\n\n}\n\nimpl Clone for _opaque_pthread_condattr_t {\n\n fn clone(&self) -> Self { *self }\n\n}\n\n#[repr(C)]\n\npub struct _opaque_pthread_mutex_t {\n\n pub __sig: ::std::os::raw::c_long,\n\n pub __opaque: [::std::os::raw::c_char; 56usize],\n\n}\n", "file_path": "rdkafka-sys/src/bindings/macos.rs", "rank": 75, "score": 44131.77427065566 }, { "content": "#[test]\n\nfn bindgen_test_layout___va_list_tag() {\n\n assert_eq!(::std::mem::size_of::<__va_list_tag>() , 24usize , concat ! (\n\n \"Size of: \" , stringify ! ( __va_list_tag ) ));\n\n assert_eq! (::std::mem::align_of::<__va_list_tag>() , 8usize , concat ! (\n\n \"Alignment of \" , stringify ! ( __va_list_tag ) ));\n\n assert_eq! (unsafe {\n\n & ( * ( 0 as * const __va_list_tag ) ) . gp_offset as * const\n\n _ as usize } , 0usize , concat ! (\n\n \"Alignment of field: \" , stringify ! ( __va_list_tag ) , \"::\"\n\n , stringify ! ( gp_offset ) ));\n\n assert_eq! 
(unsafe {\n\n & ( * ( 0 as * const __va_list_tag ) ) . fp_offset as * const\n\n _ as usize } , 4usize , concat ! (\n\n \"Alignment of field: \" , stringify ! ( __va_list_tag ) , \"::\"\n\n , stringify ! ( fp_offset ) ));\n\n assert_eq! (unsafe {\n\n & ( * ( 0 as * const __va_list_tag ) ) . overflow_arg_area as\n\n * const _ as usize } , 8usize , concat ! (\n\n \"Alignment of field: \" , stringify ! ( __va_list_tag ) , \"::\"\n\n , stringify ! ( overflow_arg_area ) ));\n", "file_path": "rdkafka-sys/src/bindings/macos.rs", "rank": 76, "score": 44131.77427065566 }, { "content": "#[test]\n\nfn bindgen_test_layout__opaque_pthread_cond_t() {\n\n assert_eq!(::std::mem::size_of::<_opaque_pthread_cond_t>() , 48usize ,\n\n concat ! ( \"Size of: \" , stringify ! ( _opaque_pthread_cond_t )\n\n ));\n\n assert_eq! (::std::mem::align_of::<_opaque_pthread_cond_t>() , 8usize ,\n\n concat ! (\n\n \"Alignment of \" , stringify ! ( _opaque_pthread_cond_t ) ));\n\n assert_eq! (unsafe {\n\n & ( * ( 0 as * const _opaque_pthread_cond_t ) ) . __sig as *\n\n const _ as usize } , 0usize , concat ! (\n\n \"Alignment of field: \" , stringify ! ( _opaque_pthread_cond_t\n\n ) , \"::\" , stringify ! ( __sig ) ));\n\n assert_eq! (unsafe {\n\n & ( * ( 0 as * const _opaque_pthread_cond_t ) ) . __opaque as\n\n * const _ as usize } , 8usize , concat ! (\n\n \"Alignment of field: \" , stringify ! ( _opaque_pthread_cond_t\n\n ) , \"::\" , stringify ! ( __opaque ) ));\n\n}\n\n#[repr(C)]\n\n#[derive(Debug, Copy)]\n\npub struct _opaque_pthread_condattr_t {\n\n pub __sig: ::std::os::raw::c_long,\n\n pub __opaque: [::std::os::raw::c_char; 8usize],\n\n}\n", "file_path": "rdkafka-sys/src/bindings/macos.rs", "rank": 77, "score": 44131.77427065566 }, { "content": "#[test]\n\nfn bindgen_test_layout__opaque_pthread_mutex_t() {\n\n assert_eq!(::std::mem::size_of::<_opaque_pthread_mutex_t>() , 64usize ,\n\n concat ! (\n\n \"Size of: \" , stringify ! ( _opaque_pthread_mutex_t ) ));\n\n assert_eq! 
(::std::mem::align_of::<_opaque_pthread_mutex_t>() , 8usize ,\n\n concat ! (\n\n \"Alignment of \" , stringify ! ( _opaque_pthread_mutex_t ) ));\n\n assert_eq! (unsafe {\n\n & ( * ( 0 as * const _opaque_pthread_mutex_t ) ) . __sig as *\n\n const _ as usize } , 0usize , concat ! (\n\n \"Alignment of field: \" , stringify ! ( _opaque_pthread_mutex_t\n\n ) , \"::\" , stringify ! ( __sig ) ));\n\n assert_eq! (unsafe {\n\n & ( * ( 0 as * const _opaque_pthread_mutex_t ) ) . __opaque as\n\n * const _ as usize } , 8usize , concat ! (\n\n \"Alignment of field: \" , stringify ! ( _opaque_pthread_mutex_t\n\n ) , \"::\" , stringify ! ( __opaque ) ));\n\n}\n\n#[repr(C)]\n\n#[derive(Debug, Copy)]\n\npub struct _opaque_pthread_mutexattr_t {\n\n pub __sig: ::std::os::raw::c_long,\n\n pub __opaque: [::std::os::raw::c_char; 8usize],\n\n}\n", "file_path": "rdkafka-sys/src/bindings/macos.rs", "rank": 78, "score": 44131.77427065566 }, { "content": "#[test]\n\nfn bindgen_test_layout_rd_kafka_metadata_topic() {\n\n assert_eq!(::std::mem::size_of::<rd_kafka_metadata_topic>() , 32usize ,\n\n concat ! (\n\n \"Size of: \" , stringify ! ( rd_kafka_metadata_topic ) ));\n\n assert_eq! (::std::mem::align_of::<rd_kafka_metadata_topic>() , 8usize ,\n\n concat ! (\n\n \"Alignment of \" , stringify ! ( rd_kafka_metadata_topic ) ));\n\n assert_eq! (unsafe {\n\n & ( * ( 0 as * const rd_kafka_metadata_topic ) ) . topic as *\n\n const _ as usize } , 0usize , concat ! (\n\n \"Alignment of field: \" , stringify ! ( rd_kafka_metadata_topic\n\n ) , \"::\" , stringify ! ( topic ) ));\n\n assert_eq! (unsafe {\n\n & ( * ( 0 as * const rd_kafka_metadata_topic ) ) .\n\n partition_cnt as * const _ as usize } , 8usize , concat ! (\n\n \"Alignment of field: \" , stringify ! ( rd_kafka_metadata_topic\n\n ) , \"::\" , stringify ! ( partition_cnt ) ));\n\n assert_eq! (unsafe {\n\n & ( * ( 0 as * const rd_kafka_metadata_topic ) ) . partitions\n\n as * const _ as usize } , 16usize , concat ! 
(\n", "file_path": "rdkafka-sys/src/bindings/macos.rs", "rank": 79, "score": 43225.84936452133 }, { "content": "#[test]\n\nfn bindgen_test_layout_rd_kafka_group_info() {\n\n assert_eq!(::std::mem::size_of::<rd_kafka_group_info>() , 80usize , concat\n\n ! ( \"Size of: \" , stringify ! ( rd_kafka_group_info ) ));\n\n assert_eq! (::std::mem::align_of::<rd_kafka_group_info>() , 8usize ,\n\n concat ! (\n\n \"Alignment of \" , stringify ! ( rd_kafka_group_info ) ));\n\n assert_eq! (unsafe {\n\n & ( * ( 0 as * const rd_kafka_group_info ) ) . broker as *\n\n const _ as usize } , 0usize , concat ! (\n\n \"Alignment of field: \" , stringify ! ( rd_kafka_group_info ) ,\n\n \"::\" , stringify ! ( broker ) ));\n\n assert_eq! (unsafe {\n\n & ( * ( 0 as * const rd_kafka_group_info ) ) . group as *\n\n const _ as usize } , 24usize , concat ! (\n\n \"Alignment of field: \" , stringify ! ( rd_kafka_group_info ) ,\n\n \"::\" , stringify ! ( group ) ));\n\n assert_eq! (unsafe {\n\n & ( * ( 0 as * const rd_kafka_group_info ) ) . err as * const\n\n _ as usize } , 32usize , concat ! (\n\n \"Alignment of field: \" , stringify ! ( rd_kafka_group_info ) ,\n", "file_path": "rdkafka-sys/src/bindings/macos.rs", "rank": 80, "score": 43225.84936452133 }, { "content": "#[test]\n\nfn bindgen_test_layout_rd_kafka_metadata_partition() {\n\n assert_eq!(::std::mem::size_of::<rd_kafka_metadata_partition>() , 40usize\n\n , concat ! (\n\n \"Size of: \" , stringify ! ( rd_kafka_metadata_partition ) ));\n\n assert_eq! (::std::mem::align_of::<rd_kafka_metadata_partition>() , 8usize\n\n , concat ! (\n\n \"Alignment of \" , stringify ! ( rd_kafka_metadata_partition )\n\n ));\n\n assert_eq! (unsafe {\n\n & ( * ( 0 as * const rd_kafka_metadata_partition ) ) . id as *\n\n const _ as usize } , 0usize , concat ! (\n\n \"Alignment of field: \" , stringify ! (\n\n rd_kafka_metadata_partition ) , \"::\" , stringify ! ( id ) ));\n\n assert_eq! (unsafe {\n\n & ( * ( 0 as * const rd_kafka_metadata_partition ) ) . 
err as\n\n * const _ as usize } , 4usize , concat ! (\n\n \"Alignment of field: \" , stringify ! (\n\n rd_kafka_metadata_partition ) , \"::\" , stringify ! ( err ) ));\n\n assert_eq! (unsafe {\n\n & ( * ( 0 as * const rd_kafka_metadata_partition ) ) . leader\n", "file_path": "rdkafka-sys/src/bindings/macos.rs", "rank": 81, "score": 43225.84936452133 }, { "content": "#[test]\n\nfn bindgen_test_layout_rd_kafka_topic_partition_s() {\n\n assert_eq!(::std::mem::size_of::<rd_kafka_topic_partition_s>() , 64usize ,\n\n concat ! (\n\n \"Size of: \" , stringify ! ( rd_kafka_topic_partition_s ) ));\n\n assert_eq! (::std::mem::align_of::<rd_kafka_topic_partition_s>() , 8usize\n\n , concat ! (\n\n \"Alignment of \" , stringify ! ( rd_kafka_topic_partition_s )\n\n ));\n\n assert_eq! (unsafe {\n\n & ( * ( 0 as * const rd_kafka_topic_partition_s ) ) . topic as\n\n * const _ as usize } , 0usize , concat ! (\n\n \"Alignment of field: \" , stringify ! (\n\n rd_kafka_topic_partition_s ) , \"::\" , stringify ! ( topic )\n\n ));\n\n assert_eq! (unsafe {\n\n & ( * ( 0 as * const rd_kafka_topic_partition_s ) ) .\n\n partition as * const _ as usize } , 8usize , concat ! (\n\n \"Alignment of field: \" , stringify ! (\n\n rd_kafka_topic_partition_s ) , \"::\" , stringify ! ( partition\n\n ) ));\n", "file_path": "rdkafka-sys/src/bindings/macos.rs", "rank": 82, "score": 43225.84936452133 }, { "content": "#[test]\n\nfn bindgen_test_layout_rd_kafka_metadata_broker() {\n\n assert_eq!(::std::mem::size_of::<rd_kafka_metadata_broker>() , 24usize ,\n\n concat ! (\n\n \"Size of: \" , stringify ! ( rd_kafka_metadata_broker ) ));\n\n assert_eq! (::std::mem::align_of::<rd_kafka_metadata_broker>() , 8usize ,\n\n concat ! (\n\n \"Alignment of \" , stringify ! ( rd_kafka_metadata_broker ) ));\n\n assert_eq! (unsafe {\n\n & ( * ( 0 as * const rd_kafka_metadata_broker ) ) . id as *\n\n const _ as usize } , 0usize , concat ! (\n\n \"Alignment of field: \" , stringify ! 
(\n\n rd_kafka_metadata_broker ) , \"::\" , stringify ! ( id ) ));\n\n assert_eq! (unsafe {\n\n & ( * ( 0 as * const rd_kafka_metadata_broker ) ) . host as *\n\n const _ as usize } , 8usize , concat ! (\n\n \"Alignment of field: \" , stringify ! (\n\n rd_kafka_metadata_broker ) , \"::\" , stringify ! ( host ) ));\n\n assert_eq! (unsafe {\n\n & ( * ( 0 as * const rd_kafka_metadata_broker ) ) . port as *\n\n const _ as usize } , 16usize , concat ! (\n", "file_path": "rdkafka-sys/src/bindings/macos.rs", "rank": 83, "score": 43225.84936452133 }, { "content": "#[test]\n\nfn bindgen_test_layout_rd_kafka_err_desc() {\n\n assert_eq!(::std::mem::size_of::<rd_kafka_err_desc>() , 24usize , concat !\n\n ( \"Size of: \" , stringify ! ( rd_kafka_err_desc ) ));\n\n assert_eq! (::std::mem::align_of::<rd_kafka_err_desc>() , 8usize , concat\n\n ! ( \"Alignment of \" , stringify ! ( rd_kafka_err_desc ) ));\n\n assert_eq! (unsafe {\n\n & ( * ( 0 as * const rd_kafka_err_desc ) ) . code as * const _\n\n as usize } , 0usize , concat ! (\n\n \"Alignment of field: \" , stringify ! ( rd_kafka_err_desc ) ,\n\n \"::\" , stringify ! ( code ) ));\n\n assert_eq! (unsafe {\n\n & ( * ( 0 as * const rd_kafka_err_desc ) ) . name as * const _\n\n as usize } , 8usize , concat ! (\n\n \"Alignment of field: \" , stringify ! ( rd_kafka_err_desc ) ,\n\n \"::\" , stringify ! ( name ) ));\n\n assert_eq! (unsafe {\n\n & ( * ( 0 as * const rd_kafka_err_desc ) ) . desc as * const _\n\n as usize } , 16usize , concat ! (\n\n \"Alignment of field: \" , stringify ! ( rd_kafka_err_desc ) ,\n\n \"::\" , stringify ! ( desc ) ));\n", "file_path": "rdkafka-sys/src/bindings/macos.rs", "rank": 84, "score": 43225.84936452133 }, { "content": "#[test]\n\nfn bindgen_test_layout_rd_kafka_group_list() {\n\n assert_eq!(::std::mem::size_of::<rd_kafka_group_list>() , 16usize , concat\n\n ! ( \"Size of: \" , stringify ! ( rd_kafka_group_list ) ));\n\n assert_eq! 
(::std::mem::align_of::<rd_kafka_group_list>() , 8usize ,\n\n concat ! (\n\n \"Alignment of \" , stringify ! ( rd_kafka_group_list ) ));\n\n assert_eq! (unsafe {\n\n & ( * ( 0 as * const rd_kafka_group_list ) ) . groups as *\n\n const _ as usize } , 0usize , concat ! (\n\n \"Alignment of field: \" , stringify ! ( rd_kafka_group_list ) ,\n\n \"::\" , stringify ! ( groups ) ));\n\n assert_eq! (unsafe {\n\n & ( * ( 0 as * const rd_kafka_group_list ) ) . group_cnt as *\n\n const _ as usize } , 8usize , concat ! (\n\n \"Alignment of field: \" , stringify ! ( rd_kafka_group_list ) ,\n\n \"::\" , stringify ! ( group_cnt ) ));\n\n}\n\nimpl Clone for rd_kafka_group_list {\n\n fn clone(&self) -> Self { *self }\n\n}\n", "file_path": "rdkafka-sys/src/bindings/macos.rs", "rank": 85, "score": 43225.84936452133 }, { "content": "#[test]\n\nfn bindgen_test_layout___darwin_pthread_handler_rec() {\n\n assert_eq!(::std::mem::size_of::<__darwin_pthread_handler_rec>() , 24usize\n\n , concat ! (\n\n \"Size of: \" , stringify ! ( __darwin_pthread_handler_rec ) ));\n\n assert_eq! (::std::mem::align_of::<__darwin_pthread_handler_rec>() ,\n\n 8usize , concat ! (\n\n \"Alignment of \" , stringify ! ( __darwin_pthread_handler_rec )\n\n ));\n\n assert_eq! (unsafe {\n\n & ( * ( 0 as * const __darwin_pthread_handler_rec ) ) .\n\n __routine as * const _ as usize } , 0usize , concat ! (\n\n \"Alignment of field: \" , stringify ! (\n\n __darwin_pthread_handler_rec ) , \"::\" , stringify ! (\n\n __routine ) ));\n\n assert_eq! (unsafe {\n\n & ( * ( 0 as * const __darwin_pthread_handler_rec ) ) . __arg\n\n as * const _ as usize } , 8usize , concat ! (\n\n \"Alignment of field: \" , stringify ! (\n\n __darwin_pthread_handler_rec ) , \"::\" , stringify ! 
( __arg )\n\n ));\n", "file_path": "rdkafka-sys/src/bindings/macos.rs", "rank": 86, "score": 43225.84936452133 }, { "content": "#[test]\n\nfn bindgen_test_layout_rd_kafka_group_member_info() {\n\n assert_eq!(::std::mem::size_of::<rd_kafka_group_member_info>() , 56usize ,\n\n concat ! (\n\n \"Size of: \" , stringify ! ( rd_kafka_group_member_info ) ));\n\n assert_eq! (::std::mem::align_of::<rd_kafka_group_member_info>() , 8usize\n\n , concat ! (\n\n \"Alignment of \" , stringify ! ( rd_kafka_group_member_info )\n\n ));\n\n assert_eq! (unsafe {\n\n & ( * ( 0 as * const rd_kafka_group_member_info ) ) .\n\n member_id as * const _ as usize } , 0usize , concat ! (\n\n \"Alignment of field: \" , stringify ! (\n\n rd_kafka_group_member_info ) , \"::\" , stringify ! ( member_id\n\n ) ));\n\n assert_eq! (unsafe {\n\n & ( * ( 0 as * const rd_kafka_group_member_info ) ) .\n\n client_id as * const _ as usize } , 8usize , concat ! (\n\n \"Alignment of field: \" , stringify ! (\n\n rd_kafka_group_member_info ) , \"::\" , stringify ! ( client_id\n\n ) ));\n", "file_path": "rdkafka-sys/src/bindings/macos.rs", "rank": 87, "score": 42388.51965935726 }, { "content": "#[test]\n\nfn bindgen_test_layout_rd_kafka_topic_partition_list_s() {\n\n assert_eq!(::std::mem::size_of::<rd_kafka_topic_partition_list_s>() ,\n\n 16usize , concat ! (\n\n \"Size of: \" , stringify ! ( rd_kafka_topic_partition_list_s )\n\n ));\n\n assert_eq! (::std::mem::align_of::<rd_kafka_topic_partition_list_s>() ,\n\n 8usize , concat ! (\n\n \"Alignment of \" , stringify ! (\n\n rd_kafka_topic_partition_list_s ) ));\n\n assert_eq! (unsafe {\n\n & ( * ( 0 as * const rd_kafka_topic_partition_list_s ) ) . cnt\n\n as * const _ as usize } , 0usize , concat ! (\n\n \"Alignment of field: \" , stringify ! (\n\n rd_kafka_topic_partition_list_s ) , \"::\" , stringify ! ( cnt )\n\n ));\n\n assert_eq! (unsafe {\n\n & ( * ( 0 as * const rd_kafka_topic_partition_list_s ) ) .\n\n size as * const _ as usize } , 4usize , concat ! 
(\n\n \"Alignment of field: \" , stringify ! (\n\n rd_kafka_topic_partition_list_s ) , \"::\" , stringify ! ( size\n", "file_path": "rdkafka-sys/src/bindings/macos.rs", "rank": 88, "score": 42388.51965935726 }, { "content": "/// Return the log level\n\nfn log_level_from_global_config() -> RDKafkaLogLevel {\n\n if log_enabled!(LogLevel::Debug) {\n\n RDKafkaLogLevel::Debug\n\n } else if log_enabled!(LogLevel::Info) {\n\n RDKafkaLogLevel::Info\n\n } else if log_enabled!(LogLevel::Warn) {\n\n RDKafkaLogLevel::Warning\n\n } else {\n\n RDKafkaLogLevel::Error\n\n }\n\n}\n\n\n", "file_path": "src/config.rs", "rank": 89, "score": 41211.3803480797 } ]
Rust
src/libnet/src/http2/error.rs
Veil-Project/Veil-Rust
bd32fb781a9fe6f22aede1bb7cd0aa227b820939
use std::error; use std::fmt; use std::io; use std::result; use super::hpack; pub type Result<T> = result::Result<T, Error>; #[derive(Debug, Clone, Copy)] pub enum ProtocolErrorKind { None = 0x0, Protocol = 0x1, Internal = 0x2, FlowControl = 0x3, SettingsTimeout = 0x4, StreamClosed = 0x5, FrameSize = 0x6, RefusedStream = 0x7, Cancel = 0x8, Compression = 0x9, Connect = 0xA, EnhanceYourCalm = 0xB, InadequateSecurity = 0xC, Http11Required = 0xD, Unknown = 0xE, } impl ProtocolErrorKind { pub fn from_code(code: isize) -> Self { use ProtocolErrorKind::*; match code { 0x0 => None, 0x1 => Protocol, 0x2 => Internal, 0x3 => FlowControl, 0x4 => SettingsTimeout, 0x5 => StreamClosed, 0x6 => FrameSize, 0x7 => RefusedStream, 0x8 => Cancel, 0x9 => Compression, 0xA => Connect, 0xB => EnhanceYourCalm, 0xC => InadequateSecurity, 0xD => Http11Required, 0xE => Unknown, _ => Unknown, } } pub(crate) fn as_code(&self) -> isize { *self as isize } pub(crate) fn as_id_str(&self) -> &str { use ProtocolErrorKind::*; match self { None => "NO_ERROR", Protocol => "PROTOCOL_ERROR", Internal => "INTERNAL_ERROR", FlowControl => "FLOW_CONTROL_ERROR", SettingsTimeout => "SETTINGS_TIMEOUT", StreamClosed => "STREAM_CLOSED", FrameSize => "FRAME_SIZE_ERROR", RefusedStream => "REFUSED_STREAM", Cancel => "CANCEL", Compression => "COMPRESSION_ERROR", Connect => "CONNECT_ERROR", EnhanceYourCalm => "ENHANCE_YOUR_CALM", InadequateSecurity => "INADEQUATE_SECURITY", Http11Required => "HTTP_1_1_REQUIRED", Unknown => "INTERNAL_ERROR", } } pub(crate) fn as_str(&self) -> &str { use ProtocolErrorKind::*; match self { None => "condition is not a result of an error", Protocol => "detected an unspecific protocol error", Internal => "encountered an unexpected internal error", FlowControl => "detected that its peer violated the flow-control protocol", SettingsTimeout => "sent a SETTINGS frame but did not receive a response in a timely manner", StreamClosed => "received a frame after a stream was half-closed", FrameSize => 
"received a frame with an invalid size", RefusedStream => "refused the stream prior to performing any application processing", Cancel => "indicates that the stream is no longer needed", Compression => "unable to maintain the header compression context for the connection", Connect => "connection established in response to a CONNECT request was reset or abnormally closed", EnhanceYourCalm => "detected that its peer is exhibiting a behavior that might be generating excessive load", InadequateSecurity => "underlying transport has properties that do not meet minimum security requirements", Http11Required => "requires that HTTP/1.1 be used instead of HTTP/2", Unknown => "encountered an unexpected error code", } } } impl fmt::Display for ProtocolErrorKind { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "{}: {}", self.as_id_str(), self.as_str()) } } #[derive(Clone, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)] pub enum ErrorKind { InvalidSetting(String), DataLength, } impl ErrorKind { pub(crate) fn as_str(&self) -> &str { use ErrorKind::*; match *self { InvalidSetting(ref e) => e, DataLength => "Data length is too long or too short", } } } pub enum Repr { Io(io::Error), Protocol(ProtocolErrorKind), Simple(ErrorKind), Hpack(hpack::ErrorKind), } impl fmt::Debug for Repr { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { use Repr::*; match self { Io(ref e) => e.fmt(f), Protocol(kind) => f .debug_struct("Protocol") .field("code", &kind.as_code()) .field("id", &kind.as_id_str()) .field("description", &kind.as_str()) .finish(), Simple(kind) => f.debug_tuple("Kind").field(&kind).finish(), Hpack(kind) => f.debug_tuple("HpackKind").field(&kind).finish(), } } } pub struct Error(Box<Repr>); impl Error { pub fn new(kind: Repr) -> Error { Error(Box::new(kind)) } pub fn from_raw_protocol_error(code: isize) -> Error { Error(Box::new(Repr::Protocol(ProtocolErrorKind::from_code(code)))) } pub fn raw_protocol_error(&self) -> Option<isize> { use Repr::*; 
match *self.0 { Io(..) => None, Protocol(i) => Some(i as isize), } } pub fn kind(&self) -> &Repr { &self.0 } } impl From<ProtocolErrorKind> for Error { fn from(kind: ProtocolErrorKind) -> Self { Error::new(Repr::Protocol(kind)) } } impl From<ErrorKind> for Error { fn from(kind: ErrorKind) -> Self { Error::new(Repr::Simple(kind)) } } impl From<hpack::ErrorKind> for Error { fn from(kind: hpack::ErrorKind) -> Self { Error::new(Repr::Hpack(kind)) } } impl fmt::Debug for Error { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { use Repr::*; match *self.0 { Io(ref e) => e.fmt(f), Protocol(kind) => write!(f, "{}: {}", kind.as_id_str(), kind.as_str()), Simple(kind) => kind.fmt(f), Hpack(kind) => kind.fmt(f), } } } impl fmt::Display for Error { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { use self::Repr::*; match *self.0 { Io(ref e) => e.fmt(f), Protocol(kind) => write!(f, "{}", kind.as_str()), Simple(kind) => write!(f, "{}", kind.as_str()), Hpack(kind) => write!(f, "{}", kind.as_str()), } } } impl error::Error for Error { fn description(&self) -> &str { use Repr::*; match *self.0 { Io(e) => e.description(), Protocol(kind) => kind.as_str(), Simple(kind) => kind.as_str(), Hpack(kind) => kind.as_str(), } } fn source(&self) -> Option<&(dyn error::Error + 'static)> { use Repr::*; match *self.0 { Io(e) => e.source(), Protocol(..) => None, Simple(..) => None, } } } impl From<io::Error> for Error { fn from(e: io::Error) -> Self { Self::new(Repr::Io(e)) } }
use std::error; use std::fmt; use std::io; use std::result; use super::hpack; pub type Result<T> = result::Result<T, Error>; #[derive(Debug, Clone, Copy)] pub enum ProtocolErrorKind { None = 0x0, Protocol = 0x1, Internal = 0x2, FlowControl = 0x3, SettingsTimeout = 0x4, StreamClosed = 0x5, FrameSize = 0x6, RefusedStream = 0x7, Cancel = 0x8, Compression = 0x9, Connect = 0xA, EnhanceYourCalm = 0xB, InadequateSecurity = 0xC, Http11Required = 0xD, Unknown = 0xE, } impl ProtocolErrorKind { pub fn from_code(code: isize) -> Self { use ProtocolErrorKind::*; match code { 0x0 => None, 0x1 => Protocol, 0x2 => Internal, 0x3 => FlowControl, 0x4 => SettingsTimeout, 0x5 => StreamClosed, 0x6 => FrameSize, 0x7 => RefusedStream, 0x8 => Cancel, 0x9 => Compression, 0xA => Connect, 0xB => EnhanceYourCalm, 0xC => InadequateSecurity, 0xD => Http11Required, 0xE => Unknown, _ => Unknown, } } pub(crate) fn as_code(&self) -> isize { *self as isize } pub(crate) fn as_id_str(&self) -> &str { use ProtocolErrorKind::*; match self { None => "NO_ERROR", Protocol => "PROTOCOL_ERROR", Internal => "INTERNAL_ERROR", FlowControl => "FLOW_CONTROL_ERROR", SettingsTimeout => "SETTINGS_TIMEOUT", StreamClosed => "STREAM_CLOSED", FrameSize => "FRAME_SIZE_ERROR", RefusedStream => "REFUSED_STREAM", Cancel => "CANCEL", Compression => "COMPRESSION_ERROR", Connect => "CONNECT_ERROR", EnhanceYourCalm => "ENHANCE_YOUR_CALM", InadequateSecurity => "INADEQUATE_SECURITY", Http11Required => "HTTP_1_1_REQUIRED", Unknown => "INTERNAL_ERROR", } } pub(crate) fn as_str(&self) -> &str { use ProtocolErrorKind::*; match self { None => "condition is not a result of an error", Protocol => "detected an unspecific protocol error", Internal => "encountered an unexpected internal error", FlowControl => "detected that its peer violated the flow-control protocol", SettingsTimeout => "sent a SETTINGS frame but did not receive a response in a timely manner", StreamClosed => "received a frame after a stream was half-closed", FrameSize => 
"received a frame with an invalid size", RefusedStream => "refused the stream prior to performing any application processing", Cancel => "indicates that the stream is no longer needed", Compression => "unable to maintain the header compression context for the connection", Connect => "connection established in response to a CONNECT request was reset or abnormally closed", EnhanceYourCalm => "detected that its peer is exhibiting a behavior that might be generating excessive load", InadequateSecurity => "underlying transport has properties that do not meet minimum security requirements", Http11Required => "requires that HTTP/1.1 be used instead of HTTP/2", Unknown => "encountered an unexpected error code", } } } impl fmt::Display for ProtocolErrorKind { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "{}: {}", self.as_id_str(), self.as_str()) } } #[derive(Clone, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)] pub enum ErrorKind { InvalidSetting(String), DataLength, } impl ErrorKind { pub(crate) fn as_str(&self) -> &str { use ErrorKind::*; match *self { InvalidSetting(ref e) => e, DataLength => "Data length is too long or too short", } } } pub enum Repr { Io(io::Error), Protocol(ProtocolErrorKind), Simple(ErrorKind), Hpack(hpack::ErrorKind), } impl fmt::Debug for Repr {
} pub struct Error(Box<Repr>); impl Error { pub fn new(kind: Repr) -> Error { Error(Box::new(kind)) } pub fn from_raw_protocol_error(code: isize) -> Error { Error(Box::new(Repr::Protocol(ProtocolErrorKind::from_code(code)))) } pub fn raw_protocol_error(&self) -> Option<isize> { use Repr::*; match *self.0 { Io(..) => None, Protocol(i) => Some(i as isize), } } pub fn kind(&self) -> &Repr { &self.0 } } impl From<ProtocolErrorKind> for Error { fn from(kind: ProtocolErrorKind) -> Self { Error::new(Repr::Protocol(kind)) } } impl From<ErrorKind> for Error { fn from(kind: ErrorKind) -> Self { Error::new(Repr::Simple(kind)) } } impl From<hpack::ErrorKind> for Error { fn from(kind: hpack::ErrorKind) -> Self { Error::new(Repr::Hpack(kind)) } } impl fmt::Debug for Error { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { use Repr::*; match *self.0 { Io(ref e) => e.fmt(f), Protocol(kind) => write!(f, "{}: {}", kind.as_id_str(), kind.as_str()), Simple(kind) => kind.fmt(f), Hpack(kind) => kind.fmt(f), } } } impl fmt::Display for Error { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { use self::Repr::*; match *self.0 { Io(ref e) => e.fmt(f), Protocol(kind) => write!(f, "{}", kind.as_str()), Simple(kind) => write!(f, "{}", kind.as_str()), Hpack(kind) => write!(f, "{}", kind.as_str()), } } } impl error::Error for Error { fn description(&self) -> &str { use Repr::*; match *self.0 { Io(e) => e.description(), Protocol(kind) => kind.as_str(), Simple(kind) => kind.as_str(), Hpack(kind) => kind.as_str(), } } fn source(&self) -> Option<&(dyn error::Error + 'static)> { use Repr::*; match *self.0 { Io(e) => e.source(), Protocol(..) => None, Simple(..) => None, } } } impl From<io::Error> for Error { fn from(e: io::Error) -> Self { Self::new(Repr::Io(e)) } }
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { use Repr::*; match self { Io(ref e) => e.fmt(f), Protocol(kind) => f .debug_struct("Protocol") .field("code", &kind.as_code()) .field("id", &kind.as_id_str()) .field("description", &kind.as_str()) .finish(), Simple(kind) => f.debug_tuple("Kind").field(&kind).finish(), Hpack(kind) => f.debug_tuple("HpackKind").field(&kind).finish(), } }
function_block-full_function
[]
Rust
kernel/src/arch/x86_64/memory/address_space_manager.rs
aeleos/VeOS
4c766539ac10ad7b044bcf1b49fd5f0ae8fc21c6
use super::paging::inactive_page_table::InactivePageTable; use super::paging::page_table_entry::*; use super::paging::page_table_manager::PageTableManager; use super::paging::{convert_flags, Page, PageFrame, CURRENT_PAGE_TABLE}; use super::PAGE_SIZE; use super::{ KERNEL_STACK_AREA_BASE, KERNEL_STACK_MAX_SIZE, KERNEL_STACK_OFFSET, USER_STACK_AREA_BASE, USER_STACK_MAX_SIZE, USER_STACK_OFFSET, }; use core::ptr; use crate::memory::{ address_space_manager, Address, AddressSpace, PageFlags, PhysicalAddress, VirtualAddress, }; use crate::multitasking::stack::AccessType; use crate::multitasking::{Stack, ThreadID}; pub struct AddressSpaceManager { table: InactivePageTable, } impl address_space_manager::AddressSpaceManager for AddressSpaceManager { fn new() -> AddressSpaceManager { AddressSpaceManager { table: InactivePageTable::copy_from_current(), } } fn idle() -> AddressSpaceManager { AddressSpaceManager { table: InactivePageTable::from_current_table(), } } fn write_to(&mut self, buffer: &[u8], address: VirtualAddress, flags: PageFlags) { let flags = convert_flags(flags); let start_page_num = address.page_num(); let end_page_num = (address + buffer.len() - 1).page_num() + 1; let mut current_offset = address.offset_in_page(); let mut current_buffer_position = 0; for page_num in start_page_num..end_page_num { let page_address = VirtualAddress::from_page_num(page_num); self.table.change_permissions_or_map( Page::from_address(page_address), PageTableEntryFlags::WRITABLE, ); let mut entry = self.table.get_entry_and_map(page_address); let physical_address = entry .points_to() .expect("The just mapped page isn't mapped."); let (new_current_buffer_position, new_current_offset) = CURRENT_PAGE_TABLE .lock() .with_temporary_page(PageFrame::from_address(physical_address), |page| { let start_address = page.get_address() + current_offset; let write_length = if (PAGE_SIZE - current_offset) >= buffer.len() - current_buffer_position { buffer.len() - current_buffer_position } else { 
PAGE_SIZE - current_offset }; unsafe { ptr::copy_nonoverlapping( buffer.as_ptr(), start_address.as_mut_ptr(), write_length, ); } ( current_buffer_position + write_length, (current_offset + write_length) % PAGE_SIZE, ) }); current_offset = new_current_offset; current_buffer_position = new_current_buffer_position; entry.set_flags(flags); } self.table.unmap(); } unsafe fn get_page_table_address(&self) -> PhysicalAddress { self.table.get_frame().get_address() } fn map_page(&mut self, page_address: VirtualAddress, flags: PageFlags) { let flags = convert_flags(flags); self.table.map_page(Page::from_address(page_address), flags); self.table.unmap(); } unsafe fn unmap_page(&mut self, start_address: VirtualAddress) { self.table.unmap_page(Page::from_address(start_address)); self.table.unmap(); } unsafe fn unmap_page_unchecked(&mut self, start_address: VirtualAddress) { self.table .unmap_page_unchecked(Page::from_address(start_address)); self.table.unmap(); } fn create_kernel_stack(id: ThreadID, address_space: &mut AddressSpace) -> Stack { let tid: usize = id.into(); Stack::new( 0x4000, KERNEL_STACK_MAX_SIZE, KERNEL_STACK_AREA_BASE + KERNEL_STACK_OFFSET * tid, AccessType::KernelOnly, Some(address_space), ) } fn create_user_stack(id: ThreadID, address_space: &mut AddressSpace) -> Stack { let tid: usize = id.into(); Stack::new( 0x2000, USER_STACK_MAX_SIZE, USER_STACK_AREA_BASE + USER_STACK_OFFSET * tid, AccessType::UserAccessible, Some(address_space), ) } fn create_idle_stack(cpu_id: usize) -> Stack { Stack::new( 0x3000, KERNEL_STACK_MAX_SIZE, KERNEL_STACK_AREA_BASE + KERNEL_STACK_OFFSET * cpu_id, AccessType::KernelOnly, None, ) } }
use super::paging::inactive_page_table::InactivePageTable; use super::paging::page_table_entry::*; use super::paging::page_table_manager::PageTableManager; use super::paging::{convert_flags, Page, PageFrame, CURRENT_PAGE_TABLE}; use super::PAGE_SIZE; use super::{ KERNEL_STACK_AREA_BASE, KERNEL_STACK_MAX_SIZE, KERNEL_STACK_OFFSET, USER_STACK_AREA_BASE, USER_STACK_MAX_SIZE, USER_STACK_OFFSET, }; use core::ptr; use crate::memory::{ address_space_manager, Address, AddressSpace, PageFlags, PhysicalAddress, VirtualAddress, }; use crate::multitasking::stack::AccessType; use crate::multitasking::{Stack, ThreadID}; pub struct AddressSpaceManager { table: InactivePageTable, } impl address_space_manager::AddressSpaceManager for AddressSpaceManager { fn new() -> AddressSpaceManager { AddressSpaceManager { table: InactivePageTable::copy_from_current(), } } fn idle() -> AddressSpaceManager { AddressSpaceManager { table: InactivePageTable::from_current_table(), } } fn write_to(&mut self, buffer: &[u8], address: VirtualAddress, flags: PageFlags) { let flags = convert_flags(flags); let start_page_num = address.page_num(); let end_page_num = (address + buffer.len() - 1).page_num() + 1; let mut current_offset = address.offset_in_page(); let mut current_buffer_position = 0; for page_num in start_page_num..end_page_num { let page_address = VirtualAddress::from_page_num(page_num); self.table.change_permissions_or_map( Page::from_address(page_address), PageTableEntryFlags::WRITABLE, ); let mut entry = self.table.get_entry_and_map(page_address); let physical_address = entry .points_to() .expect("The just mapped page isn't mapped."); let (new_current_buffer_position, new_current_offset) = CURRENT_PAGE_TABLE .lock() .with_temporary_page(PageFrame::from_address(physical_address), |page| { let start_address = page.get_address() + current_offset; let write_length = if (PAGE_SIZE - current_offset) >= buffer.len() - current_buffer_position { buffer.len() - current_buffer_position } else { 
PAGE_SIZE - current_offset }; unsafe { ptr::copy_nonoverlapping( buffer.as_ptr(), start_address.as_mut_ptr(), write_length, ); } ( current_buffer_position + write_length, (current_offset + write_length) % PAGE_SIZE, ) }); current_offset = new_current_offset; current_buffer_position = new_current_buffer_position; entry.set_flags(flags); } self.table.unmap(); } unsafe fn get_page_table_address(&self) -> PhysicalAddress { self.table.get_frame().get_address() } fn map_page(&mut self, page_address: VirtualAddress, flags: PageFlags) { let flags = convert_flags(flags); self.table.map_page(Page::from_address(page_address), flags); self.table.unmap(); } unsafe fn unmap_page(&mut self, start_address: VirtualAddress) { self.table.unmap_page(Page::from_address(start_address)); self.table.unmap(); } unsafe fn unmap_page_unchecked(&mut self, start_address: VirtualAddress) { self.table .unmap_page_unchecked(Page::from_address(start_address)); self.table.unmap(); } fn create_kernel_stack(id: ThreadID, address_space: &mut AddressSpace) -> Stack { let tid: usize = id.into(); Stack::new( 0x4000, KERNEL_STACK_MAX_SIZE, KERNEL_STACK_AREA_BASE + KERNEL_STACK_OFFSET * tid, AccessType::KernelOnly, Some(address_space), ) } fn create_user_stack(id: ThreadID, address_space: &m
fn create_idle_stack(cpu_id: usize) -> Stack { Stack::new( 0x3000, KERNEL_STACK_MAX_SIZE, KERNEL_STACK_AREA_BASE + KERNEL_STACK_OFFSET * cpu_id, AccessType::KernelOnly, None, ) } }
ut AddressSpace) -> Stack { let tid: usize = id.into(); Stack::new( 0x2000, USER_STACK_MAX_SIZE, USER_STACK_AREA_BASE + USER_STACK_OFFSET * tid, AccessType::UserAccessible, Some(address_space), ) }
function_block-function_prefixed
[ { "content": "/// Maps the given page to the given frame using the given flags.\n\npub fn map_page_at(page_address: VirtualAddress, frame_address: PhysicalAddress, flags: PageFlags) {\n\n CURRENT_PAGE_TABLE.lock().map_page_at(\n\n Page::from_address(page_address),\n\n PageFrame::from_address(frame_address),\n\n convert_flags(flags)\n\n );\n\n}\n\n\n", "file_path": "kernel/src/arch/x86_64/memory/paging/mod.rs", "rank": 0, "score": 293262.3628984911 }, { "content": "/// Maps the given page to the given frame using the given flags.\n\npub fn map_page_at(page_address: VirtualAddress, frame_address: PhysicalAddress, flags: PageFlags) {\n\n paging::map_page_at(page_address, frame_address, flags);\n\n}\n\n\n", "file_path": "kernel/src/arch/x86_64/memory/mod.rs", "rank": 1, "score": 292619.84210251854 }, { "content": "/// Maps the given page using the given flags.\n\npub fn map_page(page_address: VirtualAddress, flags: PageFlags) {\n\n paging::map_page(page_address, flags);\n\n}\n\n\n", "file_path": "kernel/src/arch/x86_64/memory/mod.rs", "rank": 2, "score": 284543.3195840033 }, { "content": "/// Maps the given page using the given flags.\n\npub fn map_page(page_address: VirtualAddress, flags: PageFlags) {\n\n CURRENT_PAGE_TABLE\n\n .lock()\n\n .map_page(Page::from_address(page_address), convert_flags(flags));\n\n}\n\n\n\n/// Unmaps the given page.\n\n///\n\n/// # Safety\n\n/// - Make sure this page isn't referenced anymore when unmapping it.\n\npub unsafe fn unmap_page(start_address: VirtualAddress) {\n\n CURRENT_PAGE_TABLE\n\n .lock()\n\n .unmap_page(Page::from_address(start_address));\n\n}\n\n\n\n/// Maps the initramfs into the kernel.\n\n///\n\n/// # Safety\n\n/// - This should only be called once.\n", "file_path": "kernel/src/arch/x86_64/memory/paging/mod.rs", "rank": 3, "score": 284385.19111847907 }, { "content": "/// Returns the flags for the given page, if the page is mapped.\n\npub fn get_page_flags(page_address: VirtualAddress) -> PageFlags {\n\n let mut flags 
= PageFlags::empty();\n\n let mut table = CURRENT_PAGE_TABLE.lock();\n\n\n\n if let Some(entry) = table.get_entry(Page::from_address(page_address).get_address()) {\n\n let entry_flags = entry.flags();\n\n\n\n if entry_flags.contains(PageTableEntryFlags::PRESENT) {\n\n flags |= PageFlags::PRESENT;\n\n }\n\n\n\n if entry_flags.contains(PageTableEntryFlags::WRITABLE) {\n\n flags |= PageFlags::WRITABLE;\n\n }\n\n\n\n if !entry_flags.contains(PageTableEntryFlags::NO_EXECUTE) {\n\n flags |= PageFlags::EXECUTABLE;\n\n }\n\n\n\n if entry_flags.contains(PageTableEntryFlags::DISABLE_CACHE) {\n", "file_path": "kernel/src/arch/x86_64/memory/paging/mod.rs", "rank": 4, "score": 261689.5585093469 }, { "content": "/// Returns the flags of the given page.\n\npub fn get_page_flags(page_address: VirtualAddress) -> PageFlags {\n\n paging::get_page_flags(page_address)\n\n}\n\n\n\n/// Unmaps the given page.\n\n///\n\n/// # Safety\n\n/// - Make sure that nothing references that page anymore.\n\npub unsafe fn unmap_page(start_address: VirtualAddress) {\n\n paging::unmap_page(start_address);\n\n}\n\n\n", "file_path": "kernel/src/arch/x86_64/memory/mod.rs", "rank": 5, "score": 261217.21658293757 }, { "content": "/// Converts the general `PageFlags` to x86_64-specific flags.\n\npub fn convert_flags(flags: PageFlags) -> PageTableEntryFlags {\n\n let mut entry_flags = PageTableEntryFlags::PRESENT;\n\n\n\n if flags.contains(PageFlags::WRITABLE) {\n\n entry_flags |= PageTableEntryFlags::WRITABLE;\n\n }\n\n\n\n if !flags.contains(PageFlags::EXECUTABLE) {\n\n entry_flags |= PageTableEntryFlags::NO_EXECUTE;\n\n }\n\n\n\n if flags.contains(PageFlags::NO_CACHE) {\n\n entry_flags |= PageTableEntryFlags::DISABLE_CACHE;\n\n }\n\n\n\n if flags.contains(PageFlags::USER_ACCESSIBLE) {\n\n entry_flags |= PageTableEntryFlags::USER_ACCESSIBLE;\n\n }\n\n\n\n entry_flags\n\n}\n\n\n", "file_path": "kernel/src/arch/x86_64/memory/paging/mod.rs", "rank": 6, "score": 236148.86860080162 }, { "content": "/// The page 
fault handler.\n\npub fn page_fault_handler(address: VirtualAddress, program_counter: VirtualAddress) {\n\n unsafe { crate::sync::disable_preemption() };\n\n let current_thread = CURRENT_THREAD.lock();\n\n\n\n error!(\n\n \"Page fault in {:?} {:?} at address {:?} (PC: {:?})\",\n\n current_thread.pid, current_thread.id, address, program_counter\n\n );\n\n\n\n error!(\"Page flags: {:?}\", arch::Current::get_page_flags(address));\n\n loop {}\n\n}\n", "file_path": "kernel/src/interrupts/mod.rs", "rank": 7, "score": 218113.18714506086 }, { "content": "/// Checks if the address is a kernel or a userspace address.\n\npub fn is_userspace_address(address: VirtualAddress) -> bool {\n\n address <= VIRTUAL_LOW_MAX_ADDRESS\n\n}\n", "file_path": "kernel/src/arch/x86_64/memory/mod.rs", "rank": 8, "score": 190488.00708830194 }, { "content": "/// Issues an interrupt to the current CPU.\n\npub fn issue_self_interrupt(vector: u8) {\n\n issue_interrupt(InterruptDestinationMode::SELF, vector);\n\n}\n\n\n", "file_path": "kernel/src/arch/x86_64/interrupts/lapic.rs", "rank": 9, "score": 188645.62583264842 }, { "content": "/// Creates a new process.\n\npub fn create_process(address_space: AddressSpace, entry_address: VirtualAddress) -> ProcessID {\n\n let mut pcb = PCB::new(address_space);\n\n\n\n let mut process_list = PROCESS_LIST.lock();\n\n let id = find_pid(&process_list);\n\n\n\n let first_tcb = TCB::in_process(id, 0.into(), entry_address, &mut pcb);\n\n\n\n scheduler::READY_LIST.lock().push(first_tcb);\n\n\n\n assert!(\n\n process_list.insert(id, pcb).is_none(),\n\n \"Trying to use an already used {:?}.\",\n\n id\n\n );\n\n\n\n id\n\n}\n\n\n", "file_path": "kernel/src/multitasking/mod.rs", "rank": 10, "score": 183055.8123703001 }, { "content": "/// Aligns the given address to the given alignment.\n\n///\n\n/// The alignment must be a power of two.\n\nfn align(address: VirtualAddress, alignment: usize) -> VirtualAddress {\n\n debug_assert!(alignment.is_power_of_two());\n\n\n\n if 
address.as_usize() % alignment == 0 {\n\n address\n\n } else {\n\n let alignment_bitmask = !(alignment - 1);\n\n VirtualAddress::from_usize((address.as_usize() & alignment_bitmask) + alignment)\n\n }\n\n}\n", "file_path": "kernel/src/memory/allocator/mod.rs", "rank": 11, "score": 175175.37942937805 }, { "content": "/// Initializes the paging.\n\npub fn init(initramfs_area: MemoryArea<PhysicalAddress>) {\n\n assert_has_not_been_called!(\"The x86_64 paging module should only be initialized once.\");\n\n\n\n debug!(\"Initializing the free list...\");\n\n free_list::init();\n\n\n\n debug!(\"Remapping the kernel...\");\n\n unsafe { remap_kernel() };\n\n\n\n debug!(\"Mapping the initramfs...\");\n\n unsafe { map_initramfs(initramfs_area) };\n\n}\n\n\n", "file_path": "kernel/src/arch/x86_64/memory/paging/mod.rs", "rank": 12, "score": 174874.30142823438 }, { "content": "/// Initializes the multiboot module.\n\npub fn init(information_structure_address: usize) {\n\n assert_has_not_been_called!(\"The multiboot module should only be initialized once.\");\n\n\n\n unsafe {\n\n STRUCT_BASE_ADDRESS =\n\n to_virtual!(information_structure_address) as *const MultibootInformation\n\n };\n\n\n\n assert!(!get_flags().contains(MultibootFlags::A_OUT | MultibootFlags::ELF));\n\n}\n\n\n\n/// Returns the VGA buffer information requested.\n", "file_path": "kernel/src/boot/multiboot.rs", "rank": 13, "score": 168817.86811151798 }, { "content": "/// Initializes the multiboot module.\n\npub fn init(information_structure_address: usize) {\n\n assert_has_not_been_called!(\"The multiboot2 module should only be initialized once.\");\n\n BOOT_INFO.call_once(|| unsafe { multiboot2::load(information_structure_address) });\n\n}\n\n\n\n/// Returns the VGA buffer information requested.\n", "file_path": "kernel/src/boot/multiboot2.rs", "rank": 14, "score": 168817.868111518 }, { "content": "/// Gets the current task priority for the local APIC.\n\npub fn get_priority() -> u8 {\n\n unsafe { 
get_register(TASK_PRIORITY_REGISTER) as u8 }\n\n}\n\n\n", "file_path": "kernel/src/arch/x86_64/interrupts/lapic.rs", "rank": 15, "score": 166870.8480606203 }, { "content": "/// The keyboard interrupt handler.\n\npub fn keyboard_interrupt(scancode: u8) {\n\n if scancode == 1 {\n\n unsafe { crate::sync::disable_preemption() };\n\n loop {}\n\n }\n\n info!(\"Key: <{}>\", scancode);\n\n}\n\n\n", "file_path": "kernel/src/interrupts/mod.rs", "rank": 16, "score": 164890.2984063002 }, { "content": "/// Represents something that can act like an address.\n\npub trait Address: PartialOrd + Ord + Add<usize, Output = Self> + Sized + Clone + Copy {\n\n /// Returns the value of the address as a `usize`.\n\n #[inline(always)]\n\n fn as_usize(&self) -> usize;\n\n\n\n /// Creates a value of the address type from a `usize`.\n\n #[inline(always)]\n\n fn from_usize(_: usize) -> Self;\n\n\n\n /// Aligns the address to the next page border, rounded down.\n\n fn page_align_down(self) -> Self {\n\n Self::from_usize(self.as_usize() / PAGE_SIZE * PAGE_SIZE)\n\n }\n\n\n\n /// Returns the offset of the page from the previous page border.\n\n fn offset_in_page(self) -> usize {\n\n self.as_usize() % PAGE_SIZE\n\n }\n\n}\n\n\n", "file_path": "kernel/src/memory/mod.rs", "rank": 17, "score": 162780.38703805697 }, { "content": "/// Returns the size of unused physical memory.\n\npub fn get_free_memory_size() -> usize {\n\n FRAME_ALLOCATOR.get_free_frame_num() * PAGE_SIZE\n\n}\n\n\n", "file_path": "kernel/src/arch/x86_64/memory/paging/mod.rs", "rank": 18, "score": 161789.51921357767 }, { "content": "/// Sets the task priority for the local APIC.\n\npub fn set_priority(value: u8) {\n\n unsafe {\n\n set_register(TASK_PRIORITY_REGISTER, value as u32);\n\n }\n\n}\n\n\n", "file_path": "kernel/src/arch/x86_64/interrupts/lapic.rs", "rank": 19, "score": 159853.14160383964 }, { "content": "#[derive(Debug, Clone, Copy)]\n\nstruct ColorCode(u8);\n\n\n\nimpl ColorCode {\n\n /// Creates a color code.\n\n const fn 
new(foreground: Color, background: Color) -> ColorCode {\n\n ColorCode((background as u8) << 4 | (foreground as u8))\n\n }\n\n}\n\n\n\n/// Represents a character in the buffer.\n", "file_path": "kernel/src/arch/x86_64/vga_buffer.rs", "rank": 20, "score": 159136.22542874698 }, { "content": "/// Returns the memory area of the initramfs.\n\npub fn get_initramfs_area() -> MemoryArea<PhysicalAddress> {\n\n let module_entry = get_initramfs_module_entry();\n\n\n\n MemoryArea::from_start_and_end(\n\n PhysicalAddress::from_usize(module_entry.start_address() as usize),\n\n PhysicalAddress::from_usize(module_entry.end_address() as usize),\n\n )\n\n}\n\n\n\n/// Provides an iterator for the memory map.\n\npub struct MemoryMapIterator {\n\n /// Iterator for current memory.\n\n memory: multiboot2::MemoryAreaIter,\n\n}\n\n\n\nimpl MemoryMapIterator {\n\n /// Creates a new iterator through the memory map.\n\n fn new() -> MemoryMapIterator {\n\n MemoryMapIterator {\n\n memory: BOOT_INFO\n", "file_path": "kernel/src/boot/multiboot2.rs", "rank": 21, "score": 157146.55385029447 }, { "content": "/// Returns the memory area of the initramfs.\n\npub fn get_initramfs_area() -> MemoryArea<PhysicalAddress> {\n\n match *get_boot_method() {\n\n BootMethod::Multiboot => multiboot::get_initramfs_area(),\n\n BootMethod::Multiboot2 => multiboot2::get_initramfs_area(),\n\n _ => unimplemented!(),\n\n }\n\n}\n\n\n", "file_path": "kernel/src/boot/mod.rs", "rank": 22, "score": 157146.55385029447 }, { "content": "/// Returns the memory area of the initramfs.\n\npub fn get_initramfs_area() -> MemoryArea<PhysicalAddress> {\n\n let module_entry = get_initramfs_module_entry();\n\n\n\n MemoryArea::from_start_and_end(\n\n PhysicalAddress::from_usize(module_entry.mod_start as usize),\n\n PhysicalAddress::from_usize(module_entry.mod_end as usize),\n\n )\n\n}\n\n\n", "file_path": "kernel/src/boot/multiboot.rs", "rank": 23, "score": 157146.55385029447 }, { "content": "/// Initializes the boot module and all the 
data it provides.\n\npub fn init(magic_number: u32, information_structure_address: usize) {\n\n assert_has_not_been_called!(\"Boot information should only be initialized once.\");\n\n\n\n set_boot_method(magic_number);\n\n\n\n match *get_boot_method() {\n\n BootMethod::Multiboot2 => multiboot2::init(information_structure_address),\n\n BootMethod::Multiboot => multiboot::init(information_structure_address),\n\n _ => unimplemented!(),\n\n };\n\n}\n\n\n", "file_path": "kernel/src/boot/mod.rs", "rank": 24, "score": 154834.0057313702 }, { "content": "/// This function gets executed whenever there is nothing else to execute.\n\n///\n\n/// It can perform various tasks, such as cleaning up unused resources.\n\n///\n\n/// Once it's done performing it's initial cleanup, it sleeps in a loop,\n\n/// performing periodic cleanup. It should also be interruptable as often as\n\n/// possible.\n\npub fn idle() -> ! {\n\n // TODO: Peform initial cleanup here.\n\n unsafe {\n\n enable_preemption();\n\n schedule();\n\n }\n\n loop {\n\n // TODO: Perform periodic cleanup here.\n\n unsafe {\n\n {\n\n if let Some(next_wake_thread) = SLEEPING_LIST.lock().peek() {\n\n let current_time = Timestamp::get_current();\n\n let wake_time = next_wake_thread.get_wake_time();\n\n if let Some(sleep_duration) = wake_time.checked_sub(current_time) {\n\n arch::Current::interrupt_in(sleep_duration);\n\n } else {\n\n schedule();\n\n }\n\n }\n\n }\n\n halt();\n\n }\n\n }\n\n}\n", "file_path": "kernel/src/multitasking/scheduler.rs", "rank": 25, "score": 153958.10638317416 }, { "content": "/// The physical address at which the kernel starts.\n\npub fn get_kernel_area() -> MemoryArea<PhysicalAddress> {\n\n let start = unsafe { TEXT_START };\n\n let end = unsafe { KERNEL_END };\n\n MemoryArea::from_start_and_end(start, end)\n\n}\n\n\n", "file_path": "kernel/src/arch/x86_64/memory/mod.rs", "rank": 26, "score": 152841.45000895785 }, { "content": "/// Returns the start address of the initramfs.\n\npub fn 
get_initramfs_area() -> MemoryArea<VirtualAddress> {\n\n unsafe { INITRAMFS_AREA }\n\n}\n\n\n", "file_path": "kernel/src/arch/x86_64/memory/mod.rs", "rank": 27, "score": 152610.47683998436 }, { "content": "/// Hashes page frames to values from 0 to 511.\n\n///\n\n/// This serves to speed up temporary mapping of page frames,\n\n/// by better utilizing the available space.\n\nfn page_frame_hash(frame: PageFrame) -> usize {\n\n // UNOPTIMIZED: Possibly use a better hash algorithm here?\n\n let mut address = frame.get_address().as_usize() >> 12;\n\n address *= 101_489;\n\n address % 512\n\n}\n", "file_path": "kernel/src/arch/x86_64/memory/paging/current_page_table.rs", "rank": 28, "score": 148202.64718725462 }, { "content": "/// Initializes the buffer for use.\n\npub fn init() {\n\n let info = boot::get_vga_info();\n\n WRITER.lock().init(info);\n\n clear_screen();\n\n}\n\n\n", "file_path": "kernel/src/arch/x86_64/vga_buffer.rs", "rank": 29, "score": 147842.5031965989 }, { "content": "/// Returns the memory map given by the boot loader.\n\npub fn get_memory_map() -> MemoryMapIterator {\n\n MemoryMapIterator::new()\n\n}\n", "file_path": "kernel/src/boot/multiboot2.rs", "rank": 30, "score": 147198.4337950197 }, { "content": "/// Returns the memory map given by the boot loader.\n\npub fn get_memory_map() -> MemoryMapIterator {\n\n MemoryMapIterator::new()\n\n}\n", "file_path": "kernel/src/boot/multiboot.rs", "rank": 31, "score": 147198.4337950197 }, { "content": "#[start]\n\n#[no_mangle]\n\npub fn _start(_: isize, _: *const *const u8) -> isize {\n\n unsafe {\n\n main();\n\n }\n\n exit();\n\n}\n\n\n\n#[lang = \"eh_personality\"]\n\nextern \"C\" fn eh_personality() {\n\n unimplemented!();\n\n}\n\n\n\n/// The panic handler of the program.\n\n///\n\n/// This exits after printing some debug information.\n\n#[panic_implementation]\n\n#[no_mangle]\n\npub extern \"C\" fn panic_fmt(info: &PanicInfo) -> ! 
{\n\n println!(\"{}\", info);\n\n exit();\n\n}\n", "file_path": "std/src/lib.rs", "rank": 32, "score": 146151.91606472508 }, { "content": "/// Clears the screen.\n\npub fn clear_screen() {\n\n WRITER.lock().clear_screen();\n\n}\n", "file_path": "kernel/src/arch/x86_64/vga_buffer.rs", "rank": 33, "score": 145108.4397890396 }, { "content": "/// Represents the buffer.\n\nstruct Buffer {\n\n address: Unique<Volatile<ScreenChar>>,\n\n width: usize,\n\n height: usize\n\n}\n\n\n\nimpl Buffer {\n\n /// Creates a new buffer.\n\n const fn new(address: usize, width: usize, height: usize) -> Buffer {\n\n Buffer {\n\n address: unsafe { Unique::new_unchecked(address as *mut _) },\n\n width,\n\n height\n\n }\n\n }\n\n\n\n /// Writes a character to this buffer.\n\n fn write_char(&mut self, row_position: usize, column_position: usize, character: ScreenChar) {\n\n let start = self.address.as_ptr();\n\n // TODO better safety check here\n", "file_path": "kernel/src/arch/x86_64/vga_buffer.rs", "rank": 34, "score": 144074.94451336103 }, { "content": "/// Returns an iterator for the map of usable memory.\n\npub fn get_memory_map() -> Either<\n\n MemoryMapIterator<multiboot::MemoryMapIterator>,\n\n MemoryMapIterator<multiboot2::MemoryMapIterator>,\n\n> {\n\n match *get_boot_method() {\n\n BootMethod::Multiboot => Left(MemoryMapIterator::new(multiboot::get_memory_map())),\n\n BootMethod::Multiboot2 => Right(MemoryMapIterator::new(multiboot2::get_memory_map())),\n\n _ => unimplemented!(),\n\n }\n\n}\n\n\n\n#[repr(C, align(4))]\n\npub struct Multiboot1 {\n\n magic: u32,\n\n flags: u32,\n\n checksum: u32,\n\n header_addr: u32,\n\n load_addr: u32,\n\n load_end_addr: u32,\n\n bss_end_addr: u32,\n\n entry_addr: u32,\n\n mode_type: u32,\n\n width: u32,\n\n height: u32,\n\n depth: u32,\n\n}\n\n\n", "file_path": "kernel/src/boot/mod.rs", "rank": 35, "score": 142003.28707748582 }, { "content": "/// Returns the number of available cpus.\n\npub fn get_cpu_num() -> usize {\n\n 
arch::Current::get_cpu_num()\n\n}\n", "file_path": "kernel/src/multitasking/mod.rs", "rank": 36, "score": 141954.67673328696 }, { "content": "/// Returns the id of the current cpu.\n\npub fn get_cpu_id() -> usize {\n\n arch::Current::get_cpu_id()\n\n}\n\n\n", "file_path": "kernel/src/multitasking/mod.rs", "rank": 37, "score": 141954.67673328696 }, { "content": "/// Initializes the list of free page frames.\n\npub fn init() {\n\n assert_has_not_been_called!(\"The free list should only be initialized once.\");\n\n\n\n let mut free_list = FREE_LIST.lock();\n\n\n\n for entry in boot::get_memory_map() {\n\n unsafe { free_list.insert(entry) }\n\n }\n\n}\n", "file_path": "kernel/src/arch/x86_64/memory/paging/free_list.rs", "rank": 38, "score": 141668.16403499537 }, { "content": "/// Represents a page table level.\n\npub trait PageTableLevel {\n\n fn get_level() -> usize;\n\n}\n\n\n", "file_path": "kernel/src/arch/x86_64/memory/paging/page_table.rs", "rank": 39, "score": 141334.83968468718 }, { "content": "/// Represents a page table level that is not one.\n\npub trait ReducablePageTableLevel: PageTableLevel {\n\n /// The page table level below this one.\n\n type NextLevel: PageTableLevel;\n\n}\n\n\n\n/// Page table level 4.\n\npub struct Level4;\n\nimpl PageTableLevel for Level4 {\n\n fn get_level() -> usize {\n\n 4\n\n }\n\n}\n\nimpl ReducablePageTableLevel for Level4 {\n\n type NextLevel = Level3;\n\n}\n\n\n\n/// Page table level 3.\n\npub struct Level3;\n\nimpl PageTableLevel for Level3 {\n\n fn get_level() -> usize {\n", "file_path": "kernel/src/arch/x86_64/memory/paging/page_table.rs", "rank": 40, "score": 140919.9122578462 }, { "content": "/// Structs managing a level 4 page table and it's decendants can implement\n\n/// this to manage paging.\n\npub trait PageTableManager {\n\n /// Returns a mutable reference to the level 4 page table.\n\n fn get_l4(&mut self) -> &mut PageTable<Level4>;\n\n\n\n /// Returns the corresponding physical address to a virtual 
address.\n\n fn translate_address(&mut self, address: VirtualAddress) -> Option<PhysicalAddress> {\n\n self.get_l1(address)\n\n .and_then(|l1| l1[PageTable::<Level1>::table_index(address)].points_to())\n\n .map(|page_address| page_address + (address.as_usize() & 0xfff))\n\n }\n\n\n\n /// Returns a mutable reference to the level 1 table corresponding to the\n\n /// given address.\n\n fn get_l1(&mut self, address: VirtualAddress) -> Option<Level1TableReference> {\n\n assert!(valid_address!(address));\n\n\n\n let table_index = PageTable::<Level2>::table_index(address);\n\n let preemption_state = {\n\n let l4 = self.get_l4();\n\n\n", "file_path": "kernel/src/arch/x86_64/memory/paging/page_table_manager.rs", "rank": 41, "score": 139383.2438435812 }, { "content": "/// Returns a lock of the current process.\n\npub fn get_current_process<'a>() -> ProcessLock<'a> {\n\n let pid = CURRENT_THREAD.lock().pid;\n\n ProcessLock {\n\n guard: PROCESS_LIST.lock(),\n\n key: pid\n\n }\n\n}\n", "file_path": "kernel/src/multitasking/pcb.rs", "rank": 42, "score": 134241.84460364393 }, { "content": "#[cfg(target_arch = \"x86_64\")]\n\npub fn get_vga_info() -> vga_buffer::Info {\n\n match BOOT_INFO.try().unwrap().fb_info_tag() {\n\n Some(framebuffer_tag) => vga_buffer::Info {\n\n height: framebuffer_tag.height as usize,\n\n width: framebuffer_tag.width as usize,\n\n address: VirtualAddress::from_usize(to_virtual!(framebuffer_tag.addr)), /* bpp: framebuffer_tag.\n\n * bpp,\n\n * pitch: framebuffer_tag.pitch as usize */\n\n },\n\n None => vga_buffer::Info {\n\n height: 25,\n\n width: 80,\n\n address: VirtualAddress::from_usize(to_virtual!(0xb8000)), /* bpp: 16,\n\n * pitch: 160 */\n\n },\n\n }\n\n}\n\n\n", "file_path": "kernel/src/boot/multiboot2.rs", "rank": 43, "score": 134070.19679852505 }, { "content": "#[cfg(target_arch = \"x86_64\")]\n\npub fn get_vga_info() -> vga_buffer::Info {\n\n match *get_boot_method() {\n\n BootMethod::Multiboot => multiboot::get_vga_info(),\n\n 
BootMethod::Multiboot2 => multiboot2::get_vga_info(),\n\n _ => unimplemented!(),\n\n }\n\n}\n\n\n", "file_path": "kernel/src/boot/mod.rs", "rank": 44, "score": 134070.19679852505 }, { "content": "#[cfg(target_arch = \"x86_64\")]\n\npub fn get_vga_info() -> vga_buffer::Info {\n\n if get_flags().contains(MultibootFlags::FRAMEBUFFER) {\n\n let info = get_info();\n\n vga_buffer::Info {\n\n height: info.framebuffer_height as usize,\n\n width: info.framebuffer_width as usize,\n\n address: VirtualAddress::from_usize(to_virtual!(info.framebuffer_addr)), /* bpp: 16 * pitch: 160 */\n\n }\n\n } else {\n\n vga_buffer::Info {\n\n height: 25,\n\n width: 80,\n\n address: VirtualAddress::from_usize(to_virtual!(0xb8000)), /* bpp: 16,\n\n * pitch: 160 */\n\n }\n\n }\n\n}\n\n\n", "file_path": "kernel/src/boot/multiboot.rs", "rank": 45, "score": 134070.19679852505 }, { "content": "/// Sets the given IRQ number to the specified value.\n\nfn set_irq(number: u8, value: IORedirectionEntry) {\n\n assert!(number < 24);\n\n\n\n let reg = 0x10 + number * 2;\n\n\n\n // Disable the entry, before setting the destination.\n\n set_register(reg, IORedirectionEntryFlags::MASK.bits() as u32);\n\n\n\n set_register(reg + 1, (value.0 >> 32) as u32);\n\n set_register(reg, value.0 as u32);\n\n}\n\n\n", "file_path": "kernel/src/arch/x86_64/interrupts/ioapic.rs", "rank": 46, "score": 129205.3780763661 }, { "content": "fn exec(name_ptr: VirtualAddress, name_length: usize) -> isize {\n\n let name_ptr_valid = {\n\n let pcb = get_current_process();\n\n\n\n pcb.address_space\n\n .contains_area(MemoryArea::new(name_ptr, name_length))\n\n };\n\n\n\n if name_ptr_valid {\n\n let name = from_raw_str!(name_ptr, name_length);\n\n\n\n if let Ok(name) = name {\n\n let process_id = elf::process_from_initramfs_file(name);\n\n\n\n if let Ok(process_id) = process_id {\n\n let pid: usize = process_id.into();\n\n\n\n assert!(pid as isize > 0, \"Process ID too large.\");\n\n\n\n pid as isize\n", "file_path": 
"kernel/src/syscalls/mod.rs", "rank": 47, "score": 128468.89927066873 }, { "content": "#[derive(Clone, Copy)]\n\nstruct FreeListEntry {\n\n /// The length of this entry.\n\n length: usize,\n\n /// The start address of the next entry.\n\n next_entry: Option<PhysicalAddress>,\n\n}\n\n\n\nimpl FreeListEntry {\n\n /// Creates a new free list entry.\n\n fn new(length: usize, next_entry: Option<PhysicalAddress>) -> FreeListEntry {\n\n FreeListEntry { length, next_entry }\n\n }\n\n}\n\n\n\n/// Represents the list of free page frames.\n\npub struct FreeList {\n\n /// The first entry in the linked list.\n\n first_entry: Option<PhysicalAddress>,\n\n}\n\n\n", "file_path": "kernel/src/arch/x86_64/memory/paging/free_list.rs", "rank": 48, "score": 125861.48611810344 }, { "content": "/// Initializes all IO devices.\n\npub fn init() {\n\n assert_has_not_been_called!(\"IO components should only be initialized once\");\n\n arch::Current::init_io();\n\n}\n\n\n\n/// Prints the given line to the screen.\n\n///\n\n/// It uses the arguments passed to it and prints the string with the\n\n/// formatting arguments.\n\n/// Then a new line is started.\n\n#[macro_export]\n\nmacro_rules! println {\n\n ($fmt:expr) => (print!(concat!($fmt, \"\\n\")));\n\n ($fmt:expr, $($arg:tt)*) => (print!(concat!($fmt, \"\\n\"), $($arg)*));\n\n}\n\n\n\n/// Prints the given string to the screen.\n\n///\n\n/// It uses the arguments passed to it and prints the string with the\n\n/// formatting arguments.\n\n#[macro_export]\n\nmacro_rules! 
print {\n\n ($($arg:tt)*) => ({\n\n <$crate::arch::Current as $crate::arch::Architecture>::write_fmt(format_args!($($arg)*));\n\n });\n\n}\n", "file_path": "kernel/src/io.rs", "rank": 49, "score": 119486.2884956917 }, { "content": "#[no_mangle]\n\npub fn main() {\n\n loop {\n\n veos_std::thread::sleep(Duration::from_millis(1000));\n\n println!(\"Nest\");\n\n }\n\n}\n", "file_path": "test/src/lib.rs", "rank": 50, "score": 119486.2884956917 }, { "content": "/// Exits the current process.\n\npub fn exit() -> ! {\n\n unsafe {\n\n syscall!(EXIT_SYSCALL_NUM);\n\n }\n\n unreachable!();\n\n}\n\n\n", "file_path": "std/src/process.rs", "rank": 51, "score": 119486.2884956917 }, { "content": "#[no_mangle]\n\npub fn main() {\n\n veos_std::process::exec(\"/bin/test\").unwrap();\n\n\n\n loop {\n\n veos_std::thread::sleep(Duration::from_millis(500));\n\n println!(\"Test\");\n\n }\n\n}\n", "file_path": "init/src/lib.rs", "rank": 52, "score": 119486.2884956917 }, { "content": "/// This function gets called when the system is out of memory.\n\npub fn oom() -> ! 
{\n\n panic!(\"Out of memory!\");\n\n}\n", "file_path": "kernel/src/memory/mod.rs", "rank": 53, "score": 117351.9744866277 }, { "content": "/// Kills the current thread.\n\npub fn kill_thread() {\n\n unsafe {\n\n syscall!(KILL_THREAD_SYSCALL_NUM);\n\n }\n\n}\n\n\n\n/// Used internally to create and exit new threads.\n\nextern \"C\" fn new_thread_creator(\n\n function: fn(u64, u64, u64, u64),\n\n arg1: u64,\n\n arg2: u64,\n\n arg3: u64,\n\n arg4: u64,\n\n) {\n\n function(arg1, arg2, arg3, arg4);\n\n\n\n kill_thread();\n\n}\n", "file_path": "std/src/thread.rs", "rank": 54, "score": 117351.9744866277 }, { "content": "#[cfg(not(test))]\n\npub fn init() {\n\n assert_has_not_been_called!(\"Memory state should only be initialized once.\");\n\n\n\n arch::Current::memory_init();\n\n}\n\n\n", "file_path": "kernel/src/memory/mod.rs", "rank": 55, "score": 117351.9744866277 }, { "content": "/// Invokes the scheduler.\n\n///\n\n/// This does nothing more than calling the current architecture scheduling\n\n/// function. 
The only reason this exists is for convenience.\n\npub fn schedule() {\n\n Current::invoke_scheduler()\n\n}\n", "file_path": "kernel/src/arch/mod.rs", "rank": 56, "score": 117351.9744866277 }, { "content": "/// This function accepts the syscalls and calls the corresponding handlers.\n\npub fn syscall_handler(\n\n num: u16,\n\n arg1: usize,\n\n arg2: usize,\n\n arg3: usize,\n\n arg4: usize,\n\n arg5: usize,\n\n arg6: usize\n\n) -> isize {\n\n match num {\n\n 0 => print_char(arg1 as u8 as char),\n\n 1 => kill_process(),\n\n 2 => return_pid(),\n\n 3 => exec(VirtualAddress::from_usize(arg1), arg2),\n\n 4 => sleep(arg1, arg2),\n\n 5 => create_thread(\n\n VirtualAddress::from_usize(arg1),\n\n arg2,\n\n arg3,\n\n arg4,\n\n arg5,\n\n arg6\n\n ),\n\n 6 => kill_thread(),\n\n _ => unknown_syscall(num)\n\n }\n\n}\n\n\n", "file_path": "kernel/src/syscalls/mod.rs", "rank": 57, "score": 115339.13998783144 }, { "content": "/// Initializes the system to be able to accept syscalls.\n\npub fn init() {\n\n let sysret_cs = USER_32BIT_CODE_SEGMENT.0 as u64;\n\n let syscall_cs = KERNEL_CODE_SEGMENT.0 as u64;\n\n\n\n let star_value = sysret_cs << 48 | syscall_cs << 32;\n\n let lstar_value = syscall_entry as u64;\n\n let fmask_value = Flags::IF.bits() as u64;\n\n let gs_base_value = unsafe { &TSS.privilege_stack_table[0] as *const _ as u64 };\n\n\n\n unsafe {\n\n wrmsr(IA32_LSTAR, lstar_value);\n\n wrmsr(IA32_STAR, star_value);\n\n wrmsr(IA32_FMASK, fmask_value);\n\n wrmsr(IA32_KERNEL_GS_BASE, gs_base_value);\n\n }\n\n}\n\n\n\n/// The entry point for all syscalls.\n\n#[naked]\n\nextern \"C\" fn syscall_entry() {\n", "file_path": "kernel/src/arch/x86_64/syscalls.rs", "rank": 58, "score": 115339.13998783144 }, { "content": "#[inline(always)]\n\npub fn cpu_relax() {\n\n arch::Current::cpu_relax();\n\n}\n\n\n\n/// Halts the CPU.\n\n///\n\n/// # Safety\n\n/// - If preemption is disabled, the execution can never be returned.\n\n#[inline(always)]\n\npub unsafe fn cpu_halt() {\n\n 
arch::Current::cpu_halt();\n\n}\n\n\n\n/// Disables preemption and returns the previous state.\n\n///\n\n/// # Safety\n\n/// - The returned `PreemptionState` must be restored.\n\npub unsafe fn disable_preemption() -> PreemptionState {\n\n let state = PreemptionState::current();\n\n\n", "file_path": "kernel/src/sync/mod.rs", "rank": 59, "score": 115339.13998783144 }, { "content": "/// This function should get called after calling `context_switch` to perform\n\n/// clean up.\n\npub fn after_context_switch() {\n\n if OLD_THREAD.is_some() {\n\n if OLD_THREAD.as_ref().unwrap().is_dead() {\n\n unsafe {\n\n // Drop the old thread.\n\n OLD_THREAD.as_mut().take();\n\n }\n\n } else {\n\n let old_thread = unsafe { OLD_THREAD.as_mut().take().unwrap() };\n\n return_old_thread_to_queue(old_thread);\n\n }\n\n }\n\n arch::Current::interrupt_in(CURRENT_THREAD.lock().get_quantum());\n\n}\n\n\n", "file_path": "kernel/src/multitasking/scheduler.rs", "rank": 60, "score": 115339.13998783144 }, { "content": "/// The timer interrupt handler for the system.\n\npub fn timer_interrupt() {\n\n schedule();\n\n}\n\n\n", "file_path": "kernel/src/interrupts/mod.rs", "rank": 61, "score": 115339.13998783144 }, { "content": "/// Creates a new thread passing it the given arguments.\n\npub fn new_thread(function: fn(u64, u64, u64, u64), arg1: u64, arg2: u64, arg3: u64, arg4: u64) {\n\n unsafe {\n\n syscall!(\n\n NEW_THREAD_SYSCALL_NUM,\n\n new_thread_creator as u64,\n", "file_path": "std/src/thread.rs", "rank": 62, "score": 115019.14546322692 }, { "content": "/// Writes the file to the initramfs file.\n\n///\n\n/// The file name parameter specifies the name within the initramfs, while the file_path parameter specifies the path to the source file.\n\nfn write_file(file: &mut File, file_num: usize, file_name: &str, file_path: &Path) {\n\n let file_metadata_start = FILE_METADATA_OFFSET + file_num * FILE_METADATA_SIZE;\n\n\n\n // Write file name.\n\n let name_position = file\n\n 
.seek(SeekFrom::End(0))\n\n .unwrap_or_exit(COULD_NOT_SEEK_TARGET);\n\n file.write(file_name.as_bytes())\n\n .unwrap_or_exit(COULD_NOT_WRITE_TO_TARGET);\n\n\n\n // Write file name metadata.\n\n file.seek(SeekFrom::Start(file_metadata_start as u64))\n\n .unwrap_or_exit(COULD_NOT_SEEK_TARGET);\n\n file.write_u64::<BigEndian>(name_position)\n\n .unwrap_or_exit(COULD_NOT_WRITE_TO_TARGET);\n\n file.write_u64::<BigEndian>(file_name.len() as u64)\n\n .unwrap_or_exit(COULD_NOT_WRITE_TO_TARGET);\n\n\n\n // Write file content.\n\n let content_position = file\n", "file_path": "mkinitramfs/src/main.rs", "rank": 63, "score": 114003.68270296406 }, { "content": "/// Initializes the I/O APIC.\n\npub fn init() {\n\n assert_has_not_been_called!(\"The I/O APIC should only be initialized once.\");\n\n\n\n map_page_at(\n\n get_ioapic_base(),\n\n IO_APIC_BASE,\n\n PageFlags::READABLE | PageFlags::WRITABLE | PageFlags::NO_CACHE\n\n );\n\n\n\n // Disable the 8259 PIC.\n\n unsafe {\n\n outb(0x21, 0xff);\n\n outb(0xa1, 0xff);\n\n }\n\n\n\n for (i, irq_num) in IRQ_INTERRUPT_NUMS.iter().enumerate().take(16) {\n\n let mut irq = IORedirectionEntry::new();\n\n irq.set_vector(*irq_num);\n\n set_irq(i as u8, irq);\n\n }\n", "file_path": "kernel/src/arch/x86_64/interrupts/ioapic.rs", "rank": 64, "score": 113437.70055058307 }, { "content": "/// Initializes the LAPIC.\n\npub fn init() {\n\n assert_has_not_been_called!(\"The LAPIC should only be initialized once.\");\n\n\n\n map_page_at(\n\n get_lapic_base(),\n\n LAPIC_BASE,\n\n PageFlags::READABLE | PageFlags::WRITABLE | PageFlags::NO_CACHE\n\n );\n\n\n\n let cpu_id = CpuId::new()\n\n .get_feature_info()\n\n .unwrap()\n\n .initial_local_apic_id();\n\n let logical_id = cpu_id % 8;\n\n\n\n let mut inactive_register = LVTRegister::new();\n\n inactive_register.set_inactive();\n\n\n\n let mut lint0_register = LVTRegister::new();\n\n lint0_register.set_delivery_mode(LVTRegisterFlags::EXTINT_DELIVERY_MODE);\n", "file_path": 
"kernel/src/arch/x86_64/interrupts/lapic.rs", "rank": 65, "score": 113437.70055058307 }, { "content": "/// Initializes the memory manager.\n\npub fn init() {\n\n assert_has_not_been_called!(\"The x86_64 memory initialization should only be called once.\");\n\n\n\n let physical_initramfs_area = crate::boot::get_initramfs_area();\n\n\n\n paging::init(physical_initramfs_area);\n\n\n\n let start = INITRAMFS_MAP_AREA_START + physical_initramfs_area.start_address().offset_in_page();\n\n unsafe {\n\n INITRAMFS_AREA = MemoryArea::new(start, physical_initramfs_area.length());\n\n }\n\n}\n\n\n", "file_path": "kernel/src/arch/x86_64/memory/mod.rs", "rank": 66, "score": 113437.70055058307 }, { "content": "/// Initializes interrupts on the x86_64 architecture.\n\npub fn init() {\n\n assert_has_not_been_called!(\"Interrupts should only be initialized once.\");\n\n\n\n IDT.load();\n\n\n\n lapic::init();\n\n\n\n ioapic::init();\n\n\n\n lapic::calibrate_timer();\n\n}\n\n\n\nmacro_rules! irq_interrupt {\n\n ($(#[$attr: meta])* fn $name: ident $content: tt) => {\n\n $(#[$attr])*\n\n extern \"x86-interrupt\" fn $name(_: &mut ExceptionStackFrame) {\n\n let old_priority = lapic::get_priority();\n\n lapic::set_priority(0x20);\n\n unsafe {\n\n interrupts::enable();\n", "file_path": "kernel/src/arch/x86_64/interrupts/mod.rs", "rank": 67, "score": 113437.70055058307 }, { "content": "#[inline(always)]\n\npub fn cpu_relax() {\n\n // This instruction is meant for usage in spinlock loops\n\n // (see Intel x86 manual, III, 4.2)\n\n unsafe {\n\n asm!(\"pause\" :::: \"volatile\");\n\n }\n\n}\n\n\n\n/// Halts the cpu, until it is woken again.\n\n///\n\n/// # Safety\n\n/// - Don't use this function directly, rather use the interface through the\n\n/// sync module.\n\n#[inline(always)]\n\npub unsafe fn cpu_halt() {\n\n asm!(\"hlt\" :::: \"volatile\");\n\n}\n\n\n\n/// Disables interrupts.\n\n///\n", "file_path": "kernel/src/arch/x86_64/sync.rs", "rank": 68, "score": 113437.70055058307 }, { "content": 
"/// The memory area containing the initramfs.\n\nfn initramfs() -> MemoryArea<PhysicalAddress> {\n\n let area = get_initramfs_area();\n\n // Align to the previous page.\n\n let initramfs_start = area.start_address().page_align_down();\n\n\n\n // Round up the the next page boundary.\n\n let initramfs_length = area.length();\n\n let initramfs_length = if initramfs_length > 0 {\n\n (initramfs_length - 1) / PAGE_SIZE * PAGE_SIZE + PAGE_SIZE\n\n } else {\n\n 0\n\n };\n\n MemoryArea::new(initramfs_start, initramfs_length)\n\n}\n\n\n\n/// Provides an iterator for a memory map.\n\npub struct MemoryMapIterator<I: Iterator<Item = MemoryArea<PhysicalAddress>>> {\n\n // multiboot2_iterator: Option<multiboot2::MemoryMapIterator>,\n\n to_exclude: [MemoryArea<PhysicalAddress>; 2],\n\n current_entry: Option<MemoryArea<PhysicalAddress>>,\n", "file_path": "kernel/src/boot/mod.rs", "rank": 69, "score": 112895.57046070803 }, { "content": "/// Returns the base address for the I/O APIC.\n\nfn get_ioapic_base() -> VirtualAddress {\n\n IO_APIC_BASE.to_virtual()\n\n}\n\n\n\n/// Represents an entry in the I/O APIC redirection table.\n", "file_path": "kernel/src/arch/x86_64/interrupts/ioapic.rs", "rank": 70, "score": 112802.33222969521 }, { "content": "/// Returns the base address for the LAPIC of this CPU.\n\nfn get_lapic_base() -> VirtualAddress {\n\n LAPIC_BASE.to_virtual()\n\n}\n\n\n\n/// Sets a LAPIC register.\n\n///\n\n/// # Safety\n\n/// - Ensure the LAPIC is mapped.\n\n/// - Setting registers incorrectly can cause interrupts to behave unexpected.\n\nunsafe fn set_register(offset: usize, value: u32) {\n\n assert!(offset < 0x1000);\n\n\n\n *(get_lapic_base() + offset).as_mut_ptr() = value;\n\n}\n\n\n\n/// Gets a LAPIC register.\n\n///\n\n/// # Safety\n\n/// - Ensure the LAPIC is mapped.\n\nunsafe fn get_register(offset: usize) -> u32 {\n", "file_path": "kernel/src/arch/x86_64/interrupts/lapic.rs", "rank": 71, "score": 112802.33222969521 }, { "content": "/// Returns the ID of the 
current process.\n\npub fn get_pid() -> u64 {\n\n unsafe { syscall!(GET_PID_SYSCALL_NUM) as u64 }\n\n}\n\n\n", "file_path": "std/src/process.rs", "rank": 72, "score": 112129.61141870782 }, { "content": "/// Calibrates the timer to work properly.\n\npub fn calibrate_timer() {\n\n let measure_accuracy_in_ms = 125;\n\n\n\n debug!(\n\n \"Calibrating the LAPIC timer for {}ms...\",\n\n measure_accuracy_in_ms\n\n );\n\n\n\n // Use the RTC to calibrate the LAPIC timer.\n\n unsafe {\n\n // Save the NMI enable state to restore it later.\n\n let nmi_bit = inb(0x70) & 0x80;\n\n\n\n // Read the previous value of status register b.\n\n outb(0x70, 0x8b);\n\n let previous_b = inb(0x71);\n\n\n\n // Enable the RTC interrupts with the default frequency of 1024hz.\n\n outb(0x70, 0x8b);\n\n outb(0x71, previous_b | 0x40);\n", "file_path": "kernel/src/arch/x86_64/interrupts/lapic.rs", "rank": 73, "score": 111638.65786625612 }, { "content": "/// Signals the end of the interrupt handler to the LAPIC.\n\npub fn signal_eoi() {\n\n unsafe {\n\n set_register(END_OF_INTERRUPT, 0);\n\n }\n\n}\n\n\n", "file_path": "kernel/src/arch/x86_64/interrupts/lapic.rs", "rank": 74, "score": 111638.65786625612 }, { "content": "/// Lets the current thread sleep for `ms` milliseconds.\n\npub fn sleep(duration: Duration) {\n\n unsafe {\n\n syscall!(\n\n SLEEP_SYSCALL_NUM,\n\n duration.as_secs(),\n\n duration.subsec_nanos()\n\n );\n\n }\n\n}\n\n\n", "file_path": "std/src/thread.rs", "rank": 75, "score": 109337.85195532235 }, { "content": " /// Ensures mutual exclusion for pages this entry points to.\n\n const ENTRY_LOCK = 1 << 9;\n\n /// No code on this page can be executed.\n\n const NO_EXECUTE = 1 << 63;\n\n\n\n /// The flags used for page tables.\n\n const PAGE_TABLE_FLAGS = Self::PRESENT.bits | Self::WRITABLE.bits | Self::USER_ACCESSIBLE.bits;\n\n }\n\n}\n\n\n\nimpl PageTableEntry {\n\n /// Creates a new page table entry.\n\n pub fn new() -> PageTableEntry {\n\n PageTableEntry(0)\n\n }\n\n\n\n /// Gets the 
flags from a page table entry.\n\n pub fn flags(&self) -> PageTableEntryFlags {\n\n PageTableEntryFlags::from_bits_truncate(self.0)\n\n }\n", "file_path": "kernel/src/arch/x86_64/memory/paging/page_table_entry.rs", "rank": 76, "score": 108755.9995055349 }, { "content": " pub fn unlock(&mut self, preemption_state: &PreemptionState) {\n\n self.0 = self.0 & !PageTableEntryFlags::ENTRY_LOCK.bits();\n\n unsafe {\n\n restore_preemption_state(preemption_state);\n\n }\n\n }\n\n\n\n /// Checks if this entry is locked.\n\n pub fn is_locked(&self) -> bool {\n\n self.flags().contains(PageTableEntryFlags::ENTRY_LOCK)\n\n }\n\n}\n\n\n\nimpl fmt::Debug for PageTableEntry {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n if self.flags().contains(PageTableEntryFlags::PRESENT) {\n\n write!(\n\n f,\n\n \"Entry(Address={:?}, Flags={:?})\",\n\n self.points_to().unwrap(),\n", "file_path": "kernel/src/arch/x86_64/memory/paging/page_table_entry.rs", "rank": 77, "score": 108755.02086126847 }, { "content": " self.flags()\n\n )\n\n } else {\n\n write!(f, \"Entry(Address=invalid, Flags={:?})\", self.flags())\n\n }\n\n }\n\n}\n\n\n\n/// Tests for methods used in the paging module.\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n /// Tests the points_to method of a page table entry.\n\n #[test]\n\n fn test_points_to() {\n\n let mut entry = PageTableEntry::new();\n\n entry.set_address(0xdead_b000);\n\n assert_eq!(entry.points_to(), None);\n\n entry.set_flags(PageTableEntryFlags::PRESENT);\n", "file_path": "kernel/src/arch/x86_64/memory/paging/page_table_entry.rs", "rank": 78, "score": 108751.66086296334 }, { "content": "//! 
Handles page table entries.\n\n\n\nuse super::frame_allocator::FRAME_ALLOCATOR;\n\nuse super::PageFrame;\n\nuse core::fmt;\n\nuse core::sync::atomic::{AtomicU64, Ordering};\n\nuse crate::memory::{Address, PhysicalAddress};\n\nuse crate::sync::{cpu_relax, disable_preemption, restore_preemption_state, PreemptionState};\n\n\n\n/// Serves as a mask for the physical address in a page table entry.\n\nconst PHYSICAL_ADDRESS_MASK: usize = 0xff_ffff_ffff << 12;\n\n\n\n/// Represents a page table entry.\n\n#[repr(C)]\n\n#[derive(Clone)]\n\npub struct PageTableEntry(u64);\n\n\n\nbitflags! {\n\n /// The possible flags in a page table entry.\n\n pub struct PageTableEntryFlags: u64 {\n", "file_path": "kernel/src/arch/x86_64/memory/paging/page_table_entry.rs", "rank": 79, "score": 108751.57917577859 }, { "content": " /// Sets the given flags in the entry.\n\n pub fn set_flags(&mut self, flags: PageTableEntryFlags) -> &mut PageTableEntry {\n\n if self.is_locked() {\n\n self.0 = (self.0 & PHYSICAL_ADDRESS_MASK as u64)\n\n | flags.bits()\n\n | PageTableEntryFlags::ENTRY_LOCK.bits();\n\n } else {\n\n self.0 = (self.0 & PHYSICAL_ADDRESS_MASK as u64) | flags.bits();\n\n }\n\n self\n\n }\n\n\n\n /// Removes the given flags from the entry.\n\n pub fn remove_flags(&mut self, flags: PageTableEntryFlags) -> &mut PageTableEntry {\n\n let mut current_flags = self.flags();\n\n current_flags.remove(flags);\n\n self.set_flags(current_flags)\n\n }\n\n\n\n /// Unmaps and deallocates the frame this entry points to.\n", "file_path": "kernel/src/arch/x86_64/memory/paging/page_table_entry.rs", "rank": 80, "score": 108751.39785429211 }, { "content": " pub fn unmap(&mut self) {\n\n let address = self.points_to().expect(\"Trying to unmap an unmapped page.\");\n\n unsafe { FRAME_ALLOCATOR.deallocate(PageFrame::from_address(address)) };\n\n self.0 = 0;\n\n }\n\n\n\n /// Locks the pages this entry points to.\n\n ///\n\n /// They can't be accessed by other processors/threads after being locked.\n\n /// # 
Note\n\n /// The preemtion state must be restored when unlocking.\n\n pub fn lock(&mut self) -> PreemptionState {\n\n let mut preemption_state;\n\n let atomic_lock: &AtomicU64 = unsafe { &*((&mut self.0) as *mut u64 as *mut AtomicU64) };\n\n loop {\n\n unsafe {\n\n preemption_state = disable_preemption();\n\n }\n\n let lock_switch = atomic_lock\n\n .fetch_or(PageTableEntryFlags::ENTRY_LOCK.bits(), Ordering::Acquire)\n", "file_path": "kernel/src/arch/x86_64/memory/paging/page_table_entry.rs", "rank": 81, "score": 108750.36439881797 }, { "content": "\n\n /// Returns the address this entry points to.\n\n pub fn points_to(&self) -> Option<PhysicalAddress> {\n\n if self.flags().contains(PageTableEntryFlags::PRESENT) {\n\n Some(PhysicalAddress::from_usize(\n\n self.0 as usize & PHYSICAL_ADDRESS_MASK\n\n ))\n\n } else {\n\n None\n\n }\n\n }\n\n\n\n /// Sets the address of this entry.\n\n pub fn set_address(&mut self, address: PhysicalAddress) -> &mut PageTableEntry {\n\n assert_eq!(address.as_usize() & !PHYSICAL_ADDRESS_MASK, 0);\n\n self.0 &= !PHYSICAL_ADDRESS_MASK as u64; // Clear address field first.\n\n self.0 |= address.as_usize() as u64 & PHYSICAL_ADDRESS_MASK as u64;\n\n self\n\n }\n\n\n", "file_path": "kernel/src/arch/x86_64/memory/paging/page_table_entry.rs", "rank": 82, "score": 108749.00141469098 }, { "content": " #[test]\n\n fn test_flags() {\n\n let mut entry = PageTableEntry::new();\n\n let flags = PageTableEntryFlags::PRESENT\n\n | PageTableEntryFlags::DIRTY\n\n | PageTableEntryFlags::USER_ACCESSIBLE\n\n | PageTableEntryFlags::WRITABLE\n\n | PageTableEntryFlags::NO_EXECUTE;\n\n entry.set_flags(flags);\n\n assert_eq!(entry.flags(), flags);\n\n }\n\n\n\n /// Tests that changing the flags doesn't change the address.\n\n #[test]\n\n fn test_flag_change() {\n\n let mut entry = PageTableEntry::new();\n\n let flags = PageTableEntryFlags::PRESENT\n\n | PageTableEntryFlags::DIRTY\n\n | PageTableEntryFlags::USER_ACCESSIBLE\n\n | PageTableEntryFlags::WRITABLE\n", 
"file_path": "kernel/src/arch/x86_64/memory/paging/page_table_entry.rs", "rank": 83, "score": 108747.00954153812 }, { "content": " | PageTableEntryFlags::NO_EXECUTE;\n\n entry.set_address(0xcafe_b000);\n\n entry.set_flags(flags);\n\n assert_eq!(entry.points_to(), Some(0xcafe_b000));\n\n }\n\n\n\n /// Tests that the binary representation is as expected.\n\n #[test]\n\n fn test_representation() {\n\n let mut entry = PageTableEntry::new();\n\n let flags = PageTableEntryFlags::PRESENT\n\n | PageTableEntryFlags::DIRTY\n\n | PageTableEntryFlags::USER_ACCESSIBLE\n\n | PageTableEntryFlags::WRITABLE\n\n | PageTableEntryFlags::NO_EXECUTE;\n\n entry.set_flags(flags);\n\n entry.set_address(0xdead_b000);\n\n assert_eq!(\n\n entry.0,\n\n 0xdead_b000 | (1 << 0) | (1 << 6) | (1 << 2) | (1 << 1) | (1 << 63)\n\n );\n\n }\n\n}\n", "file_path": "kernel/src/arch/x86_64/memory/paging/page_table_entry.rs", "rank": 84, "score": 108745.55917839067 }, { "content": " assert_eq!(entry.points_to(), Some(0xdead_b000));\n\n }\n\n\n\n /// Tests that unaligned addresses panic.\n\n #[test]\n\n #[should_panic]\n\n fn test_unaligned_address() {\n\n let mut entry = PageTableEntry::new();\n\n entry.set_address(0xdead_beef);\n\n }\n\n\n\n /// Tests that overflowing addresses panic.\n\n #[test]\n\n #[should_panic]\n\n fn test_address_overflow() {\n\n let mut entry = PageTableEntry::new();\n\n entry.set_address(0xcafe_babe_dead_b000);\n\n }\n\n\n\n /// Tests that the flags field works as expected.\n", "file_path": "kernel/src/arch/x86_64/memory/paging/page_table_entry.rs", "rank": 85, "score": 108744.59206215343 }, { "content": " & PageTableEntryFlags::ENTRY_LOCK.bits() == 0;\n\n if lock_switch {\n\n break;\n\n } else {\n\n unsafe {\n\n restore_preemption_state(&preemption_state);\n\n }\n\n }\n\n\n\n // Wait until the lock looks unlocked before retrying\n\n while atomic_lock.load(Ordering::Relaxed) & PageTableEntryFlags::ENTRY_LOCK.bits() > 0 {\n\n cpu_relax();\n\n }\n\n }\n\n\n\n preemption_state\n\n 
}\n\n\n\n /// Unlocks the pages this entry points to and restores the preemption\n\n /// state.\n", "file_path": "kernel/src/arch/x86_64/memory/paging/page_table_entry.rs", "rank": 86, "score": 108742.70369123902 }, { "content": " /// The page is present.\n\n const PRESENT = 1 << 0;\n\n /// The page is writable.\n\n const WRITABLE = 1 << 1;\n\n /// The page is accessible in user mode.\n\n const USER_ACCESSIBLE = 1 << 2;\n\n /// Writes will not be cached.\n\n const WRITE_TROUGH_CACHING = 1 << 3;\n\n /// Page accesses will not be cached.\n\n const DISABLE_CACHE = 1 << 4;\n\n /// The page was accessed.\n\n const ACCESSED = 1 << 5;\n\n /// The page was written to.\n\n const DIRTY = 1 << 6;\n\n /// The page is a huge page.\n\n const HUGE_PAGE = 1 << 7;\n\n /// The page is global.\n\n ///\n\n /// This means that it won't be flushed from the caches on an address space switch.\n\n const GLOBAL = 1 << 8;\n", "file_path": "kernel/src/arch/x86_64/memory/paging/page_table_entry.rs", "rank": 87, "score": 108726.42005717686 }, { "content": "#[inline(always)]\n\npub fn interrupts_enabled() -> bool {\n\n flags().contains(Flags::IF)\n\n}\n\n\n", "file_path": "kernel/src/arch/x86_64/sync.rs", "rank": 88, "score": 108429.1292971325 }, { "content": "/// Returns the current timestamp.\n\npub fn get_current_timestamp() -> Timestamp {\n\n Timestamp::from_duration(unsafe { CLOCK })\n\n}\n", "file_path": "kernel/src/arch/x86_64/sync.rs", "rank": 89, "score": 106724.42880363515 }, { "content": "/// Prints the given format arguments.\n\npub fn print(args: fmt::Arguments) {\n\n StdOut.write_fmt(args).unwrap();\n\n}\n\n\n", "file_path": "std/src/io.rs", "rank": 90, "score": 104974.33510656503 }, { "content": "#[derive(Clone, Copy, Debug)]\n\n#[repr(C, packed)]\n\nstruct MmapEntry {\n\n /// The size of the entry.\n\n size: u32,\n\n /// The base address of the memory area.\n\n base_addr: PhysicalAddress,\n\n /// The length of the memory area.\n\n length: usize,\n\n /// The type of memory 
contained in the area.\n\n ///\n\n /// 1 means usable memory.\n\n mem_type: u32,\n\n}\n\n\n\n/// Represents a module loaded by the boot loader.\n", "file_path": "kernel/src/boot/multiboot.rs", "rank": 91, "score": 104238.98526225757 }, { "content": "#[repr(C, packed)]\n\nstruct ModuleEntry {\n\n /// The start address of the module.\n\n mod_start: u32,\n\n /// The end address of the module.\n\n mod_end: u32,\n\n /// The string associated with the module.\n\n string: u32,\n\n /// Reserved, don't use.\n\n reserved: u32,\n\n}\n\n\n\n/// The base address for the information strucuture.\n\n// This is only valid after init was called.\n\nstatic mut STRUCT_BASE_ADDRESS: *const MultibootInformation = 0 as *const MultibootInformation;\n\n\n", "file_path": "kernel/src/boot/multiboot.rs", "rank": 92, "score": 104238.98526225757 }, { "content": "/// Returns the name of the boot loader.\n\npub fn get_bootloader_name() -> &'static str {\n\n match *get_boot_method() {\n\n BootMethod::Multiboot2 => multiboot2::get_bootloader_name(),\n\n BootMethod::Multiboot => multiboot::get_bootloader_name(),\n\n _ => \"no boot loader\",\n\n }\n\n}\n\n\n", "file_path": "kernel/src/boot/mod.rs", "rank": 93, "score": 103928.32250093916 }, { "content": "/// Returns the name of the boot loader.\n\npub fn get_bootloader_name() -> &'static str {\n\n if get_flags().contains(MultibootFlags::BOOT_LOADER_NAME) {\n\n from_c_str!(to_virtual!(get_info().boot_loader_name)).unwrap()\n\n } else {\n\n // When no specific name was given by the boot loader.\n\n \"a multiboot compliant bootloader\"\n\n }\n\n}\n\n\n", "file_path": "kernel/src/boot/multiboot.rs", "rank": 94, "score": 103928.32250093916 }, { "content": "/// Returns the bootloader name.\n\npub fn get_bootloader_name() -> &'static str {\n\n match BOOT_INFO.try().unwrap().boot_loader_name_tag() {\n\n Some(boot_loader_name_tag) => boot_loader_name_tag.name(),\n\n None => \"a multiboot compliant bootloader\",\n\n }\n\n}\n\n\n", "file_path": 
"kernel/src/boot/multiboot2.rs", "rank": 95, "score": 103928.32250093916 }, { "content": "/// Writes an I/O APIC register.\n\nfn set_register(reg: u8, value: u32) {\n\n unsafe {\n\n *get_ioapic_base().as_mut_ptr() = reg as u32;\n\n *(get_ioapic_base() + 0x10).as_mut_ptr() = value;\n\n }\n\n}\n\n\n", "file_path": "kernel/src/arch/x86_64/interrupts/ioapic.rs", "rank": 96, "score": 102638.83735326165 }, { "content": "/// Returns the method the system was booted with.\n\npub fn get_boot_method() -> &'static BootMethod {\n\n unsafe { &BOOT_METHOD }\n\n}\n\n\n\n/// Returns information about the VGA buffer.\n", "file_path": "kernel/src/boot/mod.rs", "rank": 97, "score": 102310.7328182426 }, { "content": "/// Sets the periodic lapic timer to the specified delay in milliseconds.\n\npub fn set_timer(delay: u32) {\n\n unsafe {\n\n set_register(TIMER_INITIAL_COUNT, delay * TICKS_PER_MS);\n\n }\n\n}\n\n\n", "file_path": "kernel/src/arch/x86_64/interrupts/lapic.rs", "rank": 98, "score": 102310.7328182426 }, { "content": "#[repr(C)]\n\n#[derive(Debug, Clone, Copy)]\n\nstruct ScreenChar {\n\n /// The ascii character represented.\n\n character: u8,\n\n /// The color code of the character represented.\n\n color_code: ColorCode\n\n}\n\n\n", "file_path": "kernel/src/arch/x86_64/vga_buffer.rs", "rank": 99, "score": 100257.781664615 } ]
Rust
src/db/models/configs.rs
nlopes/avro-schema-registry
f18168bacfd1f141be857b7f41cda8c7a874ce93
use std::fmt; use std::str; use chrono::NaiveDateTime; use diesel::prelude::*; use crate::api::errors::{ApiAvroErrorCode, ApiError}; use super::schema::*; use super::Subject; #[derive(Debug, Identifiable, Queryable, Associations, Serialize)] #[table_name = "configs"] #[belongs_to(Subject)] pub struct Config { pub id: i64, pub compatibility: Option<String>, pub created_at: NaiveDateTime, pub updated_at: NaiveDateTime, pub subject_id: Option<i64>, } #[derive(Debug, Clone, Copy, Serialize, Deserialize)] #[serde(rename_all = "SCREAMING_SNAKE_CASE")] pub enum CompatibilityLevel { Backward, BackwardTransitive, Forward, ForwardTransitive, Full, FullTransitive, #[serde(rename = "NONE")] CompatNone, #[serde(other)] Unknown, } impl fmt::Display for CompatibilityLevel { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { let screaming_snake_case = match self { Self::Backward => Ok("BACKWARD"), Self::BackwardTransitive => Ok("BACKWARD_TRANSITIVE"), Self::Forward => Ok("FORWARD"), Self::ForwardTransitive => Ok("FORWARD_TRANSITIVE"), Self::Full => Ok("FULL"), Self::FullTransitive => Ok("FULL_TRANSITIVE"), Self::CompatNone => Ok("NONE"), _ => Ok(""), }?; write!(f, "{}", screaming_snake_case) } } impl str::FromStr for CompatibilityLevel { type Err = (); fn from_str(s: &str) -> Result<Self, ()> { match s { "BACKWARD" => Ok(Self::Backward), "BACKWARD_TRANSITIVE" => Ok(Self::BackwardTransitive), "FORWARD" => Ok(Self::Forward), "FORWARD_TRANSITIVE" => Ok(Self::ForwardTransitive), "FULL" => Ok(Self::Full), "FULL_TRANSITIVE" => Ok(Self::FullTransitive), "NONE" => Ok(Self::CompatNone), _ => Err(()), } } } impl CompatibilityLevel { pub fn valid(self) -> Result<Self, ApiError> { ConfigCompatibility::new(self.to_string()).and(Ok(self)) } } #[derive(Debug, Serialize, Deserialize)] pub struct ConfigCompatibility { pub compatibility: CompatibilityLevel, } impl ConfigCompatibility { pub fn new(level: String) -> Result<Self, ApiError> { match level.parse::<CompatibilityLevel>() { Ok(l) => 
Ok(Self { compatibility: l }), Err(_) => Err(ApiError::new(ApiAvroErrorCode::InvalidCompatibilityLevel)), } } } pub type SetConfig = ConfigCompatibility; pub struct GetSubjectConfig { pub subject: String, } pub struct SetSubjectConfig { pub subject: String, pub compatibility: CompatibilityLevel, } impl Config { pub const DEFAULT_COMPATIBILITY: CompatibilityLevel = CompatibilityLevel::Backward; pub fn get_global_compatibility(conn: &PgConnection) -> Result<String, ApiError> { use super::schema::configs::dsl::*; match configs.filter(id.eq(0)).get_result::<Self>(conn) { Ok(config) => config .compatibility .ok_or_else(|| ApiError::new(ApiAvroErrorCode::BackendDatastoreError)), Err(diesel::result::Error::NotFound) => { Self::insert(&Self::DEFAULT_COMPATIBILITY.to_string(), conn)?; Ok(Self::DEFAULT_COMPATIBILITY.to_string()) } _ => Err(ApiError::new(ApiAvroErrorCode::BackendDatastoreError)), } } pub fn get_with_subject_name( conn: &PgConnection, subject_name: String, ) -> Result<String, ApiError> { let subject = Subject::get_by_name(conn, subject_name)?; match Self::belonging_to(&subject).get_result::<Self>(conn) { Ok(config) => config .compatibility .ok_or_else(|| ApiError::new(ApiAvroErrorCode::BackendDatastoreError)), _ => Err(ApiError::new(ApiAvroErrorCode::BackendDatastoreError)), } } pub fn set_with_subject_name( conn: &PgConnection, subject_name: String, compat: String, ) -> Result<String, ApiError> { use super::schema::configs::dsl::*; let subject = Subject::get_by_name(conn, subject_name)?; match Self::belonging_to(&subject).get_result::<Self>(conn) { Ok(config) => { match diesel::update(&config) .set(compatibility.eq(&compat)) .get_result::<Self>(conn) { Ok(conf) => conf .compatibility .ok_or_else(|| ApiError::new(ApiAvroErrorCode::BackendDatastoreError)), _ => Err(ApiError::new(ApiAvroErrorCode::BackendDatastoreError)), } } Err(diesel::result::Error::NotFound) => { diesel::insert_into(configs) .values(( compatibility.eq(&compat), 
created_at.eq(diesel::dsl::now), updated_at.eq(diesel::dsl::now), subject_id.eq(subject.id), )) .execute(conn) .map_err(|_| ApiError::new(ApiAvroErrorCode::BackendDatastoreError))?; Ok(compat) } _ => Err(ApiError::new(ApiAvroErrorCode::BackendDatastoreError)), } } pub fn set_global_compatibility(conn: &PgConnection, compat: &str) -> Result<String, ApiError> { use super::schema::configs::dsl::*; match diesel::update(configs.find(0)) .set(compatibility.eq(compat)) .get_result::<Self>(conn) { Ok(config) => config .compatibility .ok_or_else(|| ApiError::new(ApiAvroErrorCode::BackendDatastoreError)), Err(diesel::result::Error::NotFound) => { Self::insert(compat, conn)?; Ok(compat.to_string()) } _ => Err(ApiError::new(ApiAvroErrorCode::BackendDatastoreError)), } } fn insert(compat: &str, conn: &PgConnection) -> Result<usize, ApiError> { use super::schema::configs::dsl::*; diesel::insert_into(configs) .values(( id.eq(0), compatibility.eq(&compat), created_at.eq(diesel::dsl::now), updated_at.eq(diesel::dsl::now), )) .execute(conn) .map_err(|_| ApiError::new(ApiAvroErrorCode::BackendDatastoreError)) } }
use std::fmt; use std::str; use chrono::NaiveDateTime; use diesel::prelude::*; use crate::api::errors::{ApiAvroErrorCode, ApiError}; use super::schema::*; use super::Subject; #[derive(Debug, Identifiable, Queryable, Associations, Serialize)] #[table_name = "configs"] #[belongs_to(Subject)] pub struct Config { pub id: i64, pub compatibility: Option<String>, pub created_at: NaiveDateTime, pub updated_at: NaiveDateTime, pub subject_id: Option<i64>, } #[derive(Debug, Clone, Copy, Serialize, Deserialize)] #[serde(rename_all = "SCREAMING_SNAKE_CASE")] pub enum CompatibilityLevel { Backward, BackwardTransitive, Forward, ForwardTransitive, Full, FullTransitive, #[serde(rename = "NONE")] CompatNone, #[serde(other)] Unknown, } impl fmt::Display for CompatibilityLevel { fn fmt(&self, f:
impl str::FromStr for CompatibilityLevel { type Err = (); fn from_str(s: &str) -> Result<Self, ()> { match s { "BACKWARD" => Ok(Self::Backward), "BACKWARD_TRANSITIVE" => Ok(Self::BackwardTransitive), "FORWARD" => Ok(Self::Forward), "FORWARD_TRANSITIVE" => Ok(Self::ForwardTransitive), "FULL" => Ok(Self::Full), "FULL_TRANSITIVE" => Ok(Self::FullTransitive), "NONE" => Ok(Self::CompatNone), _ => Err(()), } } } impl CompatibilityLevel { pub fn valid(self) -> Result<Self, ApiError> { ConfigCompatibility::new(self.to_string()).and(Ok(self)) } } #[derive(Debug, Serialize, Deserialize)] pub struct ConfigCompatibility { pub compatibility: CompatibilityLevel, } impl ConfigCompatibility { pub fn new(level: String) -> Result<Self, ApiError> { match level.parse::<CompatibilityLevel>() { Ok(l) => Ok(Self { compatibility: l }), Err(_) => Err(ApiError::new(ApiAvroErrorCode::InvalidCompatibilityLevel)), } } } pub type SetConfig = ConfigCompatibility; pub struct GetSubjectConfig { pub subject: String, } pub struct SetSubjectConfig { pub subject: String, pub compatibility: CompatibilityLevel, } impl Config { pub const DEFAULT_COMPATIBILITY: CompatibilityLevel = CompatibilityLevel::Backward; pub fn get_global_compatibility(conn: &PgConnection) -> Result<String, ApiError> { use super::schema::configs::dsl::*; match configs.filter(id.eq(0)).get_result::<Self>(conn) { Ok(config) => config .compatibility .ok_or_else(|| ApiError::new(ApiAvroErrorCode::BackendDatastoreError)), Err(diesel::result::Error::NotFound) => { Self::insert(&Self::DEFAULT_COMPATIBILITY.to_string(), conn)?; Ok(Self::DEFAULT_COMPATIBILITY.to_string()) } _ => Err(ApiError::new(ApiAvroErrorCode::BackendDatastoreError)), } } pub fn get_with_subject_name( conn: &PgConnection, subject_name: String, ) -> Result<String, ApiError> { let subject = Subject::get_by_name(conn, subject_name)?; match Self::belonging_to(&subject).get_result::<Self>(conn) { Ok(config) => config .compatibility .ok_or_else(|| 
ApiError::new(ApiAvroErrorCode::BackendDatastoreError)), _ => Err(ApiError::new(ApiAvroErrorCode::BackendDatastoreError)), } } pub fn set_with_subject_name( conn: &PgConnection, subject_name: String, compat: String, ) -> Result<String, ApiError> { use super::schema::configs::dsl::*; let subject = Subject::get_by_name(conn, subject_name)?; match Self::belonging_to(&subject).get_result::<Self>(conn) { Ok(config) => { match diesel::update(&config) .set(compatibility.eq(&compat)) .get_result::<Self>(conn) { Ok(conf) => conf .compatibility .ok_or_else(|| ApiError::new(ApiAvroErrorCode::BackendDatastoreError)), _ => Err(ApiError::new(ApiAvroErrorCode::BackendDatastoreError)), } } Err(diesel::result::Error::NotFound) => { diesel::insert_into(configs) .values(( compatibility.eq(&compat), created_at.eq(diesel::dsl::now), updated_at.eq(diesel::dsl::now), subject_id.eq(subject.id), )) .execute(conn) .map_err(|_| ApiError::new(ApiAvroErrorCode::BackendDatastoreError))?; Ok(compat) } _ => Err(ApiError::new(ApiAvroErrorCode::BackendDatastoreError)), } } pub fn set_global_compatibility(conn: &PgConnection, compat: &str) -> Result<String, ApiError> { use super::schema::configs::dsl::*; match diesel::update(configs.find(0)) .set(compatibility.eq(compat)) .get_result::<Self>(conn) { Ok(config) => config .compatibility .ok_or_else(|| ApiError::new(ApiAvroErrorCode::BackendDatastoreError)), Err(diesel::result::Error::NotFound) => { Self::insert(compat, conn)?; Ok(compat.to_string()) } _ => Err(ApiError::new(ApiAvroErrorCode::BackendDatastoreError)), } } fn insert(compat: &str, conn: &PgConnection) -> Result<usize, ApiError> { use super::schema::configs::dsl::*; diesel::insert_into(configs) .values(( id.eq(0), compatibility.eq(&compat), created_at.eq(diesel::dsl::now), updated_at.eq(diesel::dsl::now), )) .execute(conn) .map_err(|_| ApiError::new(ApiAvroErrorCode::BackendDatastoreError)) } }
&mut fmt::Formatter) -> fmt::Result { let screaming_snake_case = match self { Self::Backward => Ok("BACKWARD"), Self::BackwardTransitive => Ok("BACKWARD_TRANSITIVE"), Self::Forward => Ok("FORWARD"), Self::ForwardTransitive => Ok("FORWARD_TRANSITIVE"), Self::Full => Ok("FULL"), Self::FullTransitive => Ok("FULL_TRANSITIVE"), Self::CompatNone => Ok("NONE"), _ => Ok(""), }?; write!(f, "{}", screaming_snake_case) } }
random
[ { "content": "#[derive(Debug, Serialize)]\n\nstruct SchemaCompatibility {\n\n is_compatible: bool,\n\n}\n\n\n\nimpl SchemaCompatibility {\n\n fn is_compatible(\n\n old: &str,\n\n new: &str,\n\n compatibility: CompatibilityLevel,\n\n ) -> Result<bool, ApiError> {\n\n match compatibility {\n\n CompatibilityLevel::CompatNone => Ok(true),\n\n CompatibilityLevel::Backward => Schema::is_compatible(new, old),\n\n CompatibilityLevel::Forward => Schema::is_compatible(old, new),\n\n CompatibilityLevel::Full => {\n\n Ok(Schema::is_compatible(old, new)? && Schema::is_compatible(new, old)?)\n\n }\n\n _ => unimplemented!(),\n\n }\n\n }\n\n}\n", "file_path": "src/api/compatibility.rs", "rank": 0, "score": 81371.2670753221 }, { "content": "pub fn api_routing(cfg: &mut web::ServiceConfig) {\n\n let password =\n\n env::var(\"SCHEMA_REGISTRY_PASSWORD\").expect(\"Must pass a schema registry password\");\n\n\n\n cfg.service(\n\n web::scope(\"\")\n\n .wrap(middleware::VerifyAcceptHeader)\n\n .wrap(middleware::VerifyAuthorization::new(&password))\n\n .service(\n\n web::resource(\"/compatibility/subjects/{subject}/versions/{version}\")\n\n .route(web::post().to(api::check_compatibility)),\n\n )\n\n .service(\n\n web::resource(\"/config\")\n\n .route(web::get().to(api::get_config))\n\n .route(web::put().to(api::put_config)),\n\n )\n\n .service(\n\n web::resource(\"/config/{subject}\")\n\n .route(web::get().to(api::get_subject_config))\n", "file_path": "src/app/mod.rs", "rank": 1, "score": 81224.21441568584 }, { "content": "pub fn monitoring_routing(cfg: &mut web::ServiceConfig) {\n\n cfg.service(\n\n web::scope(\"_\")\n\n .service(web::resource(\"/health_check\").route(web::get().to(health::status))),\n\n );\n\n}\n\n\n", "file_path": "src/app/mod.rs", "rank": 2, "score": 81224.21441568584 }, { "content": "/// `get_subject_version_from_db` fetches a specific subject version pair from the\n\n/// database, given a subject name and an optional version. 
If the version is not given,\n\n/// then we get the latest schema id.\n\npub fn get_subject_version_from_db(\n\n conn: &diesel::r2d2::PooledConnection<diesel::r2d2::ConnectionManager<diesel::PgConnection>>,\n\n subject: String,\n\n version: Option<u32>,\n\n) -> Result<GetSubjectVersionResponse, ApiError> {\n\n use crate::api::version::VersionLimit;\n\n\n\n match version {\n\n Some(v) => {\n\n if !v.within_limits() {\n\n return Err(ApiError::new(ApiAvroErrorCode::InvalidVersion));\n\n }\n\n SchemaVersion::get_schema_id(conn, subject.to_string(), v)\n\n }\n\n None => SchemaVersion::get_schema_id_from_latest(conn, subject.to_string()),\n\n }\n\n .map(|o| GetSubjectVersionResponse {\n\n subject: subject.to_string(),\n\n id: o.0,\n\n version: o.1,\n", "file_path": "src/api/subjects.rs", "rank": 3, "score": 79820.23179885633 }, { "content": "pub fn status() -> HttpResponse {\n\n HttpResponse::Ok()\n\n .content_type(\"application/json\")\n\n .body(\"{\\\"status\\\": \\\"healthy\\\"}\")\n\n}\n", "file_path": "src/health/mod.rs", "rank": 4, "score": 77529.38403735752 }, { "content": "pub fn get_schema_registry_password() -> String {\n\n env::var(\"SCHEMA_REGISTRY_PASSWORD\").unwrap_or_else(|_| \"test_password\".to_string())\n\n}\n", "file_path": "tests/common/settings.rs", "rank": 5, "score": 73578.8673150446 }, { "content": "CREATE UNIQUE INDEX index_configs_on_subject_id ON configs(subject_id);\n\n\n", "file_path": "migrations/2018-12-18-105635_create_configs/up.sql", "rank": 6, "score": 71525.15512315839 }, { "content": "pub fn setup() -> (ApiTesterServer, DbConnection) {\n\n let server = ApiTesterServer::new();\n\n let conn = DbPool::new_pool(Some(1)).connection().unwrap();\n\n conn.reset();\n\n\n\n (server, conn)\n\n}\n\n\n\nimpl ApiTesterServer {\n\n pub fn new() -> Self {\n\n Self(test::start(|| {\n\n App::new()\n\n .configure(app::monitoring_routing)\n\n .app_data(Data::new(DbPool::new_pool(Some(1))))\n\n .configure(app::api_routing)\n\n }))\n\n }\n\n\n\n pub async 
fn test(\n\n &self,\n", "file_path": "tests/common/server.rs", "rank": 7, "score": 68455.28825081946 }, { "content": "-- Sets up a trigger for the given table to automatically set a column called\n", "file_path": "migrations/00000000000000_diesel_initial_setup/up.sql", "rank": 8, "score": 56758.941251336895 }, { "content": "CREATE SEQUENCE configs_id_seq;\n", "file_path": "migrations/2018-12-18-105635_create_configs/up.sql", "rank": 9, "score": 54970.334472509305 }, { "content": "pub trait VersionLimit {\n\n fn within_limits(&self) -> bool;\n\n}\n\n\n\nimpl VersionLimit for u32 {\n\n fn within_limits(&self) -> bool {\n\n *self > 0 && *self < 2_147_483_648\n\n }\n\n}\n", "file_path": "src/api/version.rs", "rank": 10, "score": 41903.896021349385 }, { "content": "pub trait DbManage {\n\n fn new_pool(max_size: Option<u32>) -> Self;\n\n fn connection(&self) -> Result<DbConnection, ApiError>;\n\n}\n\n\n\nimpl DbManage for DbPool {\n\n fn new_pool(max_size: Option<u32>) -> Self {\n\n let database_url = env::var(\"DATABASE_URL\").expect(\"DATABASE_URL must be set\");\n\n let manager = ConnectionManager::<PgConnection>::new(database_url);\n\n Pool::builder()\n\n .max_size(max_size.unwrap_or(10))\n\n .build(manager)\n\n .expect(\"Failed to create pool.\")\n\n }\n\n\n\n fn connection(&self) -> Result<DbConnection, ApiError> {\n\n self.get()\n\n .map_err(|_| ApiError::new(ApiAvroErrorCode::BackendDatastoreError))\n\n }\n\n}\n", "file_path": "src/db/connection.rs", "rank": 11, "score": 41903.896021349385 }, { "content": "pub trait DbAuxOperations {\n\n fn reset(&self);\n\n fn reset_schemas(&self);\n\n fn reset_subjects(&self);\n\n fn reset_configs_global(&self);\n\n\n\n fn create_test_subject_with_config(&self, compat: &str);\n\n fn add_subjects(&self, subjects: Vec<String>);\n\n fn register_schema(&self, subject: String, schema: String) -> Schema;\n\n}\n\n\n\nimpl DbAuxOperations for DbConnection {\n\n fn reset(&self) {\n\n use 
avro_schema_registry::db::models::schema::configs::dsl::configs;\n\n use avro_schema_registry::db::models::schema::schema_versions::dsl::schema_versions;\n\n use avro_schema_registry::db::models::schema::schemas::dsl::schemas;\n\n use avro_schema_registry::db::models::schema::subjects::dsl::subjects;\n\n\n\n self.transaction::<_, diesel::result::Error, _>(|| {\n\n diesel::delete(configs).execute(self)?;\n", "file_path": "tests/db/mod.rs", "rank": 12, "score": 40775.26270340107 }, { "content": "CREATE UNIQUE INDEX index_schema_versions_on_subject_id_and_version ON schema_versions(subject_id, version);\n", "file_path": "migrations/2018-12-16-123727_create_schema_versions/up.sql", "rank": 13, "score": 35050.38518865712 }, { "content": "CREATE TABLE configs (\n\n id BIGINT PRIMARY KEY DEFAULT nextval('configs_id_seq'::regclass),\n\n compatibility CHARACTER VARYING,\n\n created_at TIMESTAMP WITHOUT TIME ZONE NOT NULL,\n\n updated_at TIMESTAMP WITHOUT TIME ZONE NOT NULL,\n\n subject_id BIGINT\n\n);\n\n\n", "file_path": "migrations/2018-12-18-105635_create_configs/up.sql", "rank": 14, "score": 33637.259699717346 }, { "content": "SELECT diesel_manage_updated_at('configs');\n", "file_path": "migrations/2018-12-18-105635_create_configs/up.sql", "rank": 15, "score": 31505.041158482094 }, { "content": " \"/compatibility/subjects/test.subject/versions/1\",\n\n Some(json!(schema_forward_compatible)),\n\n http::StatusCode::OK,\n\n r#\"\\{\"is_compatible\":false\\}\"#,\n\n )\n\n .await;\n\n}\n\n\n\n#[actix_rt::test]\n\nasync fn test_schema_for_full_compatibility_with_subject_and_with_version() {\n\n let (server, conn) = setup();\n\n conn.create_test_subject_with_config(\"FULL\");\n\n\n\n let schema_s = std::fs::read_to_string(\"tests/fixtures/schema.json\").unwrap();\n\n let schema_backward_compatible_s =\n\n std::fs::read_to_string(\"tests/fixtures/schema_backward_compatible.json\").unwrap();\n\n\n\n let _ = conn.register_schema(String::from(\"test.subject\"), 
schema_s.to_string());\n\n let schema2 = SchemaBody {\n\n schema: schema_backward_compatible_s.to_string(),\n", "file_path": "tests/compatibility.rs", "rank": 16, "score": 30320.10851180183 }, { "content": " \"/compatibility/subjects/test.subject/versions/1\",\n\n Some(json!(schema_forward_compatible)),\n\n http::StatusCode::OK,\n\n r#\"\\{\"is_compatible\":true\\}\"#,\n\n )\n\n .await;\n\n\n\n // Should not be backwards compatible\n\n let schema_backward_compatible_s =\n\n std::fs::read_to_string(\"tests/fixtures/schema_backward_compatible.json\").unwrap();\n\n\n\n let schema_backward_compatible = SchemaBody {\n\n schema: schema_backward_compatible_s.to_string(),\n\n };\n\n\n\n server\n\n .test(\n\n http::Method::POST,\n\n \"/compatibility/subjects/test.subject/versions/1\",\n\n Some(json!(schema_backward_compatible)),\n", "file_path": "tests/compatibility.rs", "rank": 17, "score": 30317.510557984337 }, { "content": " .test(\n\n http::Method::POST,\n\n \"/compatibility/subjects/test.subject/versions/1\",\n\n Some(json!(schema_forward_compatible)),\n\n http::StatusCode::OK,\n\n r#\"\\{\"is_compatible\":false\\}\"#,\n\n )\n\n .await;\n\n\n\n let schema_full_compatible_s =\n\n std::fs::read_to_string(\"tests/fixtures/schema_full_compatible.json\").unwrap();\n\n\n\n let schema_full_compatible = SchemaBody {\n\n schema: schema_full_compatible_s.to_string(),\n\n };\n\n\n\n server\n\n .test(\n\n http::Method::POST,\n\n \"/compatibility/subjects/test.subject/versions/1\",\n\n Some(json!(schema_full_compatible)),\n\n http::StatusCode::OK,\n\n r#\"\\{\"is_compatible\":true\\}\"#,\n\n )\n\n .await;\n\n}\n", "file_path": "tests/compatibility.rs", "rank": 18, "score": 30317.03027841197 }, { "content": " .await;\n\n}\n\n\n\n#[actix_rt::test]\n\nasync fn test_schema_for_forward_compatibility_with_subject_and_with_version() {\n\n let (server, conn) = setup();\n\n conn.create_test_subject_with_config(\"FORWARD\");\n\n\n\n let schema_s = 
std::fs::read_to_string(\"tests/fixtures/schema.json\").unwrap();\n\n let schema_forward_compatible_s =\n\n std::fs::read_to_string(\"tests/fixtures/schema_forward_compatible.json\").unwrap();\n\n\n\n let _ = conn.register_schema(String::from(\"test.subject\"), schema_s.to_string());\n\n let schema_forward_compatible = SchemaBody {\n\n schema: schema_forward_compatible_s.to_string(),\n\n };\n\n\n\n server\n\n .test(\n\n http::Method::POST,\n", "file_path": "tests/compatibility.rs", "rank": 19, "score": 30316.52823734837 }, { "content": " http::StatusCode::OK,\n\n r#\"\\{\"is_compatible\":false\\}\"#,\n\n )\n\n .await;\n\n}\n\n\n\n#[actix_rt::test]\n\nasync fn test_schema_for_backward_compatibility_with_subject_and_with_version() {\n\n let (server, conn) = setup();\n\n conn.create_test_subject_with_config(\"BACKWARD\");\n\n\n\n let schema_s = std::fs::read_to_string(\"tests/fixtures/schema.json\").unwrap();\n\n let schema_backward_compatible_s =\n\n std::fs::read_to_string(\"tests/fixtures/schema_backward_compatible.json\").unwrap();\n\n\n\n let _ = conn.register_schema(String::from(\"test.subject\"), schema_s.to_string());\n\n let schema2 = SchemaBody {\n\n schema: schema_backward_compatible_s.to_string(),\n\n };\n\n\n", "file_path": "tests/compatibility.rs", "rank": 20, "score": 30316.429947534318 }, { "content": " };\n\n\n\n server\n\n .test(\n\n http::Method::POST,\n\n \"/compatibility/subjects/test.subject/versions/1\",\n\n Some(json!(schema2)),\n\n http::StatusCode::OK,\n\n r#\"\\{\"is_compatible\":false\\}\"#,\n\n )\n\n .await;\n\n\n\n let schema_forward_compatible_s =\n\n std::fs::read_to_string(\"tests/fixtures/schema_forward_compatible.json\").unwrap();\n\n\n\n let schema_forward_compatible = SchemaBody {\n\n schema: schema_forward_compatible_s.to_string(),\n\n };\n\n\n\n server\n", "file_path": "tests/compatibility.rs", "rank": 21, "score": 30315.11214299137 }, { "content": " server\n\n .test(\n\n http::Method::POST,\n\n 
\"/compatibility/subjects/test.subject/versions/1\",\n\n Some(json!(schema2)),\n\n http::StatusCode::OK,\n\n r#\"\\{\"is_compatible\":true\\}\"#,\n\n )\n\n .await;\n\n\n\n let schema_forward_compatible_s =\n\n std::fs::read_to_string(\"tests/fixtures/schema_forward_compatible.json\").unwrap();\n\n\n\n let schema_forward_compatible = SchemaBody {\n\n schema: schema_forward_compatible_s.to_string(),\n\n };\n\n\n\n server\n\n .test(\n\n http::Method::POST,\n", "file_path": "tests/compatibility.rs", "rank": 22, "score": 30315.005484912726 }, { "content": " .await;\n\n}\n\n\n\n#[actix_rt::test]\n\nasync fn test_schema_for_compatibility_with_subject_and_with_non_existent_version() {\n\n let (server, conn) = setup();\n\n conn.create_test_subject_with_config(\"FORWARD\");\n\n\n\n let schema_s = std::fs::read_to_string(\"tests/fixtures/schema.json\").unwrap();\n\n let schema = SchemaBody { schema: schema_s };\n\n\n\n // it returns 404 with 'Version not found'\n\n server\n\n .test(\n\n http::Method::POST,\n\n \"/compatibility/subjects/test.subject/versions/2\",\n\n Some(json!(schema)),\n\n http::StatusCode::NOT_FOUND,\n\n r#\"\\{\"error_code\":40402,\"message\":\"Version not found\"\\}\"#,\n\n )\n", "file_path": "tests/compatibility.rs", "rank": 23, "score": 30313.875610423536 }, { "content": "use actix_web::http;\n\n\n\nuse crate::common::server::setup;\n\nuse crate::db::DbAuxOperations;\n\nuse avro_schema_registry::api::SchemaBody;\n\n\n\n#[actix_rt::test]\n\nasync fn test_schema_for_compatibility_with_non_existent_subject() {\n\n let (server, _) = setup();\n\n let schema_s = std::fs::read_to_string(\"tests/fixtures/schema.json\").unwrap();\n\n let schema = SchemaBody { schema: schema_s };\n\n\n\n server\n\n .test(\n\n http::Method::POST,\n\n \"/compatibility/subjects/test.subject/versions/1\",\n\n Some(json!(schema)),\n\n http::StatusCode::NOT_FOUND,\n\n r#\"\\{\"error_code\":40401,\"message\":\"Subject not found\"\\}\"#,\n\n )\n", "file_path": "tests/compatibility.rs", 
"rank": 24, "score": 30313.100395584937 }, { "content": "use actix_web::http;\n\n\n\nuse crate::common::server::setup;\n\nuse crate::db::DbAuxOperations;\n\n\n\n#[actix_rt::test]\n\nasync fn test_get_global_config() {\n\n let (server, _) = setup();\n\n\n\n // returns compatibility\n\n server\n\n .test(\n\n http::Method::GET,\n\n \"/config\",\n\n None,\n\n http::StatusCode::OK,\n\n r#\"\\{\"compatibility\":\"BACKWARD\"\\}\"#,\n\n )\n\n .await;\n\n}\n", "file_path": "tests/config.rs", "rank": 25, "score": 29692.729617427038 }, { "content": " None,\n\n http::StatusCode::NOT_FOUND,\n\n r#\"\\{\"error_code\":40401,\"message\":\"Subject not found\"\\}\"#,\n\n )\n\n .await;\n\n}\n\n\n\n#[actix_rt::test]\n\nasync fn test_update_compatibility_level_with_existent_subject() {\n\n let (server, conn) = setup();\n\n conn.create_test_subject_with_config(\"FULL\");\n\n\n\n // with valid compatibility FORWARD_TRANSITIVE it returns FORWARD_TRANSITIVE\n\n server\n\n .test(\n\n http::Method::PUT,\n\n \"/config/test.subject\",\n\n Some(json!({\"compatibility\": \"FORWARD_TRANSITIVE\"})),\n\n http::StatusCode::OK,\n\n r#\"\\{\"compatibility\":\"FORWARD_TRANSITIVE\"\\}\"#,\n", "file_path": "tests/config.rs", "rank": 26, "score": 29692.58524721976 }, { "content": " http::Method::GET,\n\n \"/config/test.subject\",\n\n None,\n\n http::StatusCode::OK,\n\n r#\"\\{\"compatibility\":\"FULL\"\\}\"#,\n\n )\n\n .await;\n\n}\n\n\n\n#[actix_rt::test]\n\nasync fn test_get_compatibility_level_with_non_existent_subject() {\n\n let (server, conn) = setup();\n\n conn.create_test_subject_with_config(\"FULL\");\n\n conn.reset_subjects();\n\n\n\n // returns 404 with Invalid compatibility level\n\n server\n\n .test(\n\n http::Method::GET,\n\n \"/config/test.subject\",\n", "file_path": "tests/config.rs", "rank": 27, "score": 29690.46444749932 }, { "content": "\n\n#[actix_rt::test]\n\nasync fn test_set_global_config_with_valid_compatibility_full() {\n\n let (server, _) = setup();\n\n\n\n // returns 
compatibility\n\n server\n\n .test(\n\n http::Method::PUT,\n\n \"/config\",\n\n Some(json!({\"compatibility\": \"FULL\"})),\n\n http::StatusCode::OK,\n\n r#\"\\{\"compatibility\":\"FULL\"\\}\"#,\n\n )\n\n .await;\n\n}\n\n\n\n#[actix_rt::test]\n\nasync fn test_set_global_config_with_invalid_compatibility() {\n\n let (server, _) = setup();\n", "file_path": "tests/config.rs", "rank": 28, "score": 29689.591783440195 }, { "content": " // with valid compatibility FULL it returns 404\n\n server\n\n .test(\n\n http::Method::PUT,\n\n \"/config/test.subject\",\n\n Some(json!({\"compatibility\": \"FULL\"})),\n\n http::StatusCode::NOT_FOUND,\n\n r#\"\\{\"error_code\":40401,\"message\":\"Subject not found\"\\}\"#,\n\n )\n\n .await;\n\n}\n", "file_path": "tests/config.rs", "rank": 29, "score": 29688.33760736735 }, { "content": " // returns 422 with Invalid compatibility level\n\n server\n\n .test(\n\n http::Method::PUT,\n\n \"/config\",\n\n Some(json!({\"compatibility\": \"NOT_VALID\"})),\n\n http::StatusCode::UNPROCESSABLE_ENTITY,\n\n r#\"\\{\"error_code\":42203,\"message\":\"Invalid compatibility level\"\\}\"#,\n\n )\n\n .await;\n\n}\n\n\n\n#[actix_rt::test]\n\nasync fn test_get_compatibility_level_with_existent_subject() {\n\n let (server, conn) = setup();\n\n conn.create_test_subject_with_config(\"FULL\");\n\n\n\n // returns valid compatibility\n\n server\n\n .test(\n", "file_path": "tests/config.rs", "rank": 30, "score": 29687.594548411806 }, { "content": " )\n\n .await;\n\n\n\n // with invalid compatibility it returns 422\n\n server\n\n .test(\n\n http::Method::PUT,\n\n \"/config/test.subject\",\n\n Some(json!({\"compatibility\": \"NOT_VALID\"})),\n\n http::StatusCode::UNPROCESSABLE_ENTITY,\n\n r#\"\\{\"error_code\":42203,\"message\":\"Invalid compatibility level\"}\"#,\n\n )\n\n .await;\n\n}\n\n\n\n#[actix_rt::test]\n\nasync fn test_update_compatibility_level_with_non_existent_subject() {\n\n let (server, conn) = setup();\n\n conn.reset_subjects();\n\n\n", "file_path": 
"tests/config.rs", "rank": 31, "score": 29684.55635036628 }, { "content": "use std::str::FromStr;\n\n\n\nuse actix_web::{\n\n web::{Data, Json, Path},\n\n HttpResponse, Responder,\n\n};\n\n\n\nuse crate::api::errors::{ApiAvroErrorCode, ApiError};\n\nuse crate::api::SchemaBody;\n\nuse crate::db::models::{CompatibilityLevel, Config, Schema};\n\nuse crate::db::{DbManage, DbPool};\n\n\n\npub async fn check_compatibility(\n\n info: Path<(String, u32)>,\n\n body: Json<SchemaBody>,\n\n db: Data<DbPool>,\n\n) -> impl Responder {\n\n let (subject, version) = info.into_inner();\n\n let schema = body.into_inner().schema;\n\n info!(\"method=post,subject={},version={}\", subject, version);\n", "file_path": "src/api/compatibility.rs", "rank": 32, "score": 28610.337680578832 }, { "content": "\n\n let conn = db.connection()?;\n\n let sv_response =\n\n crate::api::subjects::get_subject_version_from_db(&conn, subject.clone(), Some(version))?;\n\n let compatibility = Config::get_with_subject_name(&conn, subject)?;\n\n if let Ok(compat) = CompatibilityLevel::from_str(&compatibility) {\n\n if let Ok(is_compatible) =\n\n SchemaCompatibility::is_compatible(&sv_response.schema, &schema, compat)\n\n {\n\n Ok(HttpResponse::Ok().json(SchemaCompatibility { is_compatible }))\n\n } else {\n\n Err(ApiError::new(ApiAvroErrorCode::InvalidAvroSchema))\n\n }\n\n } else {\n\n Err(ApiError::new(ApiAvroErrorCode::InvalidAvroSchema))\n\n }\n\n}\n\n\n\n#[derive(Debug, Serialize)]\n", "file_path": "src/api/compatibility.rs", "rank": 33, "score": 28608.80386864096 }, { "content": "use actix_web::{\n\n web::{Data, Json, Path},\n\n HttpResponse, Responder,\n\n};\n\n\n\nuse crate::db::models::{Config, ConfigCompatibility, SetConfig};\n\nuse crate::db::{DbManage, DbPool};\n\n\n\npub async fn get_config(db: Data<DbPool>) -> impl Responder {\n\n info!(\"path=/config,method=get\");\n\n\n\n let conn = db.connection()?;\n\n match Config::get_global_compatibility(&conn).and_then(ConfigCompatibility::new) {\n\n 
Ok(config) => Ok(HttpResponse::Ok().json(config)),\n\n Err(e) => Err(e),\n\n }\n\n}\n\n\n\npub async fn put_config(body: Json<SetConfig>, db: Data<DbPool>) -> impl Responder {\n\n let compatibility = body.compatibility;\n", "file_path": "src/api/configs.rs", "rank": 34, "score": 28021.310072696364 }, { "content": " }\n\n}\n\n\n\n/// Update compatibility level for the specified subject.\n\n///\n\n/// *Note:* The confluent schema registry does not return \"Subject not found\" if the\n\n/// subject does not exist, due to the way they map configs to subjects. We map them\n\n/// internally to subject_id's therefore, we can *and will* return \"Schema not found\" if\n\n/// no subject is found with the given name.\n\npub async fn put_subject_config(\n\n subject_path: Path<String>,\n\n body: Json<SetConfig>,\n\n db: Data<DbPool>,\n\n) -> impl Responder {\n\n let subject = subject_path.into_inner();\n\n let compatibility = body.compatibility;\n\n info!(\n\n \"method=put,subject={},compatibility={}\",\n\n subject, compatibility\n\n );\n", "file_path": "src/api/configs.rs", "rank": 35, "score": 28018.168821507617 }, { "content": " info!(\"method=put,compatibility={}\", compatibility);\n\n\n\n let conn = db.connection()?;\n\n match Config::set_global_compatibility(&conn, &compatibility.valid()?.to_string())\n\n .and_then(ConfigCompatibility::new)\n\n {\n\n Ok(config) => Ok(HttpResponse::Ok().json(config)),\n\n Err(e) => Err(e),\n\n }\n\n}\n\n\n\n/// Get compatibility level for a subject.\n\npub async fn get_subject_config(subject_path: Path<String>, db: Data<DbPool>) -> impl Responder {\n\n let subject = subject_path.into_inner();\n\n info!(\"method=get,subject={}\", subject);\n\n\n\n let conn = db.connection()?;\n\n match Config::get_with_subject_name(&conn, subject).and_then(ConfigCompatibility::new) {\n\n Ok(config) => Ok(HttpResponse::Ok().json(config)),\n\n Err(e) => Err(e),\n", "file_path": "src/api/configs.rs", "rank": 36, "score": 28017.086554274116 }, { "content": 
"\n\n let conn = db.connection()?;\n\n match Config::set_with_subject_name(&conn, subject, compatibility.valid()?.to_string())\n\n .and_then(ConfigCompatibility::new)\n\n {\n\n Ok(config) => Ok(HttpResponse::Ok().json(config)),\n\n Err(e) => Err(e),\n\n }\n\n}\n", "file_path": "src/api/configs.rs", "rank": 37, "score": 28013.66391286955 }, { "content": "-- This file should undo anything in `up.sql`", "file_path": "migrations/2018-12-18-105635_create_configs/down.sql", "rank": 38, "score": 28006.404606613494 }, { "content": "CREATE SEQUENCE subjects_id_seq;\n", "file_path": "migrations/2018-12-13-153527_create_subjects/up.sql", "rank": 51, "score": 23465.293314027207 }, { "content": "CREATE SEQUENCE schemas_id_seq;\n", "file_path": "migrations/2018-12-14-163507_create_schemas/up.sql", "rank": 52, "score": 23465.293314027207 }, { "content": "CREATE SEQUENCE schema_versions_id_seq;\n", "file_path": "migrations/2018-12-16-123727_create_schema_versions/up.sql", "rank": 53, "score": 21488.973530690942 }, { "content": "use chrono::NaiveDateTime;\n\nuse diesel::prelude::*;\n\n\n\nuse super::schema::*;\n\n\n\nuse crate::api::errors::{ApiAvroErrorCode, ApiError};\n\n\n\n#[derive(Debug, Identifiable, Associations, Queryable, Serialize)]\n\n#[table_name = \"subjects\"]\n\npub struct Subject {\n\n pub id: i64,\n\n pub name: String,\n\n pub created_at: NaiveDateTime,\n\n pub updated_at: NaiveDateTime,\n\n}\n\n\n\nimpl Subject {\n\n /// Insert a new subject but ignore if it already exists.\n\n ///\n\n /// *Note:* 'ignore' in the case above means we will update the name if it already\n", "file_path": "src/db/models/subjects.rs", "rank": 54, "score": 19.680091006183694 }, { "content": "use diesel::prelude::*;\n\n\n\nuse super::schema::*;\n\nuse super::schemas::Schema;\n\nuse super::subjects::Subject;\n\n\n\nuse crate::api::errors::{ApiAvroErrorCode, ApiError};\n\n\n\n#[derive(Debug, Identifiable, Associations, Queryable)]\n\n#[table_name = 
\"schema_versions\"]\n\n#[belongs_to(Schema)]\n\n#[belongs_to(Subject)]\n\npub struct SchemaVersion {\n\n pub id: i64,\n\n pub version: Option<i32>,\n\n pub subject_id: i64,\n\n pub schema_id: i64,\n\n}\n\n\n\n#[derive(Debug, Insertable)]\n", "file_path": "src/db/models/schema_versions.rs", "rank": 55, "score": 19.401742118479035 }, { "content": "use avro_rs::schema_compatibility::SchemaCompatibility;\n\nuse chrono::{NaiveDateTime, Utc};\n\nuse diesel::prelude::*;\n\n\n\nuse crate::api::errors::{ApiAvroErrorCode, ApiError};\n\n\n\nuse super::schema::*;\n\nuse super::{GetSubjectVersionResponse, NewSchemaVersion, SchemaVersion, Subject};\n\n\n\n#[derive(Debug, Identifiable, Associations, Queryable)]\n\n#[table_name = \"schemas\"]\n\npub struct Schema {\n\n pub id: i64,\n\n pub fingerprint: String,\n\n pub json: String,\n\n pub created_at: NaiveDateTime,\n\n pub updated_at: NaiveDateTime,\n\n pub fingerprint2: Option<String>,\n\n}\n\n\n", "file_path": "src/db/models/schemas.rs", "rank": 56, "score": 18.178857704755828 }, { "content": "use actix_web::{\n\n web::{Data, Json, Path},\n\n HttpResponse, Responder,\n\n};\n\n\n\nuse crate::api::errors::{ApiAvroErrorCode, ApiError};\n\nuse crate::db::models::{\n\n DeleteSchemaVersion, RegisterSchema, RegisterSchemaResponse, Schema, SchemaResponse,\n\n SchemaVersion,\n\n};\n\nuse crate::db::{DbManage, DbPool};\n\n\n\n#[derive(Serialize, Deserialize, Debug)]\n\npub struct SchemaBody {\n\n pub schema: String,\n\n}\n\n\n\npub async fn get_schema(id: Path<i64>, db: Data<DbPool>) -> impl Responder {\n\n info!(\"method=get,id={}\", id);\n\n\n", "file_path": "src/api/schemas.rs", "rank": 57, "score": 15.904712542767971 }, { "content": "use actix_web::{http::StatusCode, HttpResponse};\n\n\n\n// TODO: maybe replace this with serde_aux::serde_aux_enum_number_declare\n\nmacro_rules! 
enum_number {\n\n ($name:ident { $($variant:ident = $value:expr, )* }) => {\n\n #[derive(Clone, Copy, Debug, Eq, PartialEq)]\n\n pub enum $name {\n\n $($variant = $value,)*\n\n }\n\n\n\n impl ::serde::Serialize for $name {\n\n fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>\n\n where\n\n S: ::serde::Serializer,\n\n {\n\n serializer.serialize_u16(*self as u16)\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/api/errors.rs", "rank": 58, "score": 15.454148367673874 }, { "content": "pub use self::compatibility::*;\n\npub use self::configs::*;\n\npub use self::schemas::*;\n\npub use self::subjects::*;\n\n\n\nmod compatibility;\n\nmod configs;\n\npub mod errors;\n\nmod schemas;\n\nmod subjects;\n\npub mod version;\n", "file_path": "src/api/mod.rs", "rank": 59, "score": 14.518870146421218 }, { "content": " self.transaction::<_, diesel::result::Error, _>(|| {\n\n diesel::update(configs)\n\n .filter(id.eq(0))\n\n .set(compatibility.eq(\"BACKWARD\"))\n\n .execute(self)\n\n })\n\n .unwrap();\n\n }\n\n\n\n fn create_test_subject_with_config(&self, compat: &str) {\n\n use avro_schema_registry::db::models::schema::configs::dsl::{\n\n compatibility, configs, created_at as config_created_at, subject_id,\n\n updated_at as config_updated_at,\n\n };\n\n use avro_schema_registry::db::models::schema::subjects::dsl::*;\n\n use avro_schema_registry::db::models::Subject;\n\n\n\n self.transaction::<_, diesel::result::Error, _>(|| {\n\n diesel::insert_into(subjects)\n\n .values((\n", "file_path": "tests/db/mod.rs", "rank": 60, "score": 14.117676661323431 }, { "content": "#[derive(Debug, Insertable)]\n\n#[table_name = \"schemas\"]\n\npub struct NewSchema {\n\n pub fingerprint: String,\n\n pub json: String,\n\n pub created_at: NaiveDateTime,\n\n pub updated_at: NaiveDateTime,\n\n pub fingerprint2: Option<String>,\n\n}\n\n\n\n#[derive(Debug, Serialize)]\n\npub struct SchemaResponse {\n\n pub schema: String,\n\n}\n\n\n\npub struct GetSchema {\n\n pub id: i64,\n\n}\n\n\n\nimpl 
Schema {\n", "file_path": "src/db/models/schemas.rs", "rank": 61, "score": 12.595793240832558 }, { "content": "\n\n// We use the macro to ensure we serialize as numbers, not as the name.\n\nenum_number!(ApiAvroErrorCode {\n\n SubjectNotFound = 40401,\n\n VersionNotFound = 40402,\n\n SchemaNotFound = 40403,\n\n\n\n InvalidAvroSchema = 42201,\n\n InvalidVersion = 42202,\n\n InvalidCompatibilityLevel = 42203,\n\n\n\n BackendDatastoreError = 50001,\n\n OperationTimedOut = 50002,\n\n MasterForwardingError = 50003,\n\n});\n\n\n\nimpl ApiAvroErrorCode {\n\n pub const fn message(&self) -> &str {\n\n match self {\n\n Self::SubjectNotFound => \"Subject not found\",\n", "file_path": "src/api/errors.rs", "rank": 62, "score": 12.452699918340148 }, { "content": "#[table_name = \"schema_versions\"]\n\npub struct NewSchemaVersion {\n\n pub version: Option<i32>,\n\n pub subject_id: i64,\n\n pub schema_id: i64,\n\n}\n\n\n\npub type SchemaVersionFields = NewSchemaVersion;\n\n\n\nimpl SchemaVersion {\n\n pub fn insert(conn: &PgConnection, sv: NewSchemaVersion) -> Result<Self, ApiError> {\n\n use super::schema::schema_versions::dsl::schema_versions;\n\n diesel::insert_into(schema_versions)\n\n .values(&sv)\n\n .get_result::<Self>(conn)\n\n .map_err(|_| ApiError::new(ApiAvroErrorCode::BackendDatastoreError))\n\n }\n\n\n\n pub fn find(\n\n conn: &PgConnection,\n", "file_path": "src/db/models/schema_versions.rs", "rank": 63, "score": 11.647177425744406 }, { "content": "}\n\n\n\n#[derive(Debug, Serialize)]\n\npub struct GetSubjectVersion {\n\n pub subject: String,\n\n pub version: Option<u32>,\n\n}\n\n\n\n#[derive(Debug, Serialize)]\n\npub struct GetSubjectVersionResponse {\n\n pub subject: String,\n\n // TODO: The documentation mentions this but their example response doesn't include it\n\n pub id: i64,\n\n pub version: i32,\n\n pub schema: String,\n\n}\n", "file_path": "src/db/models/subjects.rs", "rank": 64, "score": 11.574319191621692 }, { "content": "pub use self::configs::*;\n\npub 
use self::schema_versions::*;\n\npub use self::schemas::*;\n\npub use self::subjects::*;\n\n\n\npub mod schema;\n\n\n\nmod configs;\n\nmod schema_versions;\n\nmod schemas;\n\nmod subjects;\n", "file_path": "src/db/models/mod.rs", "rank": 65, "score": 11.180978966892907 }, { "content": " Self::VersionNotFound => \"Version not found\",\n\n Self::SchemaNotFound => \"Schema not found\",\n\n\n\n Self::InvalidAvroSchema => \"Invalid Avro schema\",\n\n Self::InvalidVersion => \"Invalid version\",\n\n Self::InvalidCompatibilityLevel => \"Invalid compatibility level\",\n\n\n\n Self::BackendDatastoreError => \"Error in the backend datastore\",\n\n Self::OperationTimedOut => \"Operation timed out\",\n\n Self::MasterForwardingError => \"Error while forwarding the request to the master\",\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug, Serialize, Clone)]\n\npub struct ApiErrorResponse {\n\n pub error_code: ApiAvroErrorCode,\n\n pub message: String,\n\n}\n\n\n", "file_path": "src/api/errors.rs", "rank": 66, "score": 10.90903401855983 }, { "content": " find_subject_id: i64,\n\n find_schema_id: i64,\n\n ) -> Result<Self, ApiError> {\n\n use super::schema::schema_versions::dsl::{schema_id, schema_versions, subject_id};\n\n\n\n schema_versions\n\n .filter(subject_id.eq(find_subject_id))\n\n .filter(schema_id.eq(find_schema_id))\n\n .get_result::<Self>(conn)\n\n .map_err(|_| ApiError::new(ApiAvroErrorCode::VersionNotFound))\n\n }\n\n\n\n pub fn with_schema_and_subject(\n\n conn: &PgConnection,\n\n search_subject_name: String,\n\n search_schema_id: i64,\n\n ) -> Result<usize, ApiError> {\n\n use super::schema::schema_versions::dsl::{id, schema_id, schema_versions, subject_id};\n\n use super::schema::schemas::dsl::{id as schemas_id, schemas};\n\n use super::schema::subjects::dsl::{id as subjects_id, name as subject_name, subjects};\n", "file_path": "src/db/models/schema_versions.rs", "rank": 67, "score": 10.791957623203487 }, { "content": " fn delete(&self, conn: &PgConnection) -> Result<(), 
diesel::result::Error> {\n\n use super::schema::configs::dsl::{configs, subject_id};\n\n use super::schema::schema_versions::dsl::{id, schema_versions};\n\n use super::schema::schemas::dsl::{id as schemas_id, schemas};\n\n use super::schema::subjects::dsl::{id as subjects_id, subjects};\n\n\n\n conn.transaction::<_, diesel::result::Error, _>(|| {\n\n let schemas_delete = schemas.filter(schemas_id.eq(self.schema_id));\n\n let subjects_delete = subjects.filter(subjects_id.eq(self.subject_id));\n\n let schema_versions_delete = schema_versions.filter(id.eq(self.id));\n\n let configs_delete = configs.filter(subject_id.eq(self.subject_id));\n\n\n\n diesel::delete(schemas_delete).execute(conn)?;\n\n diesel::delete(subjects_delete).execute(conn)?;\n\n diesel::delete(schema_versions_delete).execute(conn)?;\n\n diesel::delete(configs_delete).execute(conn)?;\n\n\n\n Ok(())\n\n })\n\n }\n", "file_path": "src/db/models/schema_versions.rs", "rank": 68, "score": 9.851500928338766 }, { "content": " use super::schema::schema_versions::dsl::{schema_versions, subject_id, version};\n\n use super::schema::subjects::dsl::{id as subjects_id, name, subjects};\n\n\n\n let res = schema_versions\n\n .inner_join(subjects.on(subject_id.eq(subjects_id)))\n\n .filter(name.eq(&subject_name))\n\n .select(version)\n\n .order(version.desc())\n\n .first::<Option<i32>>(conn);\n\n\n\n match res {\n\n Ok(v) => Ok(v),\n\n Err(diesel::NotFound) => Ok(None),\n\n _ => Err(ApiError::new(ApiAvroErrorCode::BackendDatastoreError)),\n\n }\n\n }\n\n\n\n pub fn get_schema_id_from_latest(\n\n conn: &PgConnection,\n\n subject_name: String,\n", "file_path": "src/db/models/schema_versions.rs", "rank": 69, "score": 9.070260591585331 }, { "content": " name.eq(\"test.subject\"),\n\n created_at.eq(diesel::dsl::now),\n\n updated_at.eq(diesel::dsl::now),\n\n ))\n\n .get_result::<Subject>(self)\n\n .and_then(|subject| {\n\n diesel::insert_into(configs)\n\n .values((\n\n compatibility.eq(compat),\n\n 
config_created_at.eq(diesel::dsl::now),\n\n config_updated_at.eq(diesel::dsl::now),\n\n subject_id.eq(subject.id),\n\n ))\n\n .execute(self)\n\n })\n\n })\n\n .unwrap();\n\n }\n\n\n\n fn add_subjects(&self, subjects: Vec<String>) {\n", "file_path": "tests/db/mod.rs", "rank": 70, "score": 8.913582965484258 }, { "content": "table! {\n\n configs (id) {\n\n id -> Int8,\n\n compatibility -> Nullable<Varchar>,\n\n created_at -> Timestamp,\n\n updated_at -> Timestamp,\n\n subject_id -> Nullable<Int8>,\n\n }\n\n}\n\n\n\ntable! {\n\n schema_versions (id) {\n\n id -> Int8,\n\n version -> Nullable<Int4>,\n\n subject_id -> Int8,\n\n schema_id -> Int8,\n\n }\n\n}\n\n\n\ntable! {\n", "file_path": "src/db/models/schema.rs", "rank": 71, "score": 8.882191329261325 }, { "content": " })\n\n })\n\n }\n\n}\n\n\n\n#[derive(Debug, Serialize)]\n\npub struct RegisterSchemaResponse {\n\n pub id: String,\n\n}\n\n\n\npub struct RegisterSchema {\n\n pub subject: String,\n\n pub schema: String,\n\n}\n\n\n\npub struct VerifySchemaRegistration {\n\n pub subject: String,\n\n pub schema: String,\n\n}\n\n\n\npub type VerifyRegistrationResponse = GetSubjectVersionResponse;\n", "file_path": "src/db/models/schemas.rs", "rank": 72, "score": 8.708727413521652 }, { "content": " return Err(ApiError::new(ApiAvroErrorCode::InvalidVersion));\n\n }\n\n match SchemaVersion::delete_version_with_subject(&conn, delete_schema_version) {\n\n Ok(r) => Ok(HttpResponse::Ok().body(format!(\"{}\", r))),\n\n Err(e) => Err(e),\n\n }\n\n}\n\n\n\npub async fn delete_schema_version_latest(\n\n subject: Path<String>,\n\n db: Data<DbPool>,\n\n) -> impl Responder {\n\n let subject = subject.into_inner();\n\n\n\n use crate::api::version::VersionLimit;\n\n let conn = db.connection()?;\n\n\n\n let sv_response =\n\n crate::api::subjects::get_subject_version_from_db(&conn, subject.clone(), None)?;\n\n\n", "file_path": "src/api/schemas.rs", "rank": 73, "score": 8.618698305109225 }, { "content": "## Endpoints\n\n\n\n| Endpoint | Method 
| Maturity |\n\n|---|---|---|\n\n| `/compatibility/subjects/{subject}/versions/{version}` | POST | Ready (NONE, BACKWARD, FORWARD, FULL); Unimplemented (TRANSITIVE) |\n\n| `/config` | GET | Ready |\n\n| `/config` | PUT | Ready |\n\n| `/config/{subject}` | GET | Ready |\n\n| `/config/{subject}` | PUT | Ready |\n\n| `/schemas/ids/{id}`| GET | Ready |\n\n| `/subjects` | GET | Ready |\n\n| `/subjects/{subject}` | DELETE | Ready |\n\n| `/subjects/{subject}` | POST | Ready |\n\n| `/subjects/{subject}/versions` | GET | Ready |\n\n| `/subjects/{subject}/versions` | POST | Ready |\n\n| `/subjects/{subject}/versions/latest` | DELETE | Ready |\n\n| `/subjects/{subject}/versions/latest` | GET | Ready |\n\n| `/subjects/{subject}/versions/{version}` | DELETE | Ready |\n\n| `/subjects/{subject}/versions/{version}` | GET | Ready |\n\n| `/subjects/{subject}/versions/latest/schema` | GET | Ready |\n\n| `/subjects/{subject}/versions/{version}/schema` | GET | Ready |\n\n\n\n## Extra Endpoints\n\n\n\n| Endpoint | Method | Maturity |\n\n|---|---|---|\n\n| `/_/health_check` | GET | Incomplete |\n\n| `/_/metrics` | GET | Ready |\n\n\n\n\n\n## Build\n\n\n\n```\n\ncargo build --release\n\n```\n\n\n\n## Run\n\n\n\nThis assumes you have a running PostgreSQL instance (versions 9.5 and above) and\n\nyou've run the diesel setup (with its migrations).\n\n\n\n1) Setup env (everything is controlled through environment variables)\n\n```\n\nexport SENTRY_URL=\"http://sentry-url/id\" \\ # optional\n\n DEFAULT_HOST=127.0.0.1:8080 \\ # optional (default is 127.0.0.1:8080)\n\n DATABASE_URL=postgres://postgres:@localhost:5432/diesel_testing \\\n\n SCHEMA_REGISTRY_PASSWORD=silly_password\n\n```\n\n\n\n2) Run application\n\n```\n\n# If you haven't set PORT, it listens on the default 8080\n\ncargo run # or the binary after running `cargo build`\n\n```\n\n\n\n## Tests\n\n\n\n### Unit\n\n\n\n```\n\ncargo test middleware\n\n```\n\n\n\n### Integration\n\n\n\n1) Setup testing environment 
variables\n\n```\n\nexport RUST_TEST_THREADS=1 \\\n\n DATABASE_URL=postgres://postgres:@localhost:5432/diesel_testing \\\n\n SCHEMA_REGISTRY_PASSWORD=silly_password\n\n```\n\n\n\n2) Run test suite\n\n```\n\ncargo test speculate\n\n```\n\n\n", "file_path": "README.md", "rank": 74, "score": 8.53130291632829 }, { "content": " .values(&schema)\n\n .get_result::<Self>(conn)\n\n .map_err(|_| ApiError::new(ApiAvroErrorCode::BackendDatastoreError))\n\n }\n\n\n\n pub fn get_by_json(conn: &PgConnection, data: String) -> Result<Self, ApiError> {\n\n use super::schema::schemas::dsl::*;\n\n schemas\n\n .filter(json.eq(data))\n\n .get_result::<Self>(conn)\n\n .map_err(|_| ApiError::new(ApiAvroErrorCode::SchemaNotFound))\n\n }\n\n\n\n pub fn get_by_id(conn: &PgConnection, schema_id: i64) -> Result<Self, ApiError> {\n\n use super::schema::schemas::dsl::*;\n\n schemas\n\n .find(schema_id)\n\n .get_result::<Self>(conn)\n\n .map_err(|_| ApiError::new(ApiAvroErrorCode::SchemaNotFound))\n\n }\n", "file_path": "src/db/models/schemas.rs", "rank": 75, "score": 8.510872573283857 }, { "content": "#[macro_use]\n\nextern crate serde_json;\n\n\n\nmod common;\n\nmod compatibility;\n\nmod config;\n\nmod db;\n\nmod schemas;\n\nmod subject;\n", "file_path": "tests/tests.rs", "rank": 76, "score": 8.402926503149668 }, { "content": " None => {\n\n // Create schema version for subject\n\n let sch = match json {\n\n Some(j) => Self::new(conn, j, fingerprint)?,\n\n None => db_schema\n\n .ok_or_else(|| ApiError::new(ApiAvroErrorCode::BackendDatastoreError))?,\n\n };\n\n (sch, 1)\n\n }\n\n };\n\n\n\n SchemaVersion::insert(\n\n conn,\n\n NewSchemaVersion {\n\n version: Some(new_version),\n\n subject_id: subject.id,\n\n schema_id: schema.id,\n\n },\n\n )?;\n\n // TODO: set compatibility\n", "file_path": "src/db/models/schemas.rs", "rank": 77, "score": 8.377126832175533 }, { "content": " ) -> Result<(i64, i32, String), ApiError> {\n\n use super::schema::schema_versions::dsl::{\n\n schema_id, schema_versions, 
subject_id, version,\n\n };\n\n use super::schema::schemas::dsl::{json, schemas};\n\n\n\n conn.transaction::<_, ApiError, _>(|| {\n\n let subject = Subject::get_by_name(conn, subject_name)?;\n\n\n\n let (schema_version, schema_id_result): (Option<i32>, i64) = match schema_versions\n\n .filter(subject_id.eq(subject.id))\n\n .order(version.desc())\n\n .select((version, schema_id))\n\n .first(conn)\n\n {\n\n Err(diesel::result::Error::NotFound) => {\n\n Err(ApiError::new(ApiAvroErrorCode::VersionNotFound))\n\n }\n\n Err(_) => Err(ApiError::new(ApiAvroErrorCode::BackendDatastoreError)),\n\n Ok(o) => Ok(o),\n", "file_path": "src/db/models/schema_versions.rs", "rank": 78, "score": 7.896199112895001 }, { "content": " let conn = db.connection()?;\n\n match Schema::get_by_id(&conn, id.into_inner()).map(|schema| SchemaResponse {\n\n schema: schema.json,\n\n }) {\n\n Ok(response) => Ok(HttpResponse::Ok().json(response)),\n\n Err(e) => Err(e),\n\n }\n\n}\n\n\n\npub async fn delete_schema_version(info: Path<(String, u32)>, db: Data<DbPool>) -> impl Responder {\n\n let q = info.into_inner();\n\n\n\n use crate::api::version::VersionLimit;\n\n\n\n let delete_schema_version = DeleteSchemaVersion {\n\n subject: q.0,\n\n version: q.1,\n\n };\n\n let conn = db.connection()?;\n\n if !delete_schema_version.version.within_limits() {\n", "file_path": "src/api/schemas.rs", "rank": 79, "score": 7.644055104756765 }, { "content": "use actix_web::dev::{Service, ServiceRequest, ServiceResponse, Transform};\n\nuse actix_web::error::{Error, ErrorBadRequest, ErrorForbidden, ParseError};\n\nuse actix_web::http::header::HeaderMap;\n\nuse futures::future::{ok, Either, Ready};\n\nuse futures::task::{Context, Poll};\n\n\n\npub struct VerifyAuthorization {\n\n password: String,\n\n}\n\n\n\nimpl VerifyAuthorization {\n\n pub fn new(password: &str) -> Self {\n\n Self {\n\n password: password.to_string(),\n\n }\n\n }\n\n\n\n fn validate(headers: &HeaderMap, password: &str) -> Result<(), Error> {\n\n let 
authorization = headers\n\n .get(\"Authorization\")\n", "file_path": "src/middleware/verify_auth.rs", "rank": 80, "score": 7.602839520651206 }, { "content": " Ok(o) => Ok(o),\n\n }?;\n\n\n\n let schema_json = match schemas.find(schema_id_result).select(json).first(conn) {\n\n Err(_) => Err(ApiError::new(ApiAvroErrorCode::BackendDatastoreError)),\n\n Ok(o) => Ok(o),\n\n }?;\n\n\n\n Ok((schema_id_result, schema_version as i32, schema_json))\n\n })\n\n }\n\n\n\n pub fn delete_subject_with_name(\n\n conn: &PgConnection,\n\n subject: String,\n\n ) -> Result<Vec<Option<i32>>, diesel::result::Error> {\n\n use super::schema::schema_versions::dsl::{\n\n id, schema_id, schema_versions, subject_id, version,\n\n };\n\n use super::schema::schemas::dsl::{id as schemas_id, schemas};\n", "file_path": "src/db/models/schema_versions.rs", "rank": 81, "score": 7.59205688641131 }, { "content": "use actix_web::http;\n\n\n\nuse crate::common::server::setup;\n\nuse crate::db::DbAuxOperations;\n\n\n\n#[actix_rt::test]\n\nasync fn test_get_schema_without_schema() {\n\n let (server, _) = setup();\n\n\n\n // it returns 404 with message\n\n server\n\n .test(\n\n http::Method::GET,\n\n \"/schemas/ids/1\",\n\n None,\n\n http::StatusCode::NOT_FOUND,\n\n r#\"\\{\"error_code\":40403,\"message\":\"Schema not found\"\\}\"#,\n\n )\n\n .await;\n\n}\n", "file_path": "tests/schemas.rs", "rank": 82, "score": 7.4618850164482735 }, { "content": "\n\n schema_versions\n\n .inner_join(subjects.on(subject_id.eq(subjects_id)))\n\n .inner_join(schemas.on(schema_id.eq(schemas_id)))\n\n .filter(subject_name.eq(search_subject_name))\n\n .filter(schema_id.eq(search_schema_id))\n\n .select(id)\n\n .execute(conn)\n\n .map_err(|_| ApiError::new(ApiAvroErrorCode::BackendDatastoreError))\n\n }\n\n\n\n pub fn versions_with_subject_name(\n\n conn: &PgConnection,\n\n subject_name: String,\n\n ) -> Result<Vec<Option<i32>>, ApiError> {\n\n use super::schema::schema_versions::dsl::{schema_versions, subject_id, version};\n\n use 
super::schema::subjects::dsl::{id as subjects_id, name, subjects};\n\n\n\n match schema_versions\n\n .inner_join(subjects.on(subject_id.eq(subjects_id)))\n", "file_path": "src/db/models/schema_versions.rs", "rank": 83, "score": 7.400459274593903 }, { "content": " schema_version: u32,\n\n ) -> Result<(i64, i32, String), ApiError> {\n\n use super::schema::schema_versions::dsl::{\n\n schema_id, schema_versions, subject_id, version,\n\n };\n\n use super::schema::schemas::dsl::{json, schemas};\n\n\n\n conn.transaction::<_, ApiError, _>(|| {\n\n let subject = Subject::get_by_name(conn, subject_name)?;\n\n\n\n let schema_id_result = match schema_versions\n\n .filter(subject_id.eq(subject.id))\n\n .filter(version.eq(Some(schema_version as i32)))\n\n .select(schema_id)\n\n .first(conn)\n\n {\n\n Err(diesel::result::Error::NotFound) => {\n\n Err(ApiError::new(ApiAvroErrorCode::VersionNotFound))\n\n }\n\n Err(_) => Err(ApiError::new(ApiAvroErrorCode::BackendDatastoreError)),\n", "file_path": "src/db/models/schema_versions.rs", "rank": 84, "score": 7.143868073173286 }, { "content": "\n\npub struct GetSubjects;\n\n\n\n#[derive(Debug, Serialize)]\n\npub struct SubjectVersionsResponse {\n\n // TODO: this should be a new type with values between 1 and 2^31-1\n\n pub versions: Vec<Option<i32>>,\n\n}\n\n\n\npub struct GetSubjectVersions {\n\n pub subject: String,\n\n}\n\n\n\n#[derive(Debug, Serialize)]\n\npub struct DeleteSubjectResponse {\n\n pub versions: Vec<Option<i32>>,\n\n}\n\n\n\npub struct DeleteSubject {\n\n pub subject: String,\n", "file_path": "src/db/models/subjects.rs", "rank": 85, "score": 7.139981008678999 }, { "content": "extern crate actix;\n\nextern crate chrono;\n\n#[macro_use]\n\nextern crate diesel;\n\nextern crate failure;\n\n#[macro_use]\n\nextern crate failure_derive;\n\n#[macro_use]\n\nextern crate log;\n\nextern crate env_logger;\n\nextern crate json;\n\n#[macro_use]\n\nextern crate serde_derive;\n\nextern crate serde_json;\n\n\n\npub mod api;\n\npub mod 
app;\n\npub mod db;\n\npub mod health;\n\npub mod middleware;\n", "file_path": "src/lib.rs", "rank": 86, "score": 6.894921812914395 }, { "content": "pub use self::verify_auth::VerifyAuthorization;\n\npub use self::verify_headers::VerifyAcceptHeader;\n\n\n\nmod verify_auth;\n\nmod verify_headers;\n", "file_path": "src/middleware/mod.rs", "rank": 87, "score": 6.833110864415671 }, { "content": "use actix_web::{\n\n dev::{Service, ServiceRequest, ServiceResponse, Transform},\n\n error::ParseError,\n\n http,\n\n};\n\nuse futures::future::{ok, Either, Ready};\n\nuse futures::task::{Context, Poll};\n\n\n\npub struct VerifyAcceptHeader;\n\n\n\nconst VALID_ACCEPT_HEADERS: [&str; 3] = [\n\n \"application/vnd.schemaregistry+json\",\n\n \"application/vnd.schemaregistry.v1+json\",\n\n \"application/json\",\n\n];\n\n\n\nimpl VerifyAcceptHeader {\n\n fn is_valid(headers: &http::header::HeaderMap) -> bool {\n\n match headers.get(http::header::ACCEPT) {\n\n Some(v) => match v.to_str() {\n", "file_path": "src/middleware/verify_headers.rs", "rank": 88, "score": 6.558116320112397 }, { "content": "#[derive(Debug, Fail, Clone)]\n\n#[fail(display = \"{}\", response)]\n\npub struct ApiError {\n\n pub status_code: StatusCode,\n\n pub response: ApiErrorResponse,\n\n}\n\n\n\nimpl ApiError {\n\n pub fn new(error_code: ApiAvroErrorCode) -> Self {\n\n let status_code = match error_code {\n\n ApiAvroErrorCode::SubjectNotFound => StatusCode::NOT_FOUND,\n\n ApiAvroErrorCode::VersionNotFound => StatusCode::NOT_FOUND,\n\n ApiAvroErrorCode::SchemaNotFound => StatusCode::NOT_FOUND,\n\n\n\n ApiAvroErrorCode::InvalidAvroSchema => StatusCode::UNPROCESSABLE_ENTITY,\n\n ApiAvroErrorCode::InvalidVersion => StatusCode::UNPROCESSABLE_ENTITY,\n\n ApiAvroErrorCode::InvalidCompatibilityLevel => StatusCode::UNPROCESSABLE_ENTITY,\n\n\n\n ApiAvroErrorCode::BackendDatastoreError => StatusCode::INTERNAL_SERVER_ERROR,\n\n ApiAvroErrorCode::OperationTimedOut => StatusCode::INTERNAL_SERVER_ERROR,\n", "file_path": 
"src/api/errors.rs", "rank": 89, "score": 6.4738277673923115 }, { "content": "pub use self::connection::{DbConnection, DbManage, DbPool};\n\n\n\nmod connection;\n\npub mod models;\n", "file_path": "src/db/mod.rs", "rank": 90, "score": 6.444871371302321 }, { "content": " ) -> Result<Vec<Option<i32>>, ApiError> {\n\n use super::SchemaVersion;\n\n\n\n match SchemaVersion::delete_subject_with_name(conn, subject_name) {\n\n Err(_) => Err(ApiError::new(ApiAvroErrorCode::BackendDatastoreError)),\n\n Ok(res) => {\n\n if !res.is_empty() {\n\n Ok(res)\n\n } else {\n\n Err(ApiError::new(ApiAvroErrorCode::SubjectNotFound))\n\n }\n\n }\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug, Serialize)]\n\npub struct SubjectList {\n\n pub content: Vec<String>,\n\n}\n", "file_path": "src/db/models/subjects.rs", "rank": 91, "score": 6.42743776986441 }, { "content": "use std::env;\n\n\n\nuse diesel::pg::PgConnection;\n\nuse diesel::r2d2::{ConnectionManager, Pool, PooledConnection};\n\n\n\nuse crate::api::errors::{ApiAvroErrorCode, ApiError};\n\n\n\npub type DbPool = Pool<ConnectionManager<PgConnection>>;\n\npub type DbConnection = PooledConnection<ConnectionManager<PgConnection>>;\n\n\n", "file_path": "src/db/connection.rs", "rank": 92, "score": 6.416499033532085 }, { "content": " }\n\n}\n\n\n\npub async fn get_subject_version_latest_schema(\n\n subject: Path<String>,\n\n db: Data<DbPool>,\n\n) -> impl Responder {\n\n let conn = db.connection()?;\n\n match get_subject_version_from_db(&conn, subject.into_inner(), None) {\n\n Ok(r) => Ok(HttpResponse::Ok().json(SchemaResponse { schema: r.schema })),\n\n Err(e) => Err(e),\n\n }\n\n}\n\n\n\npub async fn post_subject(\n\n subject: Path<String>,\n\n body: Json<SchemaBody>,\n\n db: Data<DbPool>,\n\n) -> impl Responder {\n\n let conn = db.connection()?;\n\n match Schema::verify_registration(&conn, subject.into_inner(), body.into_inner().schema) {\n\n Ok(response) => Ok(HttpResponse::Ok().json(response)),\n\n Err(e) => Err(e),\n\n }\n\n}\n", "file_path": 
"src/api/subjects.rs", "rank": 93, "score": 6.307203970806874 }, { "content": "pub async fn get_subject_version_latest(subject: Path<String>, db: Data<DbPool>) -> impl Responder {\n\n let conn = db.connection()?;\n\n match get_subject_version_from_db(&conn, subject.into_inner(), None) {\n\n Ok(r) => Ok(HttpResponse::Ok().json(r)),\n\n Err(e) => Err(e),\n\n }\n\n}\n\n\n\n// TODO: for now, do the same as for `get_subject_version` and then extract only the\n\n// schema\n\npub async fn get_subject_version_schema(\n\n info: Path<(String, u32)>,\n\n db: Data<DbPool>,\n\n) -> impl Responder {\n\n let q = info.into_inner();\n\n\n\n let conn = db.connection()?;\n\n match get_subject_version_from_db(&conn, q.0, Some(q.1)) {\n\n Ok(r) => Ok(HttpResponse::Ok().json(SchemaResponse { schema: r.schema })),\n\n Err(e) => Err(e),\n", "file_path": "src/api/subjects.rs", "rank": 94, "score": 6.270900136352223 }, { "content": "use actix_web::{\n\n web::{Data, Json, Path},\n\n HttpResponse, Responder,\n\n};\n\n\n\nuse crate::api::{\n\n errors::{ApiAvroErrorCode, ApiError},\n\n SchemaBody,\n\n};\n\nuse crate::db::models::{\n\n DeleteSubjectResponse, GetSubjectVersionResponse, Schema, SchemaResponse, SchemaVersion,\n\n Subject, SubjectList, SubjectVersionsResponse,\n\n};\n\nuse crate::db::{DbManage, DbPool};\n\n\n\npub async fn get_subjects(db: Data<DbPool>) -> impl Responder {\n\n let conn = db.connection()?;\n\n match Subject::distinct_names(&conn).map(|content| SubjectList { content }) {\n\n Ok(subjects) => Ok(HttpResponse::Ok().json(subjects.content)),\n\n Err(e) => Err(e),\n", "file_path": "src/api/subjects.rs", "rank": 95, "score": 6.058482742375373 }, { "content": " schemas (id) {\n\n id -> Int8,\n\n fingerprint -> Varchar,\n\n json -> Text,\n\n created_at -> Timestamp,\n\n updated_at -> Timestamp,\n\n fingerprint2 -> Nullable<Varchar>,\n\n }\n\n}\n\n\n\ntable! 
{\n\n subjects (id) {\n\n id -> Int8,\n\n name -> Text,\n\n created_at -> Timestamp,\n\n updated_at -> Timestamp,\n\n }\n\n}\n\n\n\nallow_tables_to_appear_in_same_query!(configs, schema_versions, schemas, subjects,);\n", "file_path": "src/db/models/schema.rs", "rank": 96, "score": 6.040709103978099 }, { "content": " schema: o.2,\n\n })\n\n}\n\n\n\n// TODO(nlopes): maybe new type here\n\n//\n\n// According to\n\n// https://docs.confluent.io/3.1.0/schema-registry/docs/api.html#get--subjects-(string-%20subject)-versions-(versionId-%20version)\n\n// the Version ID should be in the range of 1 to 2^31-1, which isn't u32. We should create\n\n// a new type with the boundaries of this.\n\npub async fn get_subject_version(info: Path<(String, u32)>, db: Data<DbPool>) -> impl Responder {\n\n let q = info.into_inner();\n\n\n\n let conn = db.connection()?;\n\n match get_subject_version_from_db(&conn, q.0, Some(q.1)) {\n\n Ok(r) => Ok(HttpResponse::Ok().json(r)),\n\n Err(e) => Err(e),\n\n }\n\n}\n\n\n", "file_path": "src/api/subjects.rs", "rank": 97, "score": 6.030640507687815 }, { "content": " let a = SchemaCompatibility::can_read(&writers_schema, &readers_schema);\n\n Ok(a)\n\n }\n\n\n\n pub fn find_by_fingerprint(\n\n conn: &PgConnection,\n\n fingerprint: String,\n\n ) -> Result<Option<Self>, ApiError> {\n\n use super::schema::schemas::dsl::{fingerprint2, schemas};\n\n Ok(schemas\n\n .filter(fingerprint2.eq(fingerprint))\n\n .load::<Self>(conn)\n\n .map_err(|_| ApiError::new(ApiAvroErrorCode::BackendDatastoreError))?\n\n .pop())\n\n }\n\n\n\n pub fn register_new_version(\n\n conn: &PgConnection,\n\n registration: RegisterSchema,\n\n ) -> Result<Self, ApiError> {\n", "file_path": "src/db/models/schemas.rs", "rank": 98, "score": 5.966580780268356 }, { "content": " diesel::delete(schemas).execute(self)?;\n\n diesel::delete(subjects).execute(self)?;\n\n diesel::delete(schema_versions).execute(self)\n\n })\n\n .unwrap();\n\n }\n\n\n\n fn reset_schemas(&self) {\n\n use 
avro_schema_registry::db::models::schema::schemas::dsl::*;\n\n diesel::delete(schemas).execute(self).unwrap();\n\n }\n\n\n\n fn reset_subjects(&self) {\n\n use avro_schema_registry::db::models::schema::subjects::dsl::*;\n\n diesel::delete(subjects).execute(self).unwrap();\n\n }\n\n\n\n fn reset_configs_global(&self) {\n\n use avro_schema_registry::db::models::schema::configs::dsl::*;\n\n\n", "file_path": "tests/db/mod.rs", "rank": 99, "score": 5.782918325788765 } ]
Rust
src/mdbook/files.rs
igorlesik/svdocgen
4e3548720e3fd7673634d557a340be9ecee58a36
use std::fs; use std::path::{Path, PathBuf}; use std::io; use crate::args; use crate::fsnode::FsNode; pub struct SrcFiles { pub nodes: FsNode, } pub fn collect_sources(options: &args::ParsedOptions) -> Result<SrcFiles,String> { let mut inputs: Vec<PathBuf> = Vec::new(); for input in &options.inputs { let path = Path::new(input); if !path.exists() { let include = options.includes.iter().find(|&x| Path::new(x).join(path).exists()); match include { Some(inc) => inputs.push(Path::new(inc).join(path)), None => { println!("Warning: can't find '{}' in {:?}", input, &options.includes); continue; }, } } else { inputs.push(path.to_path_buf()); } } let mut nodes = FsNode { name: String::from(""), children: Vec::new() }; for input in &inputs { println!("input path: {:?}", input); let is_already_present = nodes.exists(input); if is_already_present { println!("Warning: duplicate {:?}", input); } else { nodes.push(input); } } let mut nodes_with_files = nodes.clone(); let mut/*env*/ collect_files = |node: &FsNode, path: &PathBuf, _level: usize| { println!("traverse {}: {:?}", node.name, path); if path.is_dir() && node.children.is_empty() { println!("checking for files in {:?}", path); match visit_dir_and_search_files(&mut nodes_with_files, path) { Err(_) => println!("error"), _ => (), } } }; nodes.traverse_top(&mut collect_files); let src = SrcFiles { nodes: nodes_with_files, }; Ok(src) } fn visit_dir_and_search_files(nodes: &mut FsNode, dir: &Path) -> io::Result<()> { if dir.is_dir() { for entry in fs::read_dir(dir)? 
{ let entry = entry?; let path = entry.path(); if path.is_dir() { visit_dir_and_search_files(nodes, &path)?; } else { if let Some(ext) = path.extension() { if ext.eq("sv") || ext.eq("v") || ext.eq("md") { println!("add {:?}", path); nodes.push(&path); } } } } } Ok(()) } pub fn get_files_with_extensions( all_files: &FsNode, extensions: &[&str] ) -> Result<FsNode,String> { let mut files_with_ext = FsNode { name: String::from(""), children: Vec::new() }; let mut/*env*/ filter_files = |_node: &FsNode, path: &PathBuf, _level: usize| { if path.is_file() { if let Some(ext) = path.extension() { if extensions.contains(&ext.to_str().unwrap_or("")) { files_with_ext.push(&path); } } } }; all_files.traverse_top(&mut filter_files); Ok(files_with_ext) } pub fn get_md_files(all_files: &FsNode) -> Result<FsNode,String> { get_files_with_extensions(all_files, &["md"]) } pub fn get_sv_files(all_files: &FsNode) -> Result<FsNode,String> { get_files_with_extensions(all_files, &["sv", "v"]) }
use std::fs; use std::path::{Path, PathBuf}; use std::io; use crate::args; use crate::fsnode::FsNode; pub struct SrcFiles { pub nodes: FsNode, } pub fn collect_sources(options: &args::ParsedOptions) -> Result<SrcFiles,String> { let mut inputs: Vec<PathBuf> = Vec::new(); for input in &options.inputs { let path = Path::new(input); if !path.exists() { let include = options.includes.iter().find(|&x| Path::new(x).join(path).exists()); match include { Some(inc) => inputs.push(Path::new(inc).join(path)), None => { println!("Warning: can't find '{}' in {:?}", input, &options.includes); continue; }, } } else { inputs.push(path.to_path_buf()); } } let mut nodes = FsNode { name: String::from(""), children: Vec::new() }; for input in &inputs { println!("input path: {:?}", input); let is_already_present = nodes.exists(input); if is_already_present { println!("Warning: duplicate {:?}", input); } else { nodes.push(input); } } let mut nodes_with_files = nodes.clone(); let mut/*env*/ collect_files = |node: &FsNode, path: &PathBuf, _level: usize| { println!("traverse {}: {:?}", node.name, path); if path.is_dir() && node.children.is_empty() { println!("checking for files in {:?}", path); match visit_dir_and_search_files(&mut nodes_with_files, path) { Err(_) => println!("error"), _ => (), } } }; nodes.traverse_top(&mut collect_files); let src = SrcFiles { nodes: nodes_with_files, }; Ok(src) } fn visit_dir_and_search_files(nodes: &mut FsNode, dir: &Path) -> io::Result<()> { if dir.is_dir() { for entry in fs::read_dir(dir)? { let entry = entry?; let path = entry.path(); if path.is_dir() { visit_dir_and_search_files(nodes, &path)?; } else { if let Some(ext) = path.extension() { if ext.eq("sv") || ext.eq("v") || ext.eq("md") { println!("add {:?}", path); nodes.push(&path); } } } } } Ok(()) } pub fn get_files_with_extensions( all_files: &FsNode,
pub fn get_md_files(all_files: &FsNode) -> Result<FsNode,String> { get_files_with_extensions(all_files, &["md"]) } pub fn get_sv_files(all_files: &FsNode) -> Result<FsNode,String> { get_files_with_extensions(all_files, &["sv", "v"]) }
extensions: &[&str] ) -> Result<FsNode,String> { let mut files_with_ext = FsNode { name: String::from(""), children: Vec::new() }; let mut/*env*/ filter_files = |_node: &FsNode, path: &PathBuf, _level: usize| { if path.is_file() { if let Some(ext) = path.extension() { if extensions.contains(&ext.to_str().unwrap_or("")) { files_with_ext.push(&path); } } } }; all_files.traverse_top(&mut filter_files); Ok(files_with_ext) }
function_block-function_prefix_line
[ { "content": "/// Create files.md file that lists all input files.\n\n///\n\n///\n\nfn create_files_md(path: &str, files: &FsNode) -> Result<String,String> {\n\n\n\n let fname = Path::new(&path).join(\"files.md\");\n\n let fname = fname.to_str().unwrap();\n\n\n\n let file = match fs::OpenOptions::new()\n\n .read(false)\n\n .write(true)\n\n .create(true)\n\n .truncate(true)\n\n .open(fname) {\n\n Err(e) => return Err(e.to_string()),\n\n Ok(file) => file,\n\n };\n\n\n\n fn show_src(data: &mut Vec<String>, path: &str) {\n\n let src_path = Path::new(\"src\").join(path);\n\n let path_str = src_path.to_str().unwrap();\n\n let mut path_str_md = String::from(path_str);\n\n path_str_md.push_str(\".md\");\n", "file_path": "src/mdbook/generate.rs", "rank": 4, "score": 142094.26875911708 }, { "content": "pub fn get_identifier(node: RefNode) -> Option<Locate> {\n\n // unwrap_node! can take multiple types\n\n match unwrap_node!(node, SimpleIdentifier, EscapedIdentifier) {\n\n Some(RefNode::SimpleIdentifier(x)) => {\n\n return Some(x.nodes.0);\n\n }\n\n Some(RefNode::EscapedIdentifier(x)) => {\n\n return Some(x.nodes.0);\n\n }\n\n _ => None,\n\n }\n\n}\n\n\n", "file_path": "src/mdbook/svpar.rs", "rank": 5, "score": 133747.73945297225 }, { "content": "/// Copy all input files into mdBook `src` directory.\n\n///\n\n///\n\nfn copy_src_files(path: &str, files: &SrcFiles) -> Result<(),String> {\n\n\n\n let target_dir = Path::new(&path).join(\"src\");\n\n let target_dir = target_dir.to_str().unwrap();\n\n\n\n match fs::create_dir_all(target_dir) {\n\n Err(e) => return Err(e.to_string()),\n\n Ok(_) => (),\n\n }\n\n\n\n let mut/*env*/ create_dirs = |_node: &FsNode, path: &PathBuf, _level: usize| {\n\n if path.is_dir() {\n\n if let Some(path_str) = path.to_str() {\n\n if let Some(target_str) = Path::new(&target_dir).join(&path_str).to_str() {\n\n match fs::create_dir_all(target_str) {\n\n Err(e) => println!(\"error {}\", e.to_string()),\n\n Ok(_) => println!(\"create dir: {}\", 
target_str),\n\n }\n\n }\n\n }\n", "file_path": "src/mdbook/generate.rs", "rank": 6, "score": 130060.15147344844 }, { "content": "pub fn get_whole_str(\n\n syntax_tree: &SyntaxTree,\n\n node: &RefNode\n\n) -> String\n\n{\n\n let mut s = String::new();\n\n\n\n for subnode in node.clone().into_iter() {\n\n if let RefNode::Locate(_text) = subnode {\n\n let text = unwrap_locate!(subnode);\n\n if let Some(text) = text {\n\n let text = syntax_tree.get_str(text);\n\n if let Some(text) = text {\n\n s.push_str(text);\n\n }\n\n }\n\n }\n\n }\n\n\n\n s\n\n}", "file_path": "src/mdbook/svpar.rs", "rank": 8, "score": 110953.85292247348 }, { "content": "fn copy_assets_to_src(book_src_path: &Path)-> Result<(),String> {\n\n\n\n let asset_loadwavedrom_js = include_bytes!(\"../../assets/js/loadwavedrom.js\");\n\n fs::write(\n\n book_src_path.join(\"loadwavedrom.js\"),\n\n asset_loadwavedrom_js).expect(\"Unable to write file\");\n\n\n\n Ok(())\n\n}", "file_path": "src/mdbook/build.rs", "rank": 9, "score": 109925.89981160326 }, { "content": "/// Create mdBook SUMMARY.md file.\n\n///\n\n/// The summary file is used by mdBook to know what chapters to include,\n\n/// in what order they should appear, what their hierarchy is\n\n/// and where the source files are. 
Without this file, there is no book.\n\n///\n\nfn create_summary_md(mdbook_src_dir: &str, src_files: &SrcFiles) -> Result<(),String> {\n\n\n\n let summary_fname = Path::new(&mdbook_src_dir).join(MDBOOK_SUMMARY_MD);\n\n let summary_fname = summary_fname.to_str().unwrap();\n\n\n\n let file = match fs::OpenOptions::new()\n\n .read(false)\n\n .write(true)\n\n .create(true)\n\n .truncate(true)\n\n .open(summary_fname) {\n\n Err(e) => return Err(e.to_string()),\n\n Ok(file) => file,\n\n };\n\n\n\n let mut text_buf: Vec<String> = Vec::new();\n\n text_buf.push(\"# Summary\\n\".to_string());\n\n\n\n text_buf.push(\"\\n- [User's Documentation]()\\n\".to_string());\n\n let users_md_docs = list_users_md_docs(mdbook_src_dir, src_files)?;\n", "file_path": "src/mdbook/generate.rs", "rank": 10, "score": 109700.51306092394 }, { "content": "pub fn generate_sv_package_info(\n\n output_path: &str,\n\n file_path: &str\n\n) -> (Vec<String>, Vec<(String, String, String)>)\n\n{\n\n let mut text: Vec<String> = Vec::new();\n\n let mut list: Vec<(String, String, String)> = Vec::new();\n\n\n\n // The list of defined macros\n\n let defines = HashMap::new();\n\n // The list of include paths\n\n let includes: Vec<PathBuf> = Vec::new();\n\n\n\n // Parse\n\n let result = parse_sv(file_path, &defines, &includes, false, true);\n\n\n\n if let Ok((syntax_tree, _)) = result {\n\n let mut prev_node: Option<RefNode> = None;\n\n // &SyntaxTree is iterable\n\n for node in &syntax_tree {\n", "file_path": "src/mdbook/svpkg.rs", "rank": 11, "score": 108397.7073233508 }, { "content": "pub fn generate_sv_class_info(\n\n output_path: &str,\n\n file_path: &str\n\n) -> (Vec<String>, Vec<(String, String, String)>)\n\n{\n\n let mut text: Vec<String> = Vec::new();\n\n let mut list: Vec<(String, String, String)> = Vec::new();\n\n\n\n // The list of defined macros\n\n let defines = HashMap::new();\n\n // The list of include paths\n\n let includes: Vec<PathBuf> = Vec::new();\n\n\n\n // Parse\n\n let result = 
parse_sv(file_path, &defines, &includes, false, true);\n\n\n\n if let Ok((syntax_tree, _)) = result {\n\n let mut prev_node: Option<RefNode> = None;\n\n // &SyntaxTree is iterable\n\n for node in &syntax_tree {\n", "file_path": "src/mdbook/svclass.rs", "rank": 12, "score": 108397.7073233508 }, { "content": "pub fn generate_sv_interface_info(\n\n output_path: &str,\n\n file_path: &str\n\n) -> (Vec<String>, Vec<(String, String, String)>)\n\n{\n\n let mut text: Vec<String> = Vec::new();\n\n let mut list: Vec<(String, String, String)> = Vec::new();\n\n\n\n // The list of defined macros\n\n let defines = HashMap::new();\n\n // The list of include paths\n\n let includes: Vec<PathBuf> = Vec::new();\n\n\n\n // Parse\n\n let result = parse_sv(file_path, &defines, &includes, false, true);\n\n\n\n if let Ok((syntax_tree, _)) = result {\n\n let mut prev_node: Option<RefNode> = None;\n\n // &SyntaxTree is iterable\n\n for node in &syntax_tree {\n", "file_path": "src/mdbook/sviface.rs", "rank": 13, "score": 108397.7073233508 }, { "content": "pub fn generate_sv_module_info(\n\n output_path: &str,\n\n file_path: &str\n\n) -> (Vec<String>, Vec<(String, String, String)>)\n\n{\n\n let mut text: Vec<String> = Vec::new();\n\n let mut list: Vec<(String, String, String)> = Vec::new();\n\n\n\n // The list of defined macros\n\n let defines = HashMap::new();\n\n // The list of include paths\n\n let includes: Vec<PathBuf> = Vec::new();\n\n\n\n // Parse\n\n let result = parse_sv(file_path, &defines, &includes, false, true);\n\n\n\n if let Ok((syntax_tree, _)) = result {\n\n let mut prev_node: Option<RefNode> = None;\n\n // &SyntaxTree is iterable\n\n for node in &syntax_tree {\n", "file_path": "src/mdbook/svmodule.rs", "rank": 14, "score": 108397.7073233508 }, { "content": "/// Parse command line arguments and return ParsedOptions struct.\n\n///\n\n/// Uses `clap` crate to parse command line arguments.\n\n///\n\npub fn parse_args() -> ParsedOptions {\n\n\n\n let matches = 
App::new(\"SystemVerilog Documentation Generator\")\n\n .version(\"0.1.0\")\n\n .author(\"Igor Lesik <[email protected]>\")\n\n .about(\"Finds .sv and .md files in SV project directory and generates documentation.\")\n\n .arg(Arg::with_name(\"output\")\n\n .short(\"o\")\n\n .long(\"output\")\n\n .value_name(\"DIR\")\n\n .takes_value(true)\n\n .help(\"Set output directory for generated artifacts.\"))\n\n .arg(Arg::with_name(\"INPUT\")\n\n .help(\"Set the input file or directory\")\n\n .required(true)\n\n .multiple(true)\n\n .index(1))\n\n .arg(Arg::with_name(\"include\")\n\n .short(\"i\")\n\n .long(\"include\")\n", "file_path": "src/args.rs", "rank": 15, "score": 106692.67187783291 }, { "content": "/// Create mdBook book.toml file.\n\n///\n\n/// The `book.toml` file is used by mdBook to know the configuration.\n\n///\n\nfn create_book_toml(path: &str, project_name: &str) -> Result<(),String> {\n\n\n\n let book_toml_fname = Path::new(&path).join(MDBOOK_BOOK_TOML);\n\n let book_toml_fname = book_toml_fname.to_str().unwrap();\n\n\n\n let file = match fs::OpenOptions::new()\n\n .read(false)\n\n .write(true)\n\n .create(true)\n\n .truncate(true)\n\n .open(book_toml_fname) {\n\n Err(e) => return Err(e.to_string()),\n\n Ok(file) => file,\n\n };\n\n\n\n let data = format!(r#\"\n\n[book]\n\ntitle = \"{}\"\n\nauthors = [\"Godzilla\"]\n\n\n", "file_path": "src/mdbook/generate.rs", "rank": 16, "score": 105684.93772590098 }, { "content": "fn copy_assets(book_path: &Path)-> Result<(),String> {\n\n\n\n let asset_highlight_js = include_bytes!(\"../../assets/js/highlight.js\");\n\n //include_flate::flate!(static asset_highlight_js: str from \"../../assets/js/highlight.js\");\n\n\n\n //println!(\"js: {}\", String::from_utf8_lossy(asset_highlight_js));\n\n\n\n fs::write(\n\n book_path.join(\"highlight.js\"),\n\n asset_highlight_js).expect(\"Unable to write file\");\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/mdbook/build.rs", "rank": 17, "score": 102516.05765563258 }, { "content": 
"/// Generate mdBook sources.\n\n///\n\n/// ```svgbob\n\n/// .--.---.\n\n/// SV |# \\_ | DOC\n\n/// o-->||__(_)|*-->\n\n/// | \\ \\|\n\n/// '----'-'\n\n/// ```\n\npub fn generate(options: &args::ParsedOptions) -> Result<(),String> {\n\n\n\n\n\n match fs::create_dir_all(&options.output_dir) {\n\n Err(e) => { println!(\"Can't create output directory '{}' error: {}\",\n\n &options.output_dir, e);\n\n return Err(e.to_string()); },\n\n Ok(_) => println!(\"Created output directory '{}'\", &options.output_dir),\n\n }\n\n\n\n let mdbook_src_dir = Path::new(&options.output_dir).join(MDBOOK_SRC_DIR);\n\n let mdbook_src_dir = mdbook_src_dir.to_str().unwrap();\n\n\n\n match fs::create_dir_all(mdbook_src_dir) {\n\n Err(e) => { println!(\"Can't create '{}' error: {}\",\n\n mdbook_src_dir, e);\n\n return Err(e.to_string()); },\n\n Ok(_) => println!(\"Created directory '{}'\", mdbook_src_dir),\n\n }\n\n\n", "file_path": "src/mdbook/generate.rs", "rank": 18, "score": 87124.10243556672 }, { "content": "/// Build mdBook from mdBook sources.\n\n///\n\n/// ```svgbob\n\n/// .--.---.\n\n/// SV |# \\_ | DOC\n\n/// o-->||__(_)|*-->\n\n/// | \\ \\|\n\n/// '----'-'\n\n/// ```\n\npub fn build(options: &args::ParsedOptions) -> Result<(),String> {\n\n\n\n //let mdbook_src_dir_p = Path::new(&options.output_dir).join(MDBOOK_SRC_DIR);\n\n //let mdbook_src_dir = mdbook_book_src_p.to_str().unwrap();\n\n\n\n let mdbook_book_dir_p = Path::new(&options.output_dir).join(MDBOOK_BOOK_DIR);\n\n let mdbook_book_dir = mdbook_book_dir_p.to_str().unwrap();\n\n\n\n match fs::create_dir_all(mdbook_book_dir) {\n\n Err(e) => { println!(\"Can't create '{}' error: {}\",\n\n mdbook_book_dir, e);\n\n return Err(e.to_string()); },\n\n Ok(_) => println!(\"Created directory '{}'\", mdbook_book_dir),\n\n }\n\n\n\n\n\n let /*mut*/ md = MDBook::load(&options.output_dir).expect(\"Unable to load the book\");\n\n\n\n copy_assets_to_src(&Path::new(&options.output_dir)).expect(\"failed to copy assets\");\n\n\n\n 
md.build().expect(\"Building failed\");\n\n\n\n copy_assets(&mdbook_book_dir_p).expect(\"failed to copy assets\");\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/mdbook/build.rs", "rank": 19, "score": 87124.10243556673 }, { "content": "fn print_class(\n\n top_text: &mut Vec<String>,\n\n output_path: &str,\n\n file_path: &str,\n\n class_name: &str,\n\n _syntax_tree: &SyntaxTree,\n\n _class_node: &RefNode,\n\n _prev_node: &Option<RefNode>\n\n) -> (String, String, String)\n\n{\n\n let mut text = String::new();\n\n\n\n text.push_str(format!(\"## Class `{}`\\n\\n\", class_name).as_str());\n\n text.push_str(format!(\"File: `{}`\\n\\n\", file_path).as_str());\n\n\n\n /*text.push_str(format!(\"### Ports: \\n\\n\").as_str());\n\n if is_ansi {\n\n print_ansi_ports(&mut text, syntax_tree, module_node);\n\n }\n\n\n", "file_path": "src/mdbook/svclass.rs", "rank": 20, "score": 71845.07373009343 }, { "content": "fn print_package(\n\n top_text: &mut Vec<String>,\n\n output_path: &str,\n\n file_path: &str,\n\n pkg_name: &str,\n\n _syntax_tree: &SyntaxTree,\n\n _pkg_node: &RefNode,\n\n _prev_node: &Option<RefNode>\n\n) -> (String, String, String)\n\n{\n\n let mut text = String::new();\n\n\n\n text.push_str(format!(\"## Package `{}`\\n\\n\", pkg_name).as_str());\n\n text.push_str(format!(\"File: `{}`\\n\\n\", file_path).as_str());\n\n\n\n /*text.push_str(format!(\"### Ports: \\n\\n\").as_str());\n\n if is_ansi {\n\n print_ansi_ports(&mut text, syntax_tree, module_node);\n\n }\n\n\n", "file_path": "src/mdbook/svpkg.rs", "rank": 21, "score": 71845.07373009343 }, { "content": "fn print_module(\n\n top_text: &mut Vec<String>,\n\n output_path: &str,\n\n file_path: &str,\n\n module_name: &str,\n\n is_ansi: bool,\n\n syntax_tree: &SyntaxTree,\n\n module_node: &RefNode,\n\n prev_node: &Option<RefNode>\n\n) -> (String, String, String)\n\n{\n\n let mut text = String::new();\n\n\n\n text.push_str(format!(\"## Module `{}`\\n\\n\", module_name).as_str());\n\n text.push_str(format!(\"File: 
`{}`\\n\\n\", file_path).as_str());\n\n\n\n text.push_str(format!(\"### Ports: \\n\\n\").as_str());\n\n if is_ansi {\n\n print_ansi_ports(&mut text, syntax_tree, module_node);\n\n }\n", "file_path": "src/mdbook/svmodule.rs", "rank": 22, "score": 71845.07373009343 }, { "content": "fn print_iface(\n\n top_text: &mut Vec<String>,\n\n output_path: &str,\n\n file_path: &str,\n\n iface_name: &str,\n\n _syntax_tree: &SyntaxTree,\n\n _iface_node: &RefNode,\n\n _prev_node: &Option<RefNode>\n\n) -> (String, String, String)\n\n{\n\n let mut text = String::new();\n\n\n\n text.push_str(format!(\"## Interface `{}`\\n\\n\", iface_name).as_str());\n\n text.push_str(format!(\"File: `{}`\\n\\n\", file_path).as_str());\n\n\n\n /*text.push_str(format!(\"### Ports: \\n\\n\").as_str());\n\n if is_ansi {\n\n print_ansi_ports(&mut text, syntax_tree, module_node);\n\n }\n\n\n", "file_path": "src/mdbook/sviface.rs", "rank": 23, "score": 71845.07373009343 }, { "content": "/// Create modules.md file that lists all input files.\n\n///\n\n///\n\nfn create_modules_md(\n\n output_path: &str,\n\n files: &FsNode\n\n) -> Result<Vec<(String,String,String)>,String>\n\n{\n\n\n\n let fname = Path::new(&output_path).join(\"modules.md\");\n\n let fname = fname.to_str().unwrap();\n\n\n\n let file = match fs::OpenOptions::new()\n\n .read(false)\n\n .write(true)\n\n .create(true)\n\n .truncate(true)\n\n .open(fname) {\n\n Err(e) => return Err(e.to_string()),\n\n Ok(file) => file,\n\n };\n\n\n\n fn print_module_info(output_path: &str, path: &str)\n", "file_path": "src/mdbook/generate.rs", "rank": 24, "score": 69936.70167413392 }, { "content": "fn print_ansi_ports(\n\n text: &mut String,\n\n syntax_tree: &SyntaxTree,\n\n module_node: &RefNode\n\n)\n\n{\n\n // FIXME TODO check if it can be done without cloning\n\n for node in module_node.clone().into_iter() {\n\n // The type of each node is RefNode\n\n match node {\n\n RefNode::AnsiPortDeclaration(x) => {\n\n //text.push(format!(\"{:?}\\n\", x));\n\n let id = 
unwrap_node!(x, PortIdentifier).unwrap();\n\n let id = unwrap_locate!(id).unwrap();\n\n let id = syntax_tree.get_str(id).unwrap();\n\n text.push_str(format!(\"- {}\\n\", id).as_str());\n\n\n\n let dir = unwrap_node!(x, PortDirection);\n\n let dir_str = match dir {\n\n Some(RefNode::PortDirection(PortDirection::Input(_))) => \"➔ input\",\n", "file_path": "src/mdbook/svmodule.rs", "rank": 25, "score": 69929.86457288227 }, { "content": "fn print_module_comments(\n\n text: &mut String,\n\n syntax_tree: &SyntaxTree,\n\n _module_node: &RefNode,\n\n prev_node: &Option<RefNode>,\n\n)\n\n{\n\n for node in prev_node.clone().into_iter() {\n\n match node {\n\n RefNode::Comment(x) => {\n\n let comment = unwrap_node!(x, Comment).unwrap();\n\n //text.push_str(format!(\"\\ncomment:\\n {}\\n\", get_whole_str(syntax_tree, &comment)).as_str());\n\n let comment = unwrap_locate!(comment).unwrap();\n\n let comment = syntax_tree.get_str(comment).unwrap();\n\n text.push_str(format!(\"\\n\\n### Description:\\n\\n\").as_str());\n\n text.push_str(extract_text_from_comment(comment).as_str());\n\n }\n\n _ => (),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/mdbook/svmodule.rs", "rank": 26, "score": 69929.86457288227 }, { "content": "fn print_instantiated_modules(\n\n text: &mut String,\n\n syntax_tree: &SyntaxTree,\n\n module_node: &RefNode\n\n)\n\n{\n\n let mut mod_instances: HashMap<String, Vec<String>> = HashMap::new();\n\n\n\n for node in module_node.clone().into_iter() {\n\n match node {\n\n RefNode::ModuleInstantiation(x) => {\n\n let mod_name = unwrap_node!(x, ModuleIdentifier).unwrap();\n\n let mod_name = svpar::get_identifier(mod_name).unwrap();\n\n let mod_name = syntax_tree.get_str(&mod_name).unwrap();\n\n\n\n let inst_name = unwrap_node!(x, InstanceIdentifier).unwrap();\n\n let inst_name = svpar::get_identifier(inst_name).unwrap();\n\n let inst_name = syntax_tree.get_str(&inst_name).unwrap();\n\n\n\n let /*mut*/ m = match mod_instances.get_mut(mod_name) {\n", "file_path": 
"src/mdbook/svmodule.rs", "rank": 27, "score": 69929.86457288227 }, { "content": "fn create_sv_docs(\n\n mdbook_src_dir: &str,\n\n all_files: &SrcFiles\n\n) -> Result<Vec<String>,String>\n\n{\n\n\n\n let sv_files = mdbook::files::get_sv_files(&all_files.nodes)?;\n\n\n\n let mut text_buf: Vec<String> = Vec::new();\n\n\n\n let file_list = create_files_md(mdbook_src_dir, &sv_files)?;\n\n text_buf.push(\"- [Files](files.md)\\n\".to_string());\n\n text_buf.push(file_list);\n\n\n\n let mut module_list = create_modules_md(mdbook_src_dir, &sv_files)?;\n\n module_list.sort();\n\n text_buf.push(\"- [Modules](modules.md)\\n\".to_string());\n\n for module in &module_list {\n\n text_buf.push(format!(\" - [`{}` :{}]({})\\n\", module.0, module.1, module.2));\n\n }\n", "file_path": "src/mdbook/generate.rs", "rank": 28, "score": 69929.86457288227 }, { "content": "fn create_classes_md(\n\n output_path: &str,\n\n files: &FsNode\n\n) -> Result<Vec<(String,String,String)>,String>\n\n{\n\n\n\n let fname = Path::new(&output_path).join(\"classes.md\");\n\n let fname = fname.to_str().unwrap();\n\n\n\n let file = match fs::OpenOptions::new()\n\n .read(false).write(true).create(true).truncate(true)\n\n .open(fname) {\n\n Err(e) => return Err(e.to_string()),\n\n Ok(file) => file,\n\n };\n\n\n\n fn print_class_info(output_path: &str, path: &str)\n\n -> (Vec<String>, Vec<(String,String,String)>)\n\n {\n\n mdbook::svclass::generate_sv_class_info(output_path, path)\n", "file_path": "src/mdbook/generate.rs", "rank": 29, "score": 69929.86457288227 }, { "content": "fn create_ifaces_md(\n\n output_path: &str,\n\n files: &FsNode\n\n) -> Result<Vec<(String,String,String)>,String>\n\n{\n\n\n\n let fname = Path::new(&output_path).join(\"ifaces.md\");\n\n let fname = fname.to_str().unwrap();\n\n\n\n let file = match fs::OpenOptions::new()\n\n .read(false).write(true).create(true).truncate(true)\n\n .open(fname) {\n\n Err(e) => return Err(e.to_string()),\n\n Ok(file) => file,\n\n };\n\n\n\n fn 
print_iface_info(output_path: &str, path: &str)\n\n -> (Vec<String>, Vec<(String,String,String)>)\n\n {\n\n mdbook::sviface::generate_sv_interface_info(output_path, path)\n", "file_path": "src/mdbook/generate.rs", "rank": 30, "score": 69929.86457288227 }, { "content": "fn create_packages_md(\n\n output_path: &str,\n\n files: &FsNode\n\n) -> Result<Vec<(String,String,String)>,String>\n\n{\n\n\n\n let fname = Path::new(&output_path).join(\"packages.md\");\n\n let fname = fname.to_str().unwrap();\n\n\n\n let file = match fs::OpenOptions::new()\n\n .read(false).write(true).create(true).truncate(true)\n\n .open(fname) {\n\n Err(e) => return Err(e.to_string()),\n\n Ok(file) => file,\n\n };\n\n\n\n fn print_pkg_info(output_path: &str, path: &str)\n\n -> (Vec<String>, Vec<(String,String,String)>)\n\n {\n\n mdbook::svpkg::generate_sv_package_info(output_path, path)\n", "file_path": "src/mdbook/generate.rs", "rank": 31, "score": 69929.86457288227 }, { "content": "fn list_users_md_docs(\n\n _mdbook_src_dir: &str,\n\n src_files: &SrcFiles\n\n) -> Result<Vec<(usize,String,String)>,String>\n\n{\n\n let md_files = mdbook::files::get_md_files(&src_files.nodes)?;\n\n\n\n let mut list: Vec<(usize, String, String)> = Vec::new();\n\n\n\n let mut/*env*/ list_md_files = |node: &FsNode, path: &PathBuf, level: usize| {\n\n if path.is_file() {\n\n let mdbook_path = Path::new(\"src\").join(path);\n\n let mdbook_path_str = mdbook_path.to_str().unwrap_or(\"\");\n\n list.push((level, node.name.clone(), mdbook_path_str.to_string()));\n\n }\n\n else {\n\n list.push((level, node.name.clone(), \"\".to_string()));\n\n }\n\n };\n\n\n\n md_files.traverse(&mut PathBuf::from(\"\"), 1, &mut list_md_files);\n\n\n\n Ok(list)\n\n}\n\n\n", "file_path": "src/mdbook/generate.rs", "rank": 32, "score": 68151.22717102536 }, { "content": " None => return false,\n\n }\n\n },\n\n _ => (),\n\n }\n\n }\n\n true\n\n }\n\n\n\n /// Traverse (iterate over) depth-first.\n\n ///\n\n pub fn traverse(\n\n &self,\n\n 
parent_path: &mut PathBuf,\n\n level: usize,\n\n f: &mut impl FnMut(&FsNode, &PathBuf, usize)\n\n ) {\n\n let path = parent_path;\n\n for child in &self.children {\n\n path.push(child.name.clone());\n", "file_path": "src/fsnode.rs", "rank": 33, "score": 59825.63110300759 }, { "content": "/// Tuple struct wrapping FsNode.\n\npub struct FsNodeIter<'a>(&'a FsNode);\n\n\n\n\n\nimpl FsNode {\n\n\n\n pub fn push(&mut self, path: &PathBuf) {\n\n let mut node: &mut FsNode = self;\n\n for component in path.components() {\n\n //println!(\"component {:?}\", component);\n\n match component {\n\n path::Component::Normal(_) => {\n\n //println!(\"normal component {:?}\", component);\n\n let name = component.as_os_str().to_string_lossy();\n\n let pos = node.children.iter().position(|child| child.name.eq(&name));\n\n node = match pos {\n\n Some(pos) => node.children.get_mut(pos).unwrap(),\n\n None => {\n\n let new_node = FsNode {name: name.to_string(), children: Vec::new()};\n\n node.children.push(new_node);\n", "file_path": "src/fsnode.rs", "rank": 34, "score": 59824.115756536594 }, { "content": " node.children.last_mut().unwrap()\n\n },\n\n }\n\n },\n\n _ => (),\n\n }\n\n }\n\n }\n\n\n\n pub fn exists(&self, path: &PathBuf) -> bool {\n\n let mut node: &FsNode = self;\n\n for component in path.components() {\n\n //println!(\"component {:?}\", component);\n\n match component {\n\n path::Component::Normal(_) => {\n\n //println!(\"normal component {:?}\", component);\n\n let name = component.as_os_str().to_string_lossy();\n\n let child = node.children.iter().find(|child| child.name.eq(&name));\n\n node = match child {\n\n Some(existing_node) => existing_node,\n", "file_path": "src/fsnode.rs", "rank": 35, "score": 59822.09915046418 }, { "content": "//! Tree structure to keep FS paths.\n\n//!\n\n//! FsNodeIter implement external iterator for FsNode tree.\n\n//! Implementation ideas:\n\n//!\n\n//! 
- <https://aloso.github.io/2021/03/09/creating-an-iterator>\n\n//!\n\n\n\nuse std::path;\n\nuse std::path::{/*Path,*/ PathBuf};\n\n\n\n/// File System node as path to file or directory.\n\n///\n\n///#[derive(Clone)]\n\npub struct FsNode {\n\n pub name: String,\n\n pub children: Vec<FsNode>,\n\n}\n\n\n\n\n", "file_path": "src/fsnode.rs", "rank": 36, "score": 59820.94430470011 }, { "content": " f(child, &path, level);\n\n child.traverse(path, level + 1, f);\n\n path.pop();\n\n }\n\n }\n\n\n\n pub fn traverse_top(\n\n &self,\n\n f: &mut impl FnMut(&FsNode, &PathBuf, usize))\n\n {\n\n self.traverse(&mut PathBuf::from(\"\"), 0, f)\n\n }\n\n\n\n pub fn iter(&self) -> FsNodeIter<'_> {\n\n FsNodeIter(self)\n\n }\n\n\n\n}\n\n\n\nimpl Clone for FsNode {\n", "file_path": "src/fsnode.rs", "rank": 37, "score": 59820.3999519439 }, { "content": "\n\n fn clone(&self) -> Self {\n\n FsNode {\n\n name: self.name.clone(),\n\n children: self.children.clone()\n\n }\n\n }\n\n\n\n fn clone_from(&mut self, source: &Self) {\n\n self.name = source.name.clone();\n\n self.children = source.children.clone();\n\n }\n\n}\n\n\n\n\n\nimpl<'a> Iterator for FsNodeIter<'a> {\n\n type Item = &'a FsNode;\n\n\n\n fn next(&mut self) -> Option<Self::Item> {\n\n //todo!()\n\n None\n\n }\n\n}\n\n\n", "file_path": "src/fsnode.rs", "rank": 38, "score": 59818.27740323212 }, { "content": "fn extract_text_from_comment(raw_text: &str) -> String\n\n{\n\n let re = Regex::new(r\"^\\s*/(\\*)+\").unwrap();\n\n let text = re.replace_all(raw_text, \"\");\n\n\n\n let re = Regex::new(r\"(\\*/)+\\s*$\").unwrap();\n\n let text = re.replace_all(&text, \"\");\n\n\n\n // Ugly workaround for:\n\n // \"Note that ^ matches after new lines, even at the end of input\"\n\n let re = Regex::new(r\"(?m)^\\s*(\\*)+\\s*$\").unwrap();\n\n let text = re.replace_all(&text, \"<!--empty_line-->\");\n\n\n\n let re = Regex::new(r\"(?m)^\\s*(\\*)+\").unwrap();\n\n let text = re.replace_all(&text, \"\");\n\n\n\n // Fixup for the ugly 
workaround\n\n let re = Regex::new(r\"(?m)^<!--empty_line-->$\").unwrap();\n\n let text = re.replace_all(&text, \"\");\n\n\n\n text.to_string()\n\n}", "file_path": "src/mdbook/svmodule.rs", "rank": 39, "score": 57240.79058489183 }, { "content": " .help(\"Include path where input files and directories are located\")\n\n .required(false)\n\n .takes_value(true)\n\n .multiple(true)\n\n .number_of_values(1))\n\n .arg(Arg::with_name(\"project-name\")\n\n .long(\"project-name\")\n\n .takes_value(true)\n\n .help(\"Project name string.\"))\n\n .get_matches();\n\n\n\n let output_dir = matches.value_of(\"output\").unwrap_or(\"svdoc\");\n\n\n\n let inputs: Vec<&str> = matches.values_of(\"INPUT\").unwrap().collect();\n\n\n\n let includes: Vec<&str> = match matches.values_of(\"include\") {\n\n Some(values) => values.collect(),\n\n None => Vec::new(),\n\n };\n\n\n\n let project_name = matches.value_of(\"project-name\").unwrap_or(\"\");\n\n\n\n ParsedOptions {\n\n output_dir: String::from(output_dir),\n\n inputs: inputs.iter().map(|&x| String::from(x)).collect(),\n\n includes: includes.iter().map(|&x| String::from(x)).collect(),\n\n project_name: String::from(project_name)\n\n }\n\n}", "file_path": "src/args.rs", "rank": 44, "score": 27714.610776340323 }, { "content": "//! Parse `svdocgen` command line arguments.\n\n//!\n\n//! Most of the work is done by <https://docs.rs/clap/latest/clap/>.\n\n//! 
After the parsing all information is put in to `struct ParsedOptions`.\n\n//!\n\n\n\nuse clap::{Arg, App/*, SubCommand*/};\n\n\n\n/// All configuration options and input info in one place.\n\n///\n\n/// TODO: think to use <https://lib.rs/crates/structopt>\n\n///\n\npub struct ParsedOptions {\n\n pub output_dir: String,\n\n pub inputs: Vec<String>,\n\n pub includes: Vec<String>,\n\n pub project_name: String\n\n}\n\n\n\n/// Parse command line arguments and return ParsedOptions struct.\n\n///\n\n/// Uses `clap` crate to parse command line arguments.\n\n///\n", "file_path": "src/args.rs", "rank": 45, "score": 27709.63493679312 }, { "content": "//! # Use as library\n\n//!\n\n//! TODO: #[doc = svgbobdoc::transform_mdstr!(\n\n\n\npub mod args;\n\npub mod fsnode;\n\npub mod mdbook;\n\n\n\n// TODO static_assert!\n\nconst _: () = assert!(std::mem::size_of::<u64>() == 8);\n", "file_path": "src/lib.rs", "rank": 46, "score": 27705.546804081918 }, { "content": "//! Generate documentation for SystemVerilog project.\n\n//!\n\n//! ```svgbob\n\n//! .--.---.\n\n//! SV |# \\_ | DOC\n\n//! o-->||__(_)|*-->\n\n//! | \\ \\|\n\n//! '----'-'\n\n//! ```\n\n//!\n\n//! SvDocGen is primarily used as a command line tool,\n\n//! even though it exposes all its functionality as a Rust crate\n\n//! for integration in other projects.\n\n//!\n\n//! # Binary `svdocgen`\n\n//!\n\n//! ```terminal\n\n//! $svdocgen [INPUT]\n\n//! 
```\n\n//!\n", "file_path": "src/lib.rs", "rank": 47, "score": 27701.55326444395 }, { "content": " let mut/*env*/ print_files = |_node: &FsNode, path: &PathBuf, _level: usize| {\n\n if path.is_file() {\n\n if let Some(path_str) = path.to_str() {\n\n //data.push(format!(\"- {}\\n\", path_str));\n\n show_src(&mut data, path_str);\n\n add_to_list(&mut list, path_str);\n\n }\n\n }\n\n };\n\n\n\n files.traverse_top(&mut print_files);\n\n\n\n let mut writer = BufWriter::new(file);\n\n\n\n for d in &data {\n\n match writer.write_all(d.as_bytes()) {\n\n Err(e) => return Err(e.to_string()),\n\n Ok(_) => (),\n\n }\n\n }\n\n\n\n Ok(list)\n\n}\n\n\n", "file_path": "src/mdbook/generate.rs", "rank": 48, "score": 26564.64147476009 }, { "content": " }\n\n };\n\n\n\n files.nodes.traverse_top(&mut create_dirs);\n\n\n\n let mut/*env*/ copy_files = |_node: &FsNode, path: &PathBuf, _level: usize| {\n\n if path.is_file() { if let Some(fname) = path.file_name() {\n\n let target = if let Some(parent) = path.parent() {\n\n Path::new(&target_dir).join(parent).join(fname)\n\n }\n\n else {\n\n Path::new(&target_dir).join(fname)\n\n };\n\n match fs::copy(&path, &target) {\n\n Err(e) => println!(\"error {:?} copying {:?} {:?}\", e, &path, &target),\n\n Ok(nr_bytes) => println!(\"copied {} bytes from {:?} to {:?}\", nr_bytes, &path, &target),\n\n }\n\n\n\n let mut ext = target.extension().unwrap().to_os_string();\n\n if ext.eq(\"v\") || ext.eq(\"sv\") {\n", "file_path": "src/mdbook/generate.rs", "rank": 49, "score": 26564.22447592361 }, { "content": " let src_files = mdbook::files::collect_sources(options)?;\n\n\n\n copy_src_files(mdbook_src_dir, &src_files)?;\n\n\n\n create_summary_md(mdbook_src_dir, &src_files)?;\n\n\n\n create_book_toml(&options.output_dir, &options.project_name)?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/mdbook/generate.rs", "rank": 50, "score": 26560.304855713803 }, { "content": "//! 
Parse SV files and generate mdBook sources.\n\n//!\n\n//!\n\n//!\n\n\n\nuse std::fs;\n\nuse std::path::{Path, PathBuf};\n\nuse std::io::{BufWriter, Write};\n\n\n\nuse crate::args;\n\nuse crate::mdbook;\n\nuse crate::mdbook::files::SrcFiles;\n\nuse crate::fsnode::FsNode;\n\n\n\nconst MDBOOK_SRC_DIR: &str = \"src\";\n\nconst MDBOOK_SUMMARY_MD: &str = \"SUMMARY.md\";\n\nconst MDBOOK_BOOK_TOML: &str = \"book.toml\";\n\n\n\n#[svgbobdoc::transform]\n\n/// Generate mdBook sources.\n\n///\n\n/// ```svgbob\n\n/// .--.---.\n\n/// SV |# \\_ | DOC\n\n/// o-->||__(_)|*-->\n\n/// | \\ \\|\n\n/// '----'-'\n\n/// ```\n", "file_path": "src/mdbook/generate.rs", "rank": 51, "score": 26560.265255471746 }, { "content": " text.push_str(format!(\"\\n\\n### Instantiates modules: \\n\\n\").as_str());\n\n print_instantiated_modules(&mut text, syntax_tree, module_node);*/\n\n\n\n //print_module_comments(&mut text, syntax_tree, module_node, prev_node);\n\n\n\n let mut class_path = Path::new(output_path).join(\"src\").join(file_path);\n\n class_path.set_extension(format!(\"class.{}.md\", class_name));\n\n let mut src_class_path = Path::new(\"src\").join(file_path);\n\n src_class_path.set_extension(format!(\"class.{}.md\", class_name));\n\n let src_class_path = src_class_path.to_str().unwrap();\n\n\n\n std::fs::write(class_path, text).expect(\"failed to write file\");\n\n\n\n top_text.push(format!(\"- [`{} :{}`]({})\\n\", class_name, file_path, src_class_path));\n\n\n\n (class_name.to_string(), file_path.to_string(), src_class_path.to_string())\n\n}", "file_path": "src/mdbook/svclass.rs", "rank": 52, "score": 26560.088468082307 }, { "content": "\n\n text.push_str(format!(\"\\n\\n### Instantiates modules: \\n\\n\").as_str());\n\n print_instantiated_modules(&mut text, syntax_tree, module_node);\n\n\n\n print_module_comments(&mut text, syntax_tree, module_node, prev_node);\n\n\n\n let mut module_path = Path::new(output_path).join(\"src\").join(file_path);\n\n 
module_path.set_extension(format!(\"module.{}.md\", module_name));\n\n let mut src_module_path = Path::new(\"src\").join(file_path);\n\n src_module_path.set_extension(format!(\"module.{}.md\", module_name));\n\n let src_module_path = src_module_path.to_str().unwrap();\n\n\n\n std::fs::write(module_path, text).expect(\"failed to write file\");\n\n\n\n top_text.push(format!(\"- [`{} :{}`]({})\\n\", module_name, file_path, src_module_path));\n\n\n\n (module_name.to_string(), file_path.to_string(), src_module_path.to_string())\n\n}\n\n\n", "file_path": "src/mdbook/svmodule.rs", "rank": 53, "score": 26560.088468082307 }, { "content": " text.push_str(format!(\"\\n\\n### Instantiates modules: \\n\\n\").as_str());\n\n print_instantiated_modules(&mut text, syntax_tree, module_node);*/\n\n\n\n //print_module_comments(&mut text, syntax_tree, module_node, prev_node);\n\n\n\n let mut pkg_path = Path::new(output_path).join(\"src\").join(file_path);\n\n pkg_path.set_extension(format!(\"pkg.{}.md\", pkg_name));\n\n let mut src_pkg_path = Path::new(\"src\").join(file_path);\n\n src_pkg_path.set_extension(format!(\"pkg.{}.md\", pkg_name));\n\n let src_pkg_path = src_pkg_path.to_str().unwrap();\n\n\n\n std::fs::write(pkg_path, text).expect(\"failed to write file\");\n\n\n\n top_text.push(format!(\"- [`{} :{}`]({})\\n\", pkg_name, file_path, src_pkg_path));\n\n\n\n (pkg_name.to_string(), file_path.to_string(), src_pkg_path.to_string())\n\n}", "file_path": "src/mdbook/svpkg.rs", "rank": 54, "score": 26559.694995715927 }, { "content": " text.push_str(format!(\"\\n\\n### Instantiates modules: \\n\\n\").as_str());\n\n print_instantiated_modules(&mut text, syntax_tree, module_node);*/\n\n\n\n //print_module_comments(&mut text, syntax_tree, module_node, prev_node);\n\n\n\n let mut iface_path = Path::new(output_path).join(\"src\").join(file_path);\n\n iface_path.set_extension(format!(\"iface.{}.md\", iface_name));\n\n let mut src_iface_path = Path::new(\"src\").join(file_path);\n\n 
src_iface_path.set_extension(format!(\"iface.{}.md\", iface_name));\n\n let src_iface_path = src_iface_path.to_str().unwrap();\n\n\n\n std::fs::write(iface_path, text).expect(\"failed to write file\");\n\n\n\n top_text.push(format!(\"- [`{} :{}`]({})\\n\", iface_name, file_path, src_iface_path));\n\n\n\n (iface_name.to_string(), file_path.to_string(), src_iface_path.to_string())\n\n}", "file_path": "src/mdbook/sviface.rs", "rank": 55, "score": 26559.694995715927 }, { "content": " for users_doc in &users_md_docs {\n\n text_buf.push(format!(\"{:indent$}- [{}]({})\\n\", \" \",\n\n users_doc.1, users_doc.2, indent=users_doc.0*2));\n\n }\n\n\n\n text_buf.push(\"\\n---\\n\\n\".to_string());\n\n\n\n let mut svtext = create_sv_docs(mdbook_src_dir, &src_files)?;\n\n text_buf.append(&mut svtext);\n\n\n\n let mut writer = BufWriter::new(file);\n\n\n\n for text in &text_buf {\n\n match writer.write_all(text.as_bytes()) {\n\n Err(e) => return Err(e.to_string()),\n\n Ok(_) => (),\n\n }\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/mdbook/generate.rs", "rank": 56, "score": 26558.082807004983 }, { "content": " data.push(format!(\"- [{}]({})\\n\", path, path_str_md));\n\n\n\n //data.push(format!(\"\\n```verilog\\n\"));\n\n //data.push(format!(\"{{{{#include {}}}}}\\n\", src_path.to_str().unwrap()));\n\n //data.push(format!(\"```\\n\\n\"));\n\n }\n\n\n\n fn add_to_list(list: &mut String, path: &str) {\n\n let src_path = Path::new(\"src\").join(path);\n\n let path_str = src_path.to_str().unwrap();\n\n let mut path_str_md = String::from(path_str);\n\n path_str_md.push_str(\".md\");\n\n list.push_str(format!(\" - [{}]({})\\n\", path, path_str_md).as_str());\n\n }\n\n\n\n let mut list = String::new();\n\n\n\n let mut data: Vec<String> = Vec::new();\n\n data.push(\"# Files\\n\\n\".to_string());\n\n\n", "file_path": "src/mdbook/generate.rs", "rank": 57, "score": 26557.60669594069 }, { "content": " }\n\n\n\n let mut list_of_classes: Vec<(String,String,String)> = Vec::new();\n\n\n\n 
let mut text: Vec<String> = Vec::new();\n\n text.push(\"# Classes\\n\\n\".to_string());\n\n\n\n let mut/*env*/ print_classes = |_node: &FsNode, path: &PathBuf, _level: usize| {\n\n if path.is_file() {\n\n if let Some(path_str) = path.to_str() {\n\n let (mut new_text_chunk, mut new_class_chunk) = print_class_info(output_path, path_str);\n\n text.append(&mut new_text_chunk);\n\n list_of_classes.append(&mut new_class_chunk);\n\n }\n\n }\n\n };\n\n\n\n files.traverse_top(&mut print_classes);\n\n\n\n let mut writer = BufWriter::new(file);\n", "file_path": "src/mdbook/generate.rs", "rank": 58, "score": 26556.998006703387 }, { "content": " }\n\n\n\n let mut list_of_pkgs: Vec<(String,String,String)> = Vec::new();\n\n\n\n let mut text: Vec<String> = Vec::new();\n\n text.push(\"# Packages\\n\\n\".to_string());\n\n\n\n let mut/*env*/ print_pkgs = |_node: &FsNode, path: &PathBuf, _level: usize| {\n\n if path.is_file() {\n\n if let Some(path_str) = path.to_str() {\n\n let (mut new_text_chunk, mut new_pkg_chunk) = print_pkg_info(output_path, path_str);\n\n text.append(&mut new_text_chunk);\n\n list_of_pkgs.append(&mut new_pkg_chunk);\n\n }\n\n }\n\n };\n\n\n\n files.traverse_top(&mut print_pkgs);\n\n\n\n let mut writer = BufWriter::new(file);\n", "file_path": "src/mdbook/generate.rs", "rank": 59, "score": 26556.998006703387 }, { "content": " }\n\n\n\n let mut list_of_ifaces: Vec<(String,String,String)> = Vec::new();\n\n\n\n let mut text: Vec<String> = Vec::new();\n\n text.push(\"# Interfaces\\n\\n\".to_string());\n\n\n\n let mut/*env*/ print_ifaces = |_node: &FsNode, path: &PathBuf, _level: usize| {\n\n if path.is_file() {\n\n if let Some(path_str) = path.to_str() {\n\n let (mut new_text_chunk, mut new_iface_chunk) = print_iface_info(output_path, path_str);\n\n text.append(&mut new_text_chunk);\n\n list_of_ifaces.append(&mut new_iface_chunk);\n\n }\n\n }\n\n };\n\n\n\n files.traverse_top(&mut print_ifaces);\n\n\n\n let mut writer = BufWriter::new(file);\n", "file_path": 
"src/mdbook/generate.rs", "rank": 60, "score": 26556.998006703387 }, { "content": " -> (Vec<String>, Vec<(String,String,String)>)\n\n {\n\n mdbook::svmodule::generate_sv_module_info(output_path, path)\n\n }\n\n\n\n let mut list_of_modules: Vec<(String,String,String)> = Vec::new();\n\n\n\n let mut text: Vec<String> = Vec::new();\n\n text.push(\"# Modules\\n\\n\".to_string());\n\n\n\n let mut/*env*/ print_modules = |_node: &FsNode, path: &PathBuf, _level: usize| {\n\n if path.is_file() {\n\n if let Some(path_str) = path.to_str() {\n\n let (mut new_text_chunk, mut new_mod_chunk) = print_module_info(output_path, path_str);\n\n text.append(&mut new_text_chunk);\n\n list_of_modules.append(&mut new_mod_chunk);\n\n }\n\n }\n\n };\n\n\n", "file_path": "src/mdbook/generate.rs", "rank": 61, "score": 26556.8413888044 }, { "content": "//! Build mdBook.\n\n//!\n\n//!\n\n//!\n\n\n\nuse crate::args;\n\nuse std::fs;\n\nuse std::path::Path;\n\nuse mdbook::MDBook;\n\n\n\n// See <https://github.com/rust-lang/mdBook#usage>\n\nconst MDBOOK_BOOK_DIR: &str = \"book\";\n\n//const MDBOOK_SRC_DIR: &str = \"src\";\n\n\n\n#[svgbobdoc::transform]\n\n/// Build mdBook from mdBook sources.\n\n///\n\n/// ```svgbob\n\n/// .--.---.\n\n/// SV |# \\_ | DOC\n\n/// o-->||__(_)|*-->\n\n/// | \\ \\|\n\n/// '----'-'\n\n/// ```\n", "file_path": "src/mdbook/build.rs", "rank": 62, "score": 26556.31940335553 }, { "content": "\n\n let mut pkg_list = create_packages_md(mdbook_src_dir, &sv_files)?;\n\n pkg_list.sort();\n\n text_buf.push(\"- [Packages](packages.md)\\n\".to_string());\n\n for pkg in &pkg_list {\n\n text_buf.push(format!(\" - [`{}` :{}]({})\\n\", pkg.0, pkg.1, pkg.2));\n\n }\n\n\n\n let mut iface_list = create_ifaces_md(mdbook_src_dir, &sv_files)?;\n\n iface_list.sort();\n\n text_buf.push(\"- [Interfaces](ifaces.md)\\n\".to_string());\n\n for iface in &iface_list {\n\n text_buf.push(format!(\" - [`{}` :{}]({})\\n\", iface.0, iface.1, iface.2));\n\n }\n\n\n\n let mut class_list = 
create_classes_md(mdbook_src_dir, &sv_files)?;\n\n class_list.sort();\n\n text_buf.push(\"- [Classes](classes.md)\\n\".to_string());\n\n for class in &class_list {\n\n text_buf.push(format!(\" - [`{}` :{}]({})\\n\", class.0, class.1, class.2));\n\n }\n\n\n\n text_buf.push(\"- [Functions]()\\n\".to_string());\n\n\n\n Ok(text_buf)\n\n}\n", "file_path": "src/mdbook/generate.rs", "rank": 63, "score": 26556.07483035407 }, { "content": " ext.push(\".md\");\n\n let mut sv_md = target.clone();\n\n sv_md.set_extension(ext);\n\n println!(\"generate {:?}\", sv_md);\n\n let fname_str = fname.to_str().unwrap();\n\n let txt = format!(\n\n \"## {}\\n\\n```verilog\\n{{{{#include {}}}}}\\n```\\n\",\n\n fname_str, fname_str);\n\n fs::write(&sv_md, txt).expect(\"failed to create file\");\n\n }\n\n }}\n\n };\n\n\n\n files.nodes.traverse_top(&mut copy_files);\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/mdbook/generate.rs", "rank": 64, "score": 26555.0086967316 }, { "content": " // The type of each node is RefNode\n\n match node {\n\n RefNode::PackageDeclaration(x) => {\n\n let id = unwrap_node!(x, PackageIdentifier).unwrap();\n\n let id = svpar::get_identifier(id).unwrap();\n\n let id = syntax_tree.get_str(&id).unwrap();\n\n let item = print_package(&mut text, output_path, file_path, id,\n\n &syntax_tree, &node, &prev_node);\n\n list.push(item)\n\n }\n\n RefNode::Description(_) => {\n\n }\n\n RefNode::WhiteSpace(_) => {\n\n }\n\n RefNode::Locate(_) => {\n\n }\n\n x => { prev_node = Some(x); () },\n\n }\n\n }\n\n } else {\n\n println!(\"parsing of '{}' failed\\n\", file_path);\n\n }\n\n\n\n (text, list)\n\n}\n\n\n", "file_path": "src/mdbook/svpkg.rs", "rank": 65, "score": 26554.685183063924 }, { "content": " // The type of each node is RefNode\n\n match node {\n\n RefNode::ClassDeclaration(x) => {\n\n let id = unwrap_node!(x, ClassIdentifier).unwrap();\n\n let id = svpar::get_identifier(id).unwrap();\n\n let id = syntax_tree.get_str(&id).unwrap();\n\n let item = print_class(&mut text, 
output_path, file_path, id,\n\n &syntax_tree, &node, &prev_node);\n\n list.push(item)\n\n }\n\n RefNode::Description(_) => {\n\n }\n\n RefNode::WhiteSpace(_) => {\n\n }\n\n RefNode::Locate(_) => {\n\n }\n\n x => { prev_node = Some(x); () },\n\n }\n\n }\n\n } else {\n\n println!(\"parsing of '{}' failed\\n\", file_path);\n\n }\n\n\n\n (text, list)\n\n}\n\n\n", "file_path": "src/mdbook/svclass.rs", "rank": 66, "score": 26554.685183063924 }, { "content": " // The type of each node is RefNode\n\n match node {\n\n RefNode::InterfaceDeclaration(x) => {\n\n let id = unwrap_node!(x, InterfaceIdentifier).unwrap();\n\n let id = svpar::get_identifier(id).unwrap();\n\n let id = syntax_tree.get_str(&id).unwrap();\n\n let item = print_iface(&mut text, output_path, file_path, id,\n\n &syntax_tree, &node, &prev_node);\n\n list.push(item)\n\n }\n\n RefNode::Description(_) => {\n\n }\n\n RefNode::WhiteSpace(_) => {\n\n }\n\n RefNode::Locate(_) => {\n\n }\n\n x => { prev_node = Some(x); () },\n\n }\n\n }\n\n } else {\n\n println!(\"parsing of '{}' failed\\n\", file_path);\n\n }\n\n\n\n (text, list)\n\n}\n\n\n", "file_path": "src/mdbook/sviface.rs", "rank": 67, "score": 26554.685183063924 }, { "content": "[output.html]\n\nadditional-js = [\"loadwavedrom.js\"]\n\n\n\n# cargo install mdbook-linkcheck\n\n# [output.linkcheck] # enable the \"mdbook-linkcheck\" renderer\n\n\n\n\"#, project_name);\n\n\n\n let mut writer = BufWriter::new(file);\n\n\n\n match writer.write_all(data.as_bytes()) {\n\n Err(e) => return Err(e.to_string()),\n\n Ok(_) => (),\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/mdbook/generate.rs", "rank": 68, "score": 26554.413827050732 }, { "content": "//! 
Parse SV file and generate info about Verilog package(s).\n\n//!\n\n//!\n\n//!\n\n\n\nuse sv_parser::{parse_sv, unwrap_node, /*unwrap_locate, Locate,*/ RefNode, SyntaxTree};\n\n//use sv_parser::{PortDirection, NetType, IntegerVectorType};\n\nuse std::collections::HashMap;\n\nuse std::path::{Path, PathBuf};\n\n//use regex::Regex;\n\nuse crate::mdbook::svpar;\n\n\n", "file_path": "src/mdbook/svpkg.rs", "rank": 69, "score": 26554.257661630403 }, { "content": "//! Parse SV file and generate info about Verilog interface(s).\n\n//!\n\n//!\n\n//!\n\n\n\nuse sv_parser::{parse_sv, unwrap_node, /*unwrap_locate, Locate,*/ RefNode, SyntaxTree};\n\n//use sv_parser::{PortDirection, NetType, IntegerVectorType};\n\nuse std::collections::HashMap;\n\nuse std::path::{Path, PathBuf};\n\n//use regex::Regex;\n\nuse crate::mdbook::svpar;\n\n\n", "file_path": "src/mdbook/sviface.rs", "rank": 70, "score": 26554.257661630403 }, { "content": "//! Parse SV file and generate info about Verilog module(s).\n\n//!\n\n//!\n\n//!\n\n\n\nuse sv_parser::{parse_sv, unwrap_node, unwrap_locate, /*Locate,*/ RefNode, SyntaxTree};\n\nuse sv_parser::{PortDirection, NetType, IntegerVectorType};\n\nuse std::collections::HashMap;\n\nuse std::path::{Path, PathBuf};\n\nuse regex::Regex;\n\nuse crate::mdbook::svpar;\n\n\n", "file_path": "src/mdbook/svmodule.rs", "rank": 71, "score": 26554.257661630403 }, { "content": "//! Parse SV file and generate info about Verilog class(s).\n\n//!\n\n//!\n\n//!\n\n\n\nuse sv_parser::{parse_sv, unwrap_node, /*unwrap_locate, Locate,*/ RefNode, SyntaxTree};\n\n//use sv_parser::{PortDirection, NetType, IntegerVectorType};\n\nuse std::collections::HashMap;\n\nuse std::path::{Path, PathBuf};\n\n//use regex::Regex;\n\nuse crate::mdbook::svpar;\n\n\n", "file_path": "src/mdbook/svclass.rs", "rank": 72, "score": 26554.257661630403 }, { "content": " // The type of each node is RefNode\n\n match node {\n\n RefNode::ModuleDeclarationNonansi(x) => {\n\n // unwrap_node! 
gets the nearest ModuleIdentifier from x\n\n let id = unwrap_node!(x, ModuleIdentifier).unwrap();\n\n\n\n let id = svpar::get_identifier(id).unwrap();\n\n\n\n // Original string can be got by SyntaxTree::get_str(self, locate: &Locate)\n\n let id = syntax_tree.get_str(&id).unwrap();\n\n let item = print_module(&mut text, output_path, file_path, id, false,\n\n &syntax_tree, &node, &prev_node);\n\n list.push(item)\n\n }\n\n RefNode::ModuleDeclarationAnsi(x) => {\n\n let id = unwrap_node!(x, ModuleIdentifier).unwrap();\n\n let id = svpar::get_identifier(id).unwrap();\n\n let id = syntax_tree.get_str(&id).unwrap();\n\n let item = print_module(&mut text, output_path, file_path, id, true,\n\n &syntax_tree, &node, &prev_node);\n", "file_path": "src/mdbook/svmodule.rs", "rank": 73, "score": 26554.236794530814 }, { "content": " files.traverse_top(&mut print_modules);\n\n\n\n let mut writer = BufWriter::new(file);\n\n\n\n for t in &text {\n\n match writer.write_all(t.as_bytes()) {\n\n Err(e) => return Err(e.to_string()),\n\n Ok(_) => (),\n\n }\n\n }\n\n\n\n Ok(list_of_modules)\n\n}\n\n\n", "file_path": "src/mdbook/generate.rs", "rank": 74, "score": 26553.945199651815 }, { "content": " Some(m) => m,\n\n None => {mod_instances.insert(mod_name.to_string(), Vec::<String>::new());\n\n mod_instances.get_mut(mod_name).unwrap()},\n\n };\n\n\n\n m.push(inst_name.to_string());\n\n }\n\n _ => (),\n\n }\n\n }\n\n\n\n for (mname, inames) in mod_instances.iter() {\n\n text.push_str(format!(\"- {}\\n\", mname).as_str());\n\n for iname in inames.iter() {\n\n text.push_str(format!(\" - {}\\n\", iname).as_str());\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/mdbook/svmodule.rs", "rank": 75, "score": 26552.27807032113 }, { "content": " Some(RefNode::PortDirection(PortDirection::Output(_))) => \"output ➔\",\n\n Some(RefNode::PortDirection(PortDirection::Inout(_))) => \"inout\",\n\n _ => \"?\",\n\n };\n\n text.push_str(format!(\" * direction: {}\\n\", dir_str).as_str());\n\n\n\n let net_type = 
unwrap_node!(x, NetType);\n\n match net_type {\n\n Some(RefNode::NetType(NetType::Wire(_))) =>\n\n text.push_str(\" * type: wire\\n\"),\n\n _ => (),\n\n }\n\n\n\n let vnet_type = unwrap_node!(x, IntegerVectorType);\n\n match vnet_type {\n\n Some(RefNode::IntegerVectorType(IntegerVectorType::Reg(_))) =>\n\n text.push_str(\" * type: reg\\n\"),\n\n Some(RefNode::IntegerVectorType(IntegerVectorType::Logic(_))) =>\n\n text.push_str(\" * type: logic\\n\"),\n\n Some(RefNode::IntegerVectorType(IntegerVectorType::Bit(_))) =>\n", "file_path": "src/mdbook/svmodule.rs", "rank": 76, "score": 26551.852972980112 }, { "content": "\n\n for t in &text {\n\n match writer.write_all(t.as_bytes()) {\n\n Err(e) => return Err(e.to_string()),\n\n Ok(_) => (),\n\n }\n\n }\n\n\n\n Ok(list_of_pkgs)\n\n}\n\n\n", "file_path": "src/mdbook/generate.rs", "rank": 77, "score": 26551.74344295156 }, { "content": "\n\n for t in &text {\n\n match writer.write_all(t.as_bytes()) {\n\n Err(e) => return Err(e.to_string()),\n\n Ok(_) => (),\n\n }\n\n }\n\n\n\n Ok(list_of_classes)\n\n}\n\n\n", "file_path": "src/mdbook/generate.rs", "rank": 78, "score": 26551.74344295156 }, { "content": "\n\n for t in &text {\n\n match writer.write_all(t.as_bytes()) {\n\n Err(e) => return Err(e.to_string()),\n\n Ok(_) => (),\n\n }\n\n }\n\n\n\n Ok(list_of_ifaces)\n\n}\n\n\n", "file_path": "src/mdbook/generate.rs", "rank": 79, "score": 26551.74344295156 }, { "content": " list.push(item)\n\n }\n\n RefNode::ModuleDeclaration(_) => {\n\n }\n\n RefNode::Description(_) => {\n\n }\n\n RefNode::WhiteSpace(_) => {\n\n }\n\n RefNode::Locate(_) => {\n\n }\n\n x => { prev_node = Some(x); () },\n\n }\n\n }\n\n } else {\n\n println!(\"parsing of '{}' failed\\n\", file_path);\n\n }\n\n\n\n (text, list)\n\n}\n\n\n\n\n\n\n", "file_path": "src/mdbook/svmodule.rs", "rank": 80, "score": 26551.224991115374 }, { "content": "//! 
Helpers to parse SV.\n\n//!\n\n//!\n\n//!\n\n\n\nuse sv_parser::{unwrap_node, unwrap_locate, Locate, RefNode, SyntaxTree};\n\n\n", "file_path": "src/mdbook/svpar.rs", "rank": 81, "score": 26551.222533547007 }, { "content": "//! Parse SV files and generate documentation in mdBook format.\n\n//!\n\n//! Most of the work is done by <https://rust-lang.github.io/mdBook/index.html>.\n\n\n\npub mod files; // collect SV files\n\npub mod generate; // generate mdBook source files\n\npub mod svpar; // parsing helpers\n\npub mod svmodule; // generate md file with SV module info\n\npub mod sviface; // generate md file with SV interface info\n\npub mod svclass;\n\npub mod svpkg; // generate md file with SV package info\n\npub mod build; // build mdBook\n", "file_path": "src/mdbook/mod.rs", "rank": 82, "score": 26549.0083330768 }, { "content": " text.push_str(\" * type: bit\\n\"),\n\n _ => (),\n\n }\n\n\n\n let width = unwrap_node!(x, PackedDimensionRange);\n\n if let Some(width) = width {\n\n //text.push(format!(\"{:?}\\n\", &width));\n\n text.push_str(format!(\" * width: {}\\n\", svpar::get_whole_str(syntax_tree, &width)).as_str());\n\n }\n\n }\n\n _ => (),\n\n }\n\n }\n\n\n\n}\n\n\n", "file_path": "src/mdbook/svmodule.rs", "rank": 83, "score": 26547.43723388028 }, { "content": " ignoreMatch() {\n\n this.isMatchIgnored = true;\n", "file_path": "assets/js/highlight.js", "rank": 84, "score": 23989.624487334146 }, { "content": " openNode(node) {\n\n if (!emitsWrappingTags(node)) return;\n\n\n\n let scope = node.kind;\n\n if (node.sublanguage) {\n\n scope = `language-${scope}`;\n\n } else {\n\n scope = expandScopeName(scope, { prefix: this.classPrefix });\n\n }\n\n this.span(scope);\n", "file_path": "assets/js/highlight.js", "rank": 85, "score": 22973.75156434817 }, { "content": " closeNode(node) {\n\n if (!emitsWrappingTags(node)) return;\n\n\n\n this.buffer += SPAN_CLOSE;\n", "file_path": "assets/js/highlight.js", "rank": 86, "score": 22973.75156434817 }, { "content": " 
openNode(kind) {\n\n /** @type Node */\n\n const node = { kind, children: [] };\n\n this.add(node);\n\n this.stack.push(node);\n", "file_path": "assets/js/highlight.js", "rank": 87, "score": 22970.030454489308 }, { "content": " closeAllNodes() {\n\n while (this.closeNode());\n", "file_path": "assets/js/highlight.js", "rank": 88, "score": 22970.030454489308 }, { "content": " closeNode() {\n\n if (this.stack.length > 1) {\n\n return this.stack.pop();\n\n }\n\n // eslint-disable-next-line no-undefined\n\n return undefined;\n", "file_path": "assets/js/highlight.js", "rank": 89, "score": 22970.030454489308 }, { "content": " addSublanguage(emitter, name) {\n\n /** @type DataNode */\n\n const node = emitter.root;\n\n node.kind = name;\n\n node.sublanguage = true;\n\n this.add(node);\n", "file_path": "assets/js/highlight.js", "rank": 90, "score": 5.799615751594341 }, { "content": "## JDP and CDP access register operations\n\n\n\nThe TAP state machine by design does not allow arbitrary transitions.\n\nBasically, DR states change in the following order:\n\n\n\n1. Select-DR-Scan: start new cycle.\n\n2. Capture-DR\n\n3. Shift-DR\n\n4. Update-DR\n\n5. 
Exit the cycle and go back to Select-DR-Scan.\n\n\n\nWe map the access operations to the Capture-Shift-Update sequence:\n\n\n\n- Capture-DR: move last read ACK and data into DR.\n\n- Shift-DR: shift read data out, shift write data in.\n\n- Update-DR: trigger the update using shifted-in data.\n\n\n\nIn the **Capture-DR** state, the result of the previous transaction,\n\nif any, is returned, together with a 4-bit ACK response.\n\n\n\n<script type=\"WaveDrom\">\n\n{ reg: [\n\n {bits: 4, name: 'ACK', attr: ''},\n\n {bits: 32, name: 'DATA', attr: 'Read Result'}\n\n], \n\n config:{bits: 36}\n\n}\n\n</script>\n\n\n\nIn the **Shift-DR** state, ACK[3:0] is shifted out first, then 32 bits\n\nof \"Read Result\" data is shifted out.\n\n\n\nAs the returned data is shifted out to `TDO`,\n\nnew data is shifted in from `TDI`.\n\n\n\n<script type=\"WaveDrom\">\n\n{ reg: [\n\n {bits: 4, name: 'DATA', attr: 'Data[3:0]'},\n\n {bits: 32, name: 'DATA', attr: 'Data[35:4]'}\n\n], \n\n config:{bits: 36}\n\n}\n\n</script>\n\n\n\nIf the response indicated by ACK[3:0] is OK/FAULT (not WAIT),\n\nthe previous transaction has completed.\n\nAn OK/FAULT response is followed by an **Update-DR** operation\n\nto fulfill the read or write request that is formed\n\nby the values that were shifted into the scan chain.\n\nFor write requests, WR is having a value of 0b1,\n\nthe value in DATAIN[31:0] is written to the\n\nregister selected by ADDR[2:0].\n\nFor read requests, WR is having a value of 0b0,\n\nthe value in DATAIN[31:0] is ignored,\n\nto be read register is selected by ADDR[2:0].\n\n\n\n<script type=\"WaveDrom\">\n\n{ reg: [\n\n {bits: 1, name: 'WR', attr: ''},\n\n {bits: 3, name: 'ADDR', attr: 'select'},\n\n {bits: 32, name: 'DATA', attr: 'Data[35:4]'}\n\n], \n\n config:{bits: 36}\n\n}\n\n</script>\n\n\n", "file_path": "samples/JtagCoreDbg/JtagCoreDbg.md", "rank": 91, "score": 4.3608465860471455 }, { "content": "### TAP State Machine\n\n\n\n![TAP State 
Machine](\n\nhttps://upload.wikimedia.org/wikipedia/commons/thumb/1/1a/JTAG_TAP_Controller_State_Diagram.svg/563px-JTAG_TAP_Controller_State_Diagram.svg.png)\n\n\n\nThe Instruction Register (IR) path is used for loading instructions.\n\nThe Data Register (DR) path is used for reading/writing data from/to data registers.\n\n\n\n### JTAG TAP Instruction Register (IR)\n\n\n\nMost JTAG _instructions_ can broadly be described as connecting different data registers to the TDI/TDO path.\n\nThe `BYPASS` instruction connects TDI directly to TDO through a 1-bit shift register,\n\nthe `IDCODE` instruction connects the identification code register to TDO.\n\nBoth `BYPASS` and `IDCODE` are required by the JTAG standard.\n\n\n\nJTAG IR is using 8-bit encoding.\n\nOnly 3 registers: BYPASS, IDCODE and CDPACC are currently implemented.\n\nRest is reserved for future use.\n\n\n\n```Verilog\n\n localparam INSN_EXTEST = 8'b0000_0000; // reserved\n\n localparam INSN_SAMPLE_PRELOAD = 8'b0000_0001; // reserved\n\n localparam INSN_IDCODE = 8'b0000_0010; // required by the standard\n\n localparam INSN_DEBUG = 8'b0000_1000; // reserved\n\n localparam INSN_MBIST = 8'b0000_1001; // reserved\n\n localparam INSN_JDPACC = 8'b0000_0100; // reserved\n\n localparam INSN_CDPACC = 8'b0000_0101; // implemented\n\n localparam INSN_BYPASS = 8'b1111_1111; // all 1's required by the standard\n\n```\n\n\n", "file_path": "samples/JtagCoreDbg/JtagCoreDbg.md", "rank": 92, "score": 4.115969220942345 }, { "content": "/**@file\n\n * @brief Remote JTAG Bitbang TCP server for OpenOCD.\n\n * @author Igor Lesik 2021\n\n *\n\n */\n\n#include \"OocdRemoteBitbang.h\"\n\n\n\n#include <cstdio>\n\n#include <cstdlib>\n\n#include <cstring>\n\n#include <cassert>\n\n\n\n#include <errno.h>\n\n#include <fcntl.h>\n\n#include <unistd.h>\n\n#include <arpa/inet.h>\n\n\n\nstatic OocdRemoteBitbang* bitbang_server_instance_ = nullptr;\n\n\n\nextern \"C\"\n", "file_path": "samples/JtagCoreDbg/cpp/OocdRemoteBitbang.cpp", "rank": 93, 
"score": 4.0676986292786665 }, { "content": " static _collapse(node) {\n\n if (typeof node === \"string\") return;\n\n if (!node.children) return;\n\n\n\n if (node.children.every(el => typeof el === \"string\")) {\n\n // node.text = node.children.join(\"\");\n\n // delete node.children;\n\n node.children = [node.children.join(\"\")];\n\n } else {\n\n node.children.forEach((child) => {\n\n TokenTree._collapse(child);\n\n });\n\n }\n", "file_path": "assets/js/highlight.js", "rank": 94, "score": 3.691405274263854 }, { "content": " add(node) {\n\n this.top.children.push(node);\n", "file_path": "assets/js/highlight.js", "rank": 95, "score": 3.691405274263854 }, { "content": " static _walk(builder, node) {\n\n if (typeof node === \"string\") {\n\n builder.addText(node);\n\n } else if (node.children) {\n\n builder.openNode(node);\n\n node.children.forEach((child) => this._walk(builder, child));\n\n builder.closeNode(node);\n\n }\n\n return builder;\n", "file_path": "assets/js/highlight.js", "rank": 96, "score": 3.569967274261706 }, { "content": " span(className) {\n\n this.buffer += `<span class=\"${className}\">`;\n", "file_path": "assets/js/highlight.js", "rank": 97, "score": 3.0585170188161204 }, { "content": "### DRUNCTRL - Debug Run Control Register\n\n\n\nThe DRUNCTRL is Write-Only registers which facilitates\n\nrequesting the processor to enter or leave debug state\n\nand single step an instruction.\n\n\n\n<script type=\"WaveDrom\">\n\n{ reg: [\n\n {bits: 1, name: 'H', attr: ''},\n\n {bits: 1, name: 'R', attr: ''},\n\n {bits: 1, name: 'ST', attr: ''}\n\n], \n\n config:{bits: 32}\n\n}\n\n</script>\n\n\n\nTable: DRUNCTRL bits.\n\n\n\n| Bits | Field | Function\n\n| ------- | ----------- | ---------------------------------------------\n\n| [0] | H | Halt request. Writing a 1 to this bit triggers a halting debug event, that is, a request that the processor enters debug state.\n\n| [1] | R | Restart request. 
Writing a 1 to this bit requests that the processor leaves debug state. This request is held until the processor exits debug state. The debugger must poll DBGSC[1] to determine when this request succeeds.\n\n| [2] | STEP | Execute one instruction. Core makes Fetch Group with one instruction (or u-ops) and NOPs from fetched FG. Core gets restarted and then halted again upon the FG retire event.\n\n\n\n\n\n\n\n### ITR - Instruction Transfer Register\n\n\n\nThe ITR registers enable the external debugger with the functionality\n\nto feed instructions into the core\n\nfor execution while in debug state.\n\nThe ITRn is a write-only register.\n\n\n\n\n\n<script type=\"WaveDrom\">\n\n{ reg: [\n\n {bits: 32, name: 'Instruction', attr: ['Instruction opcode']}\n\n], \n\n config:{bits: 32}\n\n}\n\n</script>\n\n\n\nWriting to ITR3 triggers ITR execution when Core is in Debug Mode\n\nDBGSC[0]==1 and ITR execution is enabled DBGSC[3]==1.\n\n\n\nWith 4 instructions external debugger may use XIMM prefix\n\nand generally form a complete Fetch Group to be executed.\n\nUnused instructions should be NOPs.\n\n\n\n\n", "file_path": "samples/JtagCoreDbg/JtagCoreDbg.md", "rank": 98, "score": 2.864665722753226 }, { "content": "/**@file\n\n * @brief Remote JTAG Bitbang TCP server for OpenOCD.\n\n * @author Igor Lesik 2021\n\n *\n\n */\n\n#pragma once\n\n\n\n#include <cstdint>\n\n\n", "file_path": "samples/JtagCoreDbg/cpp/OocdRemoteBitbang.h", "rank": 99, "score": 2.7730627476041327 } ]
Rust
src/macos/mod.rs
ignatenkobrain/rust-locale
9acaf6987a97f8ffcda0e3ce6e8770e7a01ece09
use std::borrow::ToOwned; use std::env::var; use std::fs::{metadata, File}; use std::io::{BufRead, Error, Result, BufReader}; use std::path::{Path, PathBuf}; use super::{LocaleFactory, Numeric, Time}; static LOCALE_DIR: &'static str = "/usr/share/locale"; #[derive(Debug, Clone)] enum LocaleType { Numeric, Time, } fn find_user_locale_path(file_name: &str) -> Option<PathBuf> { let locale_dir = Path::new(LOCALE_DIR); if let Ok(specific_path) = var(file_name) { let path = locale_dir.join(Path::new(&specific_path)).join(Path::new(file_name)); if path.exists() { return Some(path); } } if let Ok(all_path) = var("LC_ALL") { let path = locale_dir.join(Path::new(&all_path)).join(Path::new(file_name)); if path.exists() { return Some(path); } } if let Ok(lang) = var("LANG") { let path = locale_dir.join(Path::new(&lang)).join(Path::new(file_name)); if path.exists() { return Some(path); } } None } fn find_locale_path(locale_type: LocaleType, locale_name: &str) -> Option<PathBuf> { let file_name = match locale_type { LocaleType::Numeric => "LC_NUMERIC", LocaleType::Time => "LC_TIME", }; if locale_name == "" { return find_user_locale_path(&file_name); } else { let locale_dir = Path::new(LOCALE_DIR); let path = locale_dir.join(locale_name).join(file_name); if path.exists() { return Some(path); } } None } fn load_numeric(locale: &str) -> Result<Numeric> { let path = find_locale_path(LocaleType::Numeric, locale); if let Some(path) = path { let file = BufReader::new(try!(File::open(&path))); let lines: Vec<String> = file.lines().map(|x| x.unwrap()).collect(); Ok(Numeric { decimal_sep: lines[0].trim().to_string(), thousands_sep: lines[1].trim().to_string(), }) } else { return Err(Error::last_os_error()); } } fn load_time(locale: &str) -> Result<Time> { let path = find_locale_path(LocaleType::Time, locale); if let Some(path) = path { let file = BufReader::new(try!(File::open(&path))); let mut iter = file.lines().map(|x| x.unwrap().trim().to_string()); let month_names = 
iter.by_ref().take(12).collect(); let long_month_names = iter.by_ref().take(12).collect(); let day_names = iter.by_ref().take(7).collect(); let long_day_names = iter.by_ref().take(7).collect(); Ok(Time { month_names: month_names, long_month_names: long_month_names, day_names: day_names, long_day_names: long_day_names, }) } else { return Err(Error::last_os_error()); } } pub struct MacOSLocaleFactory { locale: String, } impl MacOSLocaleFactory { pub fn new(locale: &str) -> Result<Self> { Ok(MacOSLocaleFactory { locale: locale.to_owned() }) } } impl LocaleFactory for MacOSLocaleFactory { fn get_numeric(&mut self) -> Option<Box<Numeric>> { if let Ok(numeric) = load_numeric(&self.locale) { Some(Box::new(numeric)) } else { None } } fn get_time(&mut self) -> Option<Box<Time>> { if let Ok(time) = load_time(&self.locale) { Some(Box::new(time)) } else { None } } } pub trait PathExt { fn exists(&self) -> bool; } impl PathExt for Path { fn exists(&self) -> bool { metadata(self).is_ok() } }
use std::borrow::ToOwned; use std::env::var; use std::fs::{metadata, File}; use std::io::{BufRead, Error, Result, BufReader}; use std::path::{Path, PathBuf}; use super::{LocaleFactory, Numeric, Time}; static LOCALE_DIR: &'static str = "/usr/share/locale"; #[derive(Debug, Clone)] enum LocaleType { Numeric, Time, } fn find_user_locale_path(file_name: &str) -> Option<PathBuf> { let locale_dir = Path::new(LOCALE_DIR); if let Ok(specific_path) = var(file_name) { let path = locale_dir.join(Path::new(&specific_path)).join(Path::new(file_name)); if path.exists() { return Some(path); } } if let Ok(all_path) = var("LC_ALL") { let path = locale_dir.join(Path::new(&all_path)).join(Path::new(file_name)); if path.exists() { return Some(path); } }
None } fn find_locale_path(locale_type: LocaleType, locale_name: &str) -> Option<PathBuf> { let file_name = match locale_type { LocaleType::Numeric => "LC_NUMERIC", LocaleType::Time => "LC_TIME", }; if locale_name == "" { return find_user_locale_path(&file_name); } else { let locale_dir = Path::new(LOCALE_DIR); let path = locale_dir.join(locale_name).join(file_name); if path.exists() { return Some(path); } } None } fn load_numeric(locale: &str) -> Result<Numeric> { let path = find_locale_path(LocaleType::Numeric, locale); if let Some(path) = path { let file = BufReader::new(try!(File::open(&path))); let lines: Vec<String> = file.lines().map(|x| x.unwrap()).collect(); Ok(Numeric { decimal_sep: lines[0].trim().to_string(), thousands_sep: lines[1].trim().to_string(), }) } else { return Err(Error::last_os_error()); } } fn load_time(locale: &str) -> Result<Time> { let path = find_locale_path(LocaleType::Time, locale); if let Some(path) = path { let file = BufReader::new(try!(File::open(&path))); let mut iter = file.lines().map(|x| x.unwrap().trim().to_string()); let month_names = iter.by_ref().take(12).collect(); let long_month_names = iter.by_ref().take(12).collect(); let day_names = iter.by_ref().take(7).collect(); let long_day_names = iter.by_ref().take(7).collect(); Ok(Time { month_names: month_names, long_month_names: long_month_names, day_names: day_names, long_day_names: long_day_names, }) } else { return Err(Error::last_os_error()); } } pub struct MacOSLocaleFactory { locale: String, } impl MacOSLocaleFactory { pub fn new(locale: &str) -> Result<Self> { Ok(MacOSLocaleFactory { locale: locale.to_owned() }) } } impl LocaleFactory for MacOSLocaleFactory { fn get_numeric(&mut self) -> Option<Box<Numeric>> { if let Ok(numeric) = load_numeric(&self.locale) { Some(Box::new(numeric)) } else { None } } fn get_time(&mut self) -> Option<Box<Time>> { if let Ok(time) = load_time(&self.locale) { Some(Box::new(time)) } else { None } } } pub trait PathExt { fn exists(&self) -> 
bool; } impl PathExt for Path { fn exists(&self) -> bool { metadata(self).is_ok() } }
if let Ok(lang) = var("LANG") { let path = locale_dir.join(Path::new(&lang)).join(Path::new(file_name)); if path.exists() { return Some(path); } }
if_condition
[ { "content": "#[cfg(not(target_os = \"linux\"))]\n\npub fn main() {\n\n println!(\"Listing locale info not (yet) supported on this system\");\n\n}\n", "file_path": "examples/localeinfo.rs", "rank": 5, "score": 33880.010529542735 }, { "content": "/// Return LocaleFactory appropriate for default user locale, as far as it can be determined.\n\n///\n\n/// The returned locale factory provides locale facets implemented using standard localization\n\n/// functionality of the underlying operating system and configured for user's default locale.\n\n//\n\n// FIXME: The global instance should simply default-initialize to default user locale with proper\n\n// fallback if it fails to construct and then we don't need this.\n\npub fn user_locale_factory() -> SystemLocaleFactory {\n\n // FIXME: Error handling? Constructing locale with \"\" should never fail as far as I can tell.\n\n SystemLocaleFactory::new(\"\").unwrap()\n\n}\n\n\n\n// ---- locale facets ----\n\n\n\n\n\n// ---- numeric stuff ----\n\n\n\n/// Information on how to format numbers.\n\n#[derive(Debug, Clone)]\n\npub struct Numeric {\n\n /// The punctuation that separates the decimal part of a non-integer number. Usually a decimal\n\n /// point or a decimal comma.\n\n pub decimal_sep: String,\n\n\n\n /// The punctuation that separates groups of digits in long numbers.\n\n pub thousands_sep: String,\n\n}\n", "file_path": "src/lib.rs", "rank": 6, "score": 26732.294477642296 }, { "content": "//! 
Locale implementation using GNU libc\n\n\n\nuse ::std::borrow::Cow;\n\nuse ::std::ffi::{CStr,CString};\n\nuse ::std::io::{Error,Result};\n\nuse ::std::sync::Arc;\n\nuse super::{LocaleFactory,Numeric,Time};\n\n\n\npub mod ffi;\n\npub mod langinfo;\n\n\n\n/// Wrapper for libc's locale_t.\n\n#[derive(Debug)]\n\npub struct CLocale {\n\n c_locale: ffi::locale_t,\n\n}\n\n\n\nimpl CLocale {\n\n /// Constructs new complete locale.\n\n ///\n", "file_path": "src/linux/mod.rs", "rank": 9, "score": 13.534920797751154 }, { "content": " decode_string(ptr, iconv)\n\n }\n\n fn to_ffi(self) -> ffi::nl_item { self as ffi::nl_item }\n\n}\n\n\n\npub use self::NumericStringItems::*;\n\n\n\npub use self::NumericStringItems::__DECIMAL_POINT as RADIXCHAR;\n\npub use self::NumericStringItems::__THOUSANDS_SEP as THOUSEP;\n\n\n\n#[derive(Copy, Clone, Debug)]\n\npub enum TimeStringItems {\n\n ABDAY_1 = ffi::ABDAY_1 as isize,\n\n ABDAY_2 = ffi::ABDAY_2 as isize,\n\n ABDAY_3 = ffi::ABDAY_3 as isize,\n\n ABDAY_4 = ffi::ABDAY_4 as isize,\n\n ABDAY_5 = ffi::ABDAY_5 as isize,\n\n ABDAY_6 = ffi::ABDAY_6 as isize,\n\n ABDAY_7 = ffi::ABDAY_7 as isize,\n\n DAY_1 = ffi::DAY_1 as isize,\n", "file_path": "src/linux/langinfo.rs", "rank": 11, "score": 9.258473167878348 }, { "content": " _NL_TIME_ERA_ENTRIES = ffi::_NL_TIME_ERA_ENTRIES as isize,\n\n _NL_TIME_TIMEZONE = ffi::_NL_TIME_TIMEZONE as isize,\n\n _DATE_FMT = ffi::_DATE_FMT as isize,\n\n}\n\n\n\nimpl<'a> LanginfoItem<'a> for TimeStringItems {\n\n type Type = Cow<'a, str>;\n\n fn needs_iconv() -> Option<CodesetItems> { Some(CodesetItems::_NL_TIME_CODESET) }\n\n unsafe fn decode(&self, ptr: *const ::libc::c_char, iconv: Option<&IConv>) -> Cow<'a, str> {\n\n decode_string(ptr, iconv)\n\n }\n\n fn to_ffi(self) -> ffi::nl_item { self as ffi::nl_item }\n\n}\n\n\n\npub use self::TimeStringItems::*;\n\n\n\n#[derive(Copy, Clone, Debug)]\n\npub enum TimeStringListItems {\n\n ERA = ffi::ERA as isize,\n\n ALT_DIGITS = ffi::ALT_DIGITS as isize,\n", "file_path": 
"src/linux/langinfo.rs", "rank": 13, "score": 8.939561893019398 }, { "content": " type Type = Cow<'a, str>;\n\n fn needs_iconv() -> Option<CodesetItems> { Some(CodesetItems::_NL_MONETARY_CODESET) }\n\n unsafe fn decode(&self, ptr: *const ::libc::c_char, iconv: Option<&IConv>) -> Cow<'a, str> {\n\n decode_string(ptr, iconv)\n\n }\n\n fn to_ffi(self) -> ffi::nl_item { self as ffi::nl_item }\n\n}\n\n\n\npub use self::MonetaryStringItems::*;\n\n\n\n#[derive(Copy, Clone, Debug)]\n\npub enum NumericStringItems {\n\n __DECIMAL_POINT = ffi::__DECIMAL_POINT as isize,\n\n __THOUSANDS_SEP = ffi::__THOUSANDS_SEP as isize,\n\n}\n\n\n\nimpl<'a> LanginfoItem<'a> for NumericStringItems {\n\n type Type = Cow<'a, str>;\n\n fn needs_iconv() -> Option<CodesetItems> { Some(CodesetItems::_NL_NUMERIC_CODESET) }\n\n unsafe fn decode(&self, ptr: *const ::libc::c_char, iconv: Option<&IConv>) -> Cow<'a, str> {\n", "file_path": "src/linux/langinfo.rs", "rank": 14, "score": 8.83380759803543 }, { "content": "\n\nimpl Numeric {\n\n pub fn load_user_locale() -> Result<Numeric> {\n\n if let Ok(mut factory) = SystemLocaleFactory::new(\"\") {\n\n if let Some(numeric) = factory.get_numeric() {\n\n return Ok(*numeric);\n\n }\n\n }\n\n Ok(Numeric::english())\n\n }\n\n\n\n pub fn english() -> Numeric {\n\n Numeric::new(\".\", \",\")\n\n }\n\n\n\n pub fn new(decimal_sep: &str, thousands_sep: &str) -> Numeric {\n\n Numeric {\n\n decimal_sep: decimal_sep.to_string(),\n\n thousands_sep: thousands_sep.to_string(),\n\n }\n", "file_path": "src/lib.rs", "rank": 15, "score": 8.80743388772583 }, { "content": "}\n\n\n\nimpl<'a> LanginfoItem<'a> for TimeStringListItems {\n\n type Type = Vec<Cow<'a, str>>;\n\n fn needs_iconv() -> Option<CodesetItems> { Some(CodesetItems::_NL_TIME_CODESET) }\n\n unsafe fn decode(&self, ptr: *const ::libc::c_char, iconv: Option<&IConv>) -> Vec<Cow<'a, str>> {\n\n decode_strings(ptr, iconv, 100)\n\n }\n\n fn to_ffi(self) -> ffi::nl_item { self as ffi::nl_item }\n\n}\n\n\n\npub use 
self::TimeStringListItems::*;\n\n\n\n#[derive(Copy, Clone, Debug)]\n\npub enum MessagesStringItems {\n\n YESEXPR = ffi::__YESEXPR as isize,\n\n NOEXPR = ffi::__NOEXPR as isize,\n\n YESSTR = ffi::__YESSTR as isize,\n\n NOSTR = ffi::__NOSTR as isize,\n\n}\n", "file_path": "src/linux/langinfo.rs", "rank": 16, "score": 8.739237999602997 }, { "content": " // XXX: Is there better way to skip Drop then zeroing+check? And the associated need to\n\n // have the field mut though it's otherwise not needed and not desired?\n\n from.c_locale = ::std::ptr::null_mut();\n\n if res.is_null() {\n\n Err(Error::last_os_error())\n\n } else {\n\n Ok(CLocale { c_locale: res, })\n\n }\n\n }\n\n\n\n /// Returns locale ID that is in use for given category.\n\n /// \n\n /// As indicated by `locale_t::names[category]`.\n\n pub fn name<'a>(&'a self, category: ::libc::c_int) -> Cow<'a, str> {\n\n assert!(category >= 0 && category <= 12);\n\n unsafe {\n\n let ptr = (*self.c_locale).__names[category as usize];\n\n if ptr.is_null() {\n\n return Cow::Borrowed(\"C\");\n\n }\n", "file_path": "src/linux/mod.rs", "rank": 17, "score": 8.331709394772105 }, { "content": "#![crate_name = \"locale\"]\n\n#![crate_type = \"rlib\"]\n\n#![crate_type = \"dylib\"]\n\n\n\n//! Localisation is hard.\n\n//!\n\n//! Getting your program to work well in multiple languages is a world fraught with edge-cases,\n\n//! minor grammatical errors, and most importantly, subtle things that don't map over well that you\n\n//! have absolutely no idea are different in other cultures.\n\n//!\n\n//! Many people are aware of the simpler ones, such as whether to use decimal points or decimal\n\n//! commas, or that the names of the months are different in other languages. But there are also\n\n//! different ways to format dates and times, or variations on what day the week begins. It's\n\n//! perfectly possible to write your program unaware of how these things have to be changed at all,\n\n//! 
and that's why it's so hard.\n\n\n\nextern crate libc;\n\n\n\nuse std::fmt::Display;\n\nuse std::io::Result;\n", "file_path": "src/lib.rs", "rank": 18, "score": 8.023880958705647 }, { "content": " }\n\n\n\n pub fn short_day_name(&self, days_from_sunday: usize) -> String {\n\n self.day_names[days_from_sunday].clone()\n\n }\n\n\n\n}\n\n\n\n// ---- tests ----\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n #[test]\n\n fn thousands_separator() {\n\n let numeric_options = Numeric::new(\"/\", \"=\");\n\n assert_eq!(\"1=234=567\".to_string(), numeric_options.format_int(1234567))\n\n }\n\n\n", "file_path": "src/lib.rs", "rank": 19, "score": 8.008994678245113 }, { "content": "\n\n// ---- time stuff ---\n\n\n\n#[derive(Debug, Clone)]\n\npub struct Time {\n\n month_names: Vec<String>,\n\n long_month_names: Vec<String>,\n\n day_names: Vec<String>,\n\n long_day_names: Vec<String>,\n\n}\n\n\n\nimpl Time {\n\n pub fn load_user_locale() -> Result<Time> {\n\n if let Ok(mut factory) = SystemLocaleFactory::new(\"\") {\n\n if let Some(time) = factory.get_time() {\n\n return Ok(*time);\n\n }\n\n }\n\n Ok(Time::english())\n\n }\n", "file_path": "src/lib.rs", "rank": 20, "score": 7.995402477028469 }, { "content": " }\n\n}\n\n\n\n/// Factory of invariant locales.\n\n///\n\n/// Invariant locale, called \"C\" or \"POSIX\" by standard C library locale functions, is default\n\n/// locale definitions for when no information about desired locale is available or localization is\n\n/// turned off.\n\n#[derive(Debug, Clone, Default)]\n\npub struct InvariantLocaleFactory;\n\n\n\nimpl InvariantLocaleFactory {\n\n /// Constructs invariant locale factory.\n\n ///\n\n /// The signature is just so that it matches the other locale factories so the classes can be\n\n /// substituted depending on target operating system and the code using them does not have to\n\n /// care.\n\n #[allow(unused_variables)]\n\n pub fn new(locale: &str) -> Result<Self> {\n\n Ok(InvariantLocaleFactory)\n", 
"file_path": "src/lib.rs", "rank": 21, "score": 7.215105120065461 }, { "content": "\n\nimpl<'a> LanginfoItem<'a> for CodesetItems {\n\n type Type = Cow<'a, str>;\n\n fn needs_iconv() -> Option<CodesetItems> { None }\n\n unsafe fn decode(&self, ptr: *const ::libc::c_char, _: Option<&IConv>) -> Cow<'a, str> {\n\n String::from_utf8_lossy(CStr::from_ptr(ptr).to_bytes())\n\n }\n\n fn to_ffi(self) -> ffi::nl_item { self as ffi::nl_item }\n\n}\n\n\n\npub use self::CodesetItems::*;\n\npub use self::CodesetItems::_NL_CTYPE_CODESET_NAME as CODESET;\n\n\n\n/// `nl_langinfo` items in `LC_COLLATE` category that have string values\n\n///\n\n/// The decoding function uses appropriate \n\n#[derive(Copy, Clone, Debug)]\n\npub enum CollateStringItems {\n\n _NL_COLLATE_RULESETS = ffi::_NL_COLLATE_RULESETS as isize,\n\n}\n", "file_path": "src/linux/langinfo.rs", "rank": 22, "score": 7.203692466699179 }, { "content": " let cfrom = try!(::std::ffi::CString::new(from));\n\n let res = unsafe { ffi::iconv_open(cto.as_ptr(), cfrom.as_ptr()) };\n\n if res.is_null() {\n\n Err(Error::last_os_error())\n\n } else {\n\n Ok(IConv { iconv: res, })\n\n }\n\n }\n\n\n\n /// Convert data with iconv\n\n ///\n\n /// See [`iconv`(3)](http://man7.org/linux/man-pages/man3/iconv.3.html). The parameters are\n\n ///\n\n /// 1. `src`: The input buffer.\n\n /// 2. `dst`: The output buffer.\n\n ///\n\n /// Return values are:\n\n ///\n\n /// 1. Result of `iconv`. 
If -1, the reason can be read from `errno` (unfortunately\n\n /// `::std::io::Error::last_os_error()` does not seem to be able to distinguish them at the\n", "file_path": "src/linux/mod.rs", "rank": 23, "score": 7.193870529461632 }, { "content": " }\n\n }\n\n}\n\n\n\nimpl<F: LocaleFactory, S: LocaleFactory> LocaleFactory for CompositeLocaleFactory<F, S> {\n\n // XXX: Make a macro for this\n\n fn get_numeric(&mut self) -> Option<Box<Numeric>> {\n\n if let Some(v) = self.first.get_numeric() {\n\n Some(v)\n\n } else {\n\n self.second.get_numeric()\n\n }\n\n }\n\n\n\n fn get_time(&mut self) -> Option<Box<Time>> {\n\n if let Some(v) = self.first.get_time() {\n\n Some(v)\n\n } else {\n\n self.second.get_time()\n\n }\n", "file_path": "src/lib.rs", "rank": 24, "score": 6.998565259962653 }, { "content": " /// Constructs `CLocale` with all categories from locale `locale`. See\n\n /// [`newlocale`](http://man7.org/linux/man-pages/man3/newlocale.3.html).\n\n pub fn new(locale: &str) -> Result<Self> {\n\n let cloc = try!(CString::new(locale));\n\n let res = unsafe { ffi::newlocale(ffi::LC_ALL_MASK, cloc.as_ptr(), ::std::ptr::null_mut()) };\n\n if res.is_null() {\n\n Err(Error::last_os_error())\n\n } else {\n\n Ok(CLocale { c_locale: res, })\n\n }\n\n }\n\n\n\n /// Constructs new complete locale.\n\n ///\n\n /// Constructs `CLocale` with specified categories from locale `locale` and the rest\n\n /// from `from`. `from` is destroyed in the process. See\n\n /// [`newlocale`(3)](http://man7.org/linux/man-pages/man3/newlocale.3.html).\n\n pub fn new_from(mask: ::libc::c_int, locale: &str, mut from: Self) -> Result<CLocale> {\n\n let cloc = try!(CString::new(locale));\n\n let res = unsafe { ffi::newlocale(mask, cloc.as_ptr(), from.c_locale) };\n", "file_path": "src/linux/mod.rs", "rank": 25, "score": 6.66090428884611 }, { "content": "//! 
Items for use with nl_langinfo_l and associated properties.\n\n#![allow(non_camel_case_types)]\n\n\n\nuse ::std::borrow::Cow;\n\nuse ::std::ffi::CStr;\n\nuse ::std::fmt::Debug;\n\nuse ::std::mem::transmute_copy;\n\nuse super::ffi;\n\nuse super::IConv;\n\n\n", "file_path": "src/linux/langinfo.rs", "rank": 26, "score": 6.048595518935669 }, { "content": "}\n\n\n\n#[derive(Debug)]\n\npub struct IConv {\n\n iconv: ffi::iconv_t,\n\n}\n\n\n\n/// Wrapper for iconv.\n\n///\n\n/// See [`iconv`(3)](http://man7.org/linux/man-pages/man3/iconv.3.html).\n\n/// \n\n/// On Linux this is part of standard C library and should always be able to convert any charset\n\n/// that the locale component presents, so we can conveniently use it for translating that to the\n\n/// Rust's internal utf-8 encoding there.\n\nimpl IConv {\n\n /// Construct iconv converter.\n\n ///\n\n /// See [`iconv_open`(3)](http://man7.org/linux/man-pages/man3/iconv_open.3.html).\n\n pub fn new(to: &str, from: &str) -> Result<Self> {\n\n let cto = try!(::std::ffi::CString::new(to));\n", "file_path": "src/linux/mod.rs", "rank": 27, "score": 6.038691443452879 }, { "content": " if let Some(ref iconv) = self.iconv[LibCLocaleFactory::codeset_index(cs)] {\n\n conv = Some(&**iconv);\n\n }\n\n }\n\n unsafe {\n\n item.decode(ffi::nl_langinfo_l(item.to_ffi(), self.locale.c_locale), conv)\n\n }\n\n }\n\n}\n\n\n\nimpl LocaleFactory for LibCLocaleFactory {\n\n fn get_numeric(&mut self) -> Option<Box<Numeric>> {\n\n return Some(\n\n Box::new(\n\n Numeric::new(\n\n &self.langinfo(langinfo::RADIXCHAR),\n\n &self.langinfo(langinfo::THOUSEP))));\n\n }\n\n\n\n fn get_time(&mut self) -> Option<Box<Time>> {\n", "file_path": "src/linux/mod.rs", "rank": 28, "score": 5.903164295520957 }, { "content": " let cres: &'a CStr = CStr::from_ptr(ptr);\n\n return String::from_utf8_lossy(cres.to_bytes());\n\n }\n\n }\n\n}\n\n\n\nimpl Drop for CLocale {\n\n fn drop(&mut self) {\n\n if !self.c_locale.is_null() {\n\n unsafe { 
ffi::freelocale(self.c_locale) };\n\n }\n\n }\n\n}\n\n\n\nimpl Clone for CLocale {\n\n fn clone(&self) -> Self {\n\n CLocale {\n\n c_locale: unsafe { ffi::duplocale(self.c_locale) },\n\n }\n\n }\n", "file_path": "src/linux/mod.rs", "rank": 29, "score": 5.874115818959883 }, { "content": " type Type = i8;\n\n fn needs_iconv() -> Option<CodesetItems> { None }\n\n unsafe fn decode(&self, ptr: *const ::libc::c_char, _: Option<&IConv>) -> i8 {\n\n *ptr as i8\n\n }\n\n fn to_ffi(self) -> ffi::nl_item { self as ffi::nl_item }\n\n}\n\n\n\npub use self::ByteItems::*;\n\n\n\n#[derive(Copy, Clone, Debug)]\n\npub enum ByteArrayItems {\n\n // CType\n\n _NL_CTYPE_WIDTH = ffi::_NL_CTYPE_WIDTH as isize,\n\n // Monetary\n\n __MON_GROUPING = ffi::__MON_GROUPING as isize,\n\n // Numeric\n\n __GROUPING = ffi::__GROUPING as isize,\n\n}\n\n\n", "file_path": "src/linux/langinfo.rs", "rank": 30, "score": 5.703498530391757 }, { "content": " ::std::slice::from_raw_parts(ptr as *const i8, ::libc::strlen(ptr) as usize)\n\n }\n\n}\n\n\n\n/// `nl_langinfo` items that return charset names\n\n#[derive(Copy, Clone, Debug)]\n\npub enum CodesetItems {\n\n _NL_COLLATE_CODESET = ffi::_NL_COLLATE_CODESET as isize,\n\n _NL_CTYPE_CODESET_NAME = ffi::_NL_CTYPE_CODESET_NAME as isize,\n\n _NL_MONETARY_CODESET = ffi::_NL_MONETARY_CODESET as isize,\n\n _NL_NUMERIC_CODESET = ffi::_NL_NUMERIC_CODESET as isize,\n\n _NL_TIME_CODESET = ffi::_NL_TIME_CODESET as isize,\n\n _NL_MESSAGES_CODESET = ffi::_NL_MESSAGES_CODESET as isize,\n\n _NL_PAPER_CODESET = ffi::_NL_PAPER_CODESET as isize,\n\n _NL_NAME_CODESET = ffi::_NL_NAME_CODESET as isize,\n\n _NL_ADDRESS_CODESET = ffi::_NL_ADDRESS_CODESET as isize,\n\n _NL_TELEPHONE_CODESET = ffi::_NL_TELEPHONE_CODESET as isize,\n\n _NL_MEASUREMENT_CODESET = ffi::_NL_MEASUREMENT_CODESET as isize,\n\n _NL_IDENTIFICATION_CODESET = ffi::_NL_IDENTIFICATION_CODESET as isize,\n\n}\n", "file_path": "src/linux/langinfo.rs", "rank": 31, "score": 5.392507499384308 }, { "content": " 
long_day_names: vec![\n\n self.langinfo(langinfo::DAY_1).into_owned(),\n\n self.langinfo(langinfo::DAY_2).into_owned(),\n\n self.langinfo(langinfo::DAY_3).into_owned(),\n\n self.langinfo(langinfo::DAY_4).into_owned(),\n\n self.langinfo(langinfo::DAY_5).into_owned(),\n\n self.langinfo(langinfo::DAY_6).into_owned(),\n\n self.langinfo(langinfo::DAY_7).into_owned(),\n\n ],\n\n }));\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use ::std::ffi::CStr;\n\n use super::*;\n\n\n\n fn has_locale(locale: &str) -> bool {\n\n CLocale::new(locale).is_ok()\n", "file_path": "src/linux/mod.rs", "rank": 32, "score": 5.389303633637765 }, { "content": " fn needs_iconv() -> Option<CodesetItems> { Some(CodesetItems::_NL_CTYPE_CODESET_NAME) }\n\n unsafe fn decode(&self, ptr: *const ::libc::c_char, iconv: Option<&IConv>) -> Cow<'a, str> {\n\n decode_string(ptr, iconv)\n\n }\n\n fn to_ffi(self) -> ffi::nl_item { self as ffi::nl_item }\n\n}\n\n\n\npub use self::CTypeStringItems::*;\n\n\n\n#[derive(Copy, Clone, Debug)]\n\npub enum CTypeStringListItems {\n\n _NL_CTYPE_CLASS_NAMES = ffi::_NL_CTYPE_CLASS_NAMES as isize,\n\n _NL_CTYPE_MAP_NAMES = ffi::_NL_CTYPE_MAP_NAMES as isize,\n\n}\n\n\n\nimpl<'a> LanginfoItem<'a> for CTypeStringListItems {\n\n type Type = Vec<Cow<'a, str>>;\n\n fn needs_iconv() -> Option<CodesetItems> { Some(CodesetItems::_NL_CTYPE_CODESET_NAME) }\n\n unsafe fn decode(&self, ptr: *const ::libc::c_char, iconv: Option<&IConv>) -> Vec<Cow<'a, str>> {\n\n decode_strings(ptr, iconv, 32)\n", "file_path": "src/linux/langinfo.rs", "rank": 33, "score": 5.339223849889227 }, { "content": "pub const LC_ALL_MASK: ::libc::c_int = LC_CTYPE_MASK\n\n\t\t\t\t | LC_NUMERIC_MASK\n\n\t\t\t\t | LC_TIME_MASK\n\n\t\t\t\t | LC_COLLATE_MASK\n\n\t\t\t\t | LC_MONETARY_MASK\n\n\t\t\t\t | LC_MESSAGES_MASK\n\n\t\t\t\t | LC_PAPER_MASK\n\n\t\t\t\t | LC_NAME_MASK\n\n\t\t\t\t | LC_ADDRESS_MASK\n\n\t\t\t\t | LC_TELEPHONE_MASK\n\n\t\t\t\t | LC_MEASUREMENT_MASK\n\n\t\t\t\t | 
LC_IDENTIFICATION_MASK;\n\n\n\n#[repr(C)]\n\n#[derive(Copy, Clone)]\n\npub struct Struct_lconv {\n\n pub decimal_point: *mut ::libc::c_char,\n\n pub thousands_sep: *mut ::libc::c_char,\n\n pub grouping: *mut ::libc::c_char,\n\n pub int_curr_symbol: *mut ::libc::c_char,\n", "file_path": "src/linux/ffi.rs", "rank": 34, "score": 5.206173552521824 }, { "content": " assert_eq!(\".\", langinfo(&m, ffi::RADIXCHAR));\n\n assert_eq!(\"Po\", langinfo(&m, ffi::ABDAY_2));\n\n if let Ok(n) = CLocale::new_from(ffi::LC_TIME_MASK, \"de_DE\", m.clone()) {\n\n assert_eq!(\".\", langinfo(&n, ffi::RADIXCHAR));\n\n assert_eq!(\"Mi\", langinfo(&n, ffi::ABDAY_4));\n\n assert_eq!(\".\", langinfo(&m, ffi::RADIXCHAR));\n\n assert_eq!(\"Po\", langinfo(&m, ffi::ABDAY_2));\n\n assert_eq!(\"cs_CZ\", n.name(ffi::LC_CTYPE));\n\n assert_eq!(\"en_GB\", n.name(ffi::LC_NUMERIC));\n\n assert_eq!(\"de_DE\", n.name(ffi::LC_TIME));\n\n }\n\n }\n\n }\n\n }\n\n\n\n #[test]\n\n fn locale_with_convert() {\n\n if let Ok(lf) = LibCLocaleFactory::new(\"cs_CZ\") {\n\n // only test if the host has cs_CZ (non-unicode) locale (travis boxen don't)\n\n assert_eq!(\"ISO-8859-2\", lf.langinfo(langinfo::CODESET));\n\n assert_eq!(\"Út\", lf.langinfo(langinfo::ABDAY_3));\n\n }\n\n }\n\n}\n", "file_path": "src/linux/mod.rs", "rank": 35, "score": 5.096409852816504 }, { "content": "pub enum TelephoneStringItems {\n\n _NL_TELEPHONE_TEL_INT_FMT = ffi::_NL_TELEPHONE_TEL_INT_FMT as isize,\n\n _NL_TELEPHONE_TEL_DOM_FMT = ffi::_NL_TELEPHONE_TEL_DOM_FMT as isize,\n\n _NL_TELEPHONE_INT_SELECT = ffi::_NL_TELEPHONE_INT_SELECT as isize,\n\n _NL_TELEPHONE_INT_PREFIX = ffi::_NL_TELEPHONE_INT_PREFIX as isize,\n\n}\n\n\n\nimpl<'a> LanginfoItem<'a> for TelephoneStringItems {\n\n type Type = Cow<'a, str>;\n\n fn needs_iconv() -> Option<CodesetItems> { Some(CodesetItems::_NL_TELEPHONE_CODESET) }\n\n unsafe fn decode(&self, ptr: *const ::libc::c_char, iconv: Option<&IConv>) -> Cow<'a, str> {\n\n decode_string(ptr, iconv)\n\n }\n\n fn 
to_ffi(self) -> ffi::nl_item { self as ffi::nl_item }\n\n}\n\n\n\npub use self::TelephoneStringItems::*;\n\n\n\n#[derive(Copy, Clone, Debug)]\n\npub enum ByteItems {\n", "file_path": "src/linux/langinfo.rs", "rank": 36, "score": 5.061742805129463 }, { "content": "}\n\n\n\nimpl<'a> LanginfoItem<'a> for NameStringItems {\n\n type Type = Cow<'a, str>;\n\n fn needs_iconv() -> Option<CodesetItems> { Some(CodesetItems::_NL_NAME_CODESET) }\n\n unsafe fn decode(&self, ptr: *const ::libc::c_char, iconv: Option<&IConv>) -> Cow<'a, str> {\n\n decode_string(ptr, iconv)\n\n }\n\n fn to_ffi(self) -> ffi::nl_item { self as ffi::nl_item }\n\n}\n\n\n\npub use self::NameStringItems::*;\n\n\n\n#[derive(Copy, Clone, Debug)]\n\npub enum AddressStringItems {\n\n _NL_ADDRESS_POSTAL_FMT = ffi::_NL_ADDRESS_POSTAL_FMT as isize,\n\n _NL_ADDRESS_COUNTRY_NAME = ffi::_NL_ADDRESS_COUNTRY_NAME as isize,\n\n _NL_ADDRESS_COUNTRY_POST = ffi::_NL_ADDRESS_COUNTRY_POST as isize,\n\n _NL_ADDRESS_COUNTRY_AB2 = ffi::_NL_ADDRESS_COUNTRY_AB2 as isize,\n\n _NL_ADDRESS_COUNTRY_AB3 = ffi::_NL_ADDRESS_COUNTRY_AB3 as isize,\n", "file_path": "src/linux/langinfo.rs", "rank": 37, "score": 4.987272162915824 }, { "content": " langinfo::_NL_CTYPE_CODESET_NAME => 1,\n\n langinfo::_NL_MONETARY_CODESET => 2,\n\n langinfo::_NL_NUMERIC_CODESET => 3,\n\n langinfo::_NL_TIME_CODESET => 4,\n\n langinfo::_NL_MESSAGES_CODESET => 5,\n\n langinfo::_NL_PAPER_CODESET => 6,\n\n langinfo::_NL_NAME_CODESET => 7,\n\n langinfo::_NL_ADDRESS_CODESET => 8,\n\n langinfo::_NL_TELEPHONE_CODESET => 9,\n\n langinfo::_NL_MEASUREMENT_CODESET => 10,\n\n langinfo::_NL_IDENTIFICATION_CODESET => 11,\n\n }\n\n }\n\n\n\n // TODO TODO: Could also try overriding all components to their corresponding UTF-8 variants,\n\n // though that's quite a bit more work.\n\n pub fn new_from_c_locale(c_locale: CLocale) -> Self {\n\n fn get_iconv(codeset: langinfo::CodesetItems, locale: &CLocale) -> Option<Arc<IConv>> {\n\n let cs = unsafe {\n\n 
::std::str::from_utf8_unchecked(\n", "file_path": "src/linux/mod.rs", "rank": 38, "score": 4.889084179195891 }, { "content": " _NL_ADDRESS_COUNTRY_CAR = ffi::_NL_ADDRESS_COUNTRY_CAR as isize,\n\n _NL_ADDRESS_COUNTRY_ISBN = ffi::_NL_ADDRESS_COUNTRY_ISBN as isize,\n\n _NL_ADDRESS_LANG_NAME = ffi::_NL_ADDRESS_LANG_NAME as isize,\n\n _NL_ADDRESS_LANG_AB = ffi::_NL_ADDRESS_LANG_AB as isize,\n\n _NL_ADDRESS_LANG_TERM = ffi::_NL_ADDRESS_LANG_TERM as isize,\n\n _NL_ADDRESS_LANG_LIB = ffi::_NL_ADDRESS_LANG_LIB as isize,\n\n}\n\n\n\nimpl<'a> LanginfoItem<'a> for AddressStringItems {\n\n type Type = Cow<'a, str>;\n\n fn needs_iconv() -> Option<CodesetItems> { Some(CodesetItems::_NL_ADDRESS_CODESET) }\n\n unsafe fn decode(&self, ptr: *const ::libc::c_char, iconv: Option<&IConv>) -> Cow<'a, str> {\n\n decode_string(ptr, iconv)\n\n }\n\n fn to_ffi(self) -> ffi::nl_item { self as ffi::nl_item }\n\n}\n\n\n\npub use self::AddressStringItems::*;\n\n\n\n#[derive(Copy, Clone, Debug)]\n", "file_path": "src/linux/langinfo.rs", "rank": 39, "score": 4.844832772675408 }, { "content": "\n\nimpl<'a> LanginfoItem<'a> for CollateStringItems {\n\n type Type = Cow<'a, str>;\n\n fn needs_iconv() -> Option<CodesetItems> { Some(CodesetItems::_NL_COLLATE_CODESET) }\n\n unsafe fn decode(&self, ptr: *const ::libc::c_char, iconv: Option<&IConv>) -> Cow<'a, str> {\n\n decode_string(ptr, iconv)\n\n }\n\n fn to_ffi(self) -> ffi::nl_item { self as ffi::nl_item }\n\n}\n\n\n\npub use self::CollateStringItems::*;\n\n\n\n/// `nl_langinfo` items in `LC_CTYPE` category that have string values\n\n#[derive(Copy, Clone, Debug)]\n\npub enum CTypeStringItems {\n\n _NL_CTYPE_INDIGITS0_MB = ffi::_NL_CTYPE_INDIGITS0_MB as isize,\n\n _NL_CTYPE_INDIGITS1_MB = ffi::_NL_CTYPE_INDIGITS1_MB as isize,\n\n _NL_CTYPE_INDIGITS2_MB = ffi::_NL_CTYPE_INDIGITS2_MB as isize,\n\n _NL_CTYPE_INDIGITS3_MB = ffi::_NL_CTYPE_INDIGITS3_MB as isize,\n\n _NL_CTYPE_INDIGITS4_MB = ffi::_NL_CTYPE_INDIGITS4_MB as isize,\n", "file_path": 
"src/linux/langinfo.rs", "rank": 40, "score": 4.776674531066129 }, { "content": " ::std::ffi::CStr::from_ptr(\n\n ffi::nl_langinfo_l(codeset as ::libc::c_uint, locale.c_locale)).to_bytes())\n\n };\n\n if cs != \"UTF-8\" {\n\n if let Ok(i) = IConv::new(\"UTF-8\", cs) {\n\n return Some(Arc::new(i));\n\n }\n\n }\n\n return None;\n\n }\n\n return LibCLocaleFactory{\n\n iconv: [\n\n get_iconv(langinfo::_NL_COLLATE_CODESET, &c_locale),\n\n get_iconv(langinfo::_NL_CTYPE_CODESET_NAME, &c_locale),\n\n get_iconv(langinfo::_NL_MONETARY_CODESET, &c_locale),\n\n get_iconv(langinfo::_NL_NUMERIC_CODESET, &c_locale),\n\n get_iconv(langinfo::_NL_TIME_CODESET, &c_locale),\n\n get_iconv(langinfo::_NL_MESSAGES_CODESET, &c_locale),\n\n get_iconv(langinfo::_NL_PAPER_CODESET, &c_locale),\n\n get_iconv(langinfo::_NL_NAME_CODESET, &c_locale),\n", "file_path": "src/linux/mod.rs", "rank": 41, "score": 4.7707156100988914 }, { "content": "\n\nimpl<'a> LanginfoItem<'a> for MessagesStringItems {\n\n type Type = Cow<'a, str>;\n\n fn needs_iconv() -> Option<CodesetItems> { Some(CodesetItems::_NL_MESSAGES_CODESET) }\n\n unsafe fn decode(&self, ptr: *const ::libc::c_char, iconv: Option<&IConv>) -> Cow<'a, str> {\n\n decode_string(ptr, iconv)\n\n }\n\n fn to_ffi(self) -> ffi::nl_item { self as ffi::nl_item }\n\n}\n\n\n\npub use self::MessagesStringItems::*;\n\n\n\n#[derive(Copy, Clone, Debug)]\n\npub enum NameStringItems {\n\n _NL_NAME_NAME_FMT = ffi::_NL_NAME_NAME_FMT as isize,\n\n _NL_NAME_NAME_GEN = ffi::_NL_NAME_NAME_GEN as isize,\n\n _NL_NAME_NAME_MR = ffi::_NL_NAME_NAME_MR as isize,\n\n _NL_NAME_NAME_MRS = ffi::_NL_NAME_NAME_MRS as isize,\n\n _NL_NAME_NAME_MISS = ffi::_NL_NAME_NAME_MISS as isize,\n\n _NL_NAME_NAME_MS = ffi::_NL_NAME_NAME_MS as isize,\n", "file_path": "src/linux/langinfo.rs", "rank": 42, "score": 4.754386762203822 }, { "content": " }\n\n\n\n #[test]\n\n fn bad_locale() {\n\n let l = LibCLocaleFactory::new(\"wrong\");\n\n assert!(l.is_err());\n\n }\n\n\n\n #[test]\n\n fn 
mixed_locale() {\n\n fn langinfo(loc: &CLocale, item: ::libc::c_uint) -> &str {\n\n let res = unsafe { CStr::from_ptr(ffi::nl_langinfo_l(item, loc.c_locale)) };\n\n ::std::str::from_utf8(res.to_bytes()).unwrap()\n\n }\n\n\n\n if let Ok(l) = CLocale::new(\"cs_CZ\") {\n\n // only test if the host has these locales (travis boxen don't)\n\n assert_eq!(\",\", langinfo(&l, ffi::RADIXCHAR));\n\n assert_eq!(\"Po\", langinfo(&l, ffi::ABDAY_2));\n\n if let Ok(m) = CLocale::new_from(ffi::LC_NUMERIC_MASK, \"en_GB\", l) {\n", "file_path": "src/linux/mod.rs", "rank": 43, "score": 4.617478150620515 }, { "content": "/// Trait defining how to obtain various components of a locale.\n\n///\n\n/// Use implementation of this trait to construct parts of the `Locale` object.\n\n///\n\n/// There may be various methods for obtaining locale data. The lowest common denominator is\n\n/// standard C library. It is however quite limited and some systems (notably Android) don't\n\n/// actually contain the corresponding data. Many systems also provide additional configurability\n\n/// for the locale setting (Windows, KDE, etc.) that are only accessible via that system's specific\n\n/// interface. So this trait exists to allow combining the methods for obtaining the data.\n\n///\n\n/// The implementations for individual locale categories are returned boxed, because they may need\n\n/// to be polymorphic _and_ in options to allow combining partial implementations. Creating locale\n\n/// data is not a performance critical operation, so dynamic polymrphism is used for sake of\n\n/// simplicity.\n\n///\n\n/// All methods default to simply returning None, again so partial implementations that delegate to\n\n/// another factory are possible. 
See `CompositeLocaleFactory`.\n\npub trait LocaleFactory {\n\n /// Get implementation of the Numeric locale category.\n\n fn get_numeric(&mut self) -> Option<Box<Numeric>> { None }\n\n\n\n /// Get implementation of the Time locale category.\n\n fn get_time(&mut self) -> Option<Box<Time>> { None }\n\n}\n\n\n\n/// Auxiliary class for creating composing partial implementations of locale factories.\n\n// FIXME: Create (doc) test when there actually is another implementation to substitute.\n\n#[derive(Debug, Clone)]\n\npub struct CompositeLocaleFactory<First: LocaleFactory, Second: LocaleFactory> {\n\n first: First,\n\n second: Second,\n\n}\n\n\n\nimpl<F: LocaleFactory, S: LocaleFactory> CompositeLocaleFactory<F, S> {\n\n pub fn new(first: F, second: S) -> Self {\n\n CompositeLocaleFactory::<F, S> {\n\n first: first, second: second\n", "file_path": "src/lib.rs", "rank": 44, "score": 4.509414813293528 }, { "content": "pub const AM_STR: ::libc::c_uint = 131110;\n\npub const PM_STR: ::libc::c_uint = 131111;\n\npub const D_T_FMT: ::libc::c_uint = 131112;\n\npub const D_FMT: ::libc::c_uint = 131113;\n\npub const T_FMT: ::libc::c_uint = 131114;\n\npub const T_FMT_AMPM: ::libc::c_uint = 131115;\n\npub const ERA: ::libc::c_uint = 131116;\n\npub const __ERA_YEAR: ::libc::c_uint = 131117;\n\npub const ERA_D_FMT: ::libc::c_uint = 131118;\n\npub const ALT_DIGITS: ::libc::c_uint = 131119;\n\npub const ERA_D_T_FMT: ::libc::c_uint = 131120;\n\npub const ERA_T_FMT: ::libc::c_uint = 131121;\n\npub const _NL_TIME_ERA_NUM_ENTRIES: ::libc::c_uint = 131122;\n\npub const _NL_TIME_ERA_ENTRIES: ::libc::c_uint = 131123;\n\npub const _NL_WABDAY_1: ::libc::c_uint = 131124;\n\npub const _NL_WABDAY_2: ::libc::c_uint = 131125;\n\npub const _NL_WABDAY_3: ::libc::c_uint = 131126;\n\npub const _NL_WABDAY_4: ::libc::c_uint = 131127;\n\npub const _NL_WABDAY_5: ::libc::c_uint = 131128;\n\npub const _NL_WABDAY_6: ::libc::c_uint = 131129;\n", "file_path": "src/linux/ffi.rs", "rank": 45, "score": 
4.189653817540249 }, { "content": "pub const LC_ALL: ::libc::c_int = 6;\n\npub const LC_PAPER: ::libc::c_int = 7;\n\npub const LC_NAME: ::libc::c_int = 8;\n\npub const LC_ADDRESS: ::libc::c_int = 9;\n\npub const LC_TELEPHONE: ::libc::c_int = 10;\n\npub const LC_MEASUREMENT: ::libc::c_int = 11;\n\npub const LC_IDENTIFICATION: ::libc::c_int = 12;\n\n\n\npub const LC_CTYPE_MASK: ::libc::c_int = 1 << LC_CTYPE;\n\npub const LC_NUMERIC_MASK: ::libc::c_int = 1 << LC_NUMERIC;\n\npub const LC_TIME_MASK: ::libc::c_int = 1 << LC_TIME;\n\npub const LC_COLLATE_MASK: ::libc::c_int = 1 << LC_COLLATE;\n\npub const LC_MONETARY_MASK: ::libc::c_int = 1 << LC_MONETARY;\n\npub const LC_MESSAGES_MASK: ::libc::c_int = 1 << LC_MESSAGES;\n\npub const LC_PAPER_MASK: ::libc::c_int = 1 << LC_PAPER;\n\npub const LC_NAME_MASK: ::libc::c_int = 1 << LC_NAME;\n\npub const LC_ADDRESS_MASK: ::libc::c_int = 1 << LC_ADDRESS;\n\npub const LC_TELEPHONE_MASK: ::libc::c_int = 1 << LC_TELEPHONE;\n\npub const LC_MEASUREMENT_MASK: ::libc::c_int = 1 << LC_MEASUREMENT;\n\npub const LC_IDENTIFICATION_MASK: ::libc::c_int = 1 << LC_IDENTIFICATION;\n", "file_path": "src/linux/ffi.rs", "rank": 46, "score": 3.9250526467230396 }, { "content": " #[test]\n\n fn thousands_separator_2() {\n\n let numeric_options = Numeric::new(\"/\", \"=\");\n\n assert_eq!(\"123=456\".to_string(), numeric_options.format_int(123456))\n\n }\n\n\n\n #[test]\n\n fn thousands_separator_3() {\n\n let numeric_options = Numeric::new(\"/\", \"=\");\n\n assert_eq!(\"12=345=678\".to_string(), numeric_options.format_int(12345678))\n\n }\n\n}\n", "file_path": "src/lib.rs", "rank": 47, "score": 3.8787309983866027 }, { "content": " get_iconv(langinfo::_NL_ADDRESS_CODESET, &c_locale),\n\n get_iconv(langinfo::_NL_TELEPHONE_CODESET, &c_locale),\n\n get_iconv(langinfo::_NL_MEASUREMENT_CODESET, &c_locale),\n\n get_iconv(langinfo::_NL_IDENTIFICATION_CODESET, &c_locale),\n\n ],\n\n locale: Arc::new(c_locale),\n\n };\n\n }\n\n\n\n pub fn new(locale: &str) -> 
Result<Self> {\n\n let loc = try!(CLocale::new(locale));\n\n\n\n return Ok(LibCLocaleFactory::new_from_c_locale(loc));\n\n }\n\n\n\n pub fn langinfo<'a, I>(&'a self, item: I) -> I::Type\n\n where I: langinfo::LanginfoItem<'a>\n\n {\n\n let mut conv = None;\n\n if let Some(cs) = I::needs_iconv() {\n", "file_path": "src/linux/mod.rs", "rank": 48, "score": 3.7210857450078914 }, { "content": " _NL_IDENTIFICATION_DATE = ffi::_NL_IDENTIFICATION_DATE as isize,\n\n _NL_IDENTIFICATION_CATEGORY = ffi::_NL_IDENTIFICATION_CATEGORY as isize,\n\n _NL_IDENTIFICATION_CATEGORY_1,\n\n _NL_IDENTIFICATION_CATEGORY_2,\n\n _NL_IDENTIFICATION_CATEGORY_3,\n\n _NL_IDENTIFICATION_CATEGORY_4,\n\n _NL_IDENTIFICATION_CATEGORY_5,\n\n _NL_IDENTIFICATION_CATEGORY_6,\n\n _NL_IDENTIFICATION_CATEGORY_7,\n\n _NL_IDENTIFICATION_CATEGORY_8,\n\n _NL_IDENTIFICATION_CATEGORY_9,\n\n _NL_IDENTIFICATION_CATEGORY_10,\n\n _NL_IDENTIFICATION_CATEGORY_11,\n\n _NL_IDENTIFICATION_CATEGORY_12,\n\n}\n\n\n\nimpl<'a> LanginfoItem<'a> for IdentificationStringItems {\n\n type Type = Cow<'a, str>;\n\n fn needs_iconv() -> Option<CodesetItems> { Some(CodesetItems::_NL_IDENTIFICATION_CODESET) }\n\n unsafe fn decode(&self, ptr: *const ::libc::c_char, iconv: Option<&IConv>) -> Cow<'a, str> {\n\n decode_string(ptr, iconv)\n\n }\n\n fn to_ffi(self) -> ffi::nl_item { self as ffi::nl_item }\n\n}\n\n\n\npub use self::IdentificationStringItems::*;\n", "file_path": "src/linux/langinfo.rs", "rank": 49, "score": 3.6500960385820016 }, { "content": "pub const _NL_WALT_DIGITS: ::libc::c_uint = 131170;\n\npub const _NL_WERA_D_T_FMT: ::libc::c_uint = 131171;\n\npub const _NL_WERA_T_FMT: ::libc::c_uint = 131172;\n\npub const _NL_TIME_WEEK_NDAYS: ::libc::c_uint = 131173;\n\npub const _NL_TIME_WEEK_1STDAY: ::libc::c_uint = 131174;\n\npub const _NL_TIME_WEEK_1STWEEK: ::libc::c_uint = 131175;\n\npub const _NL_TIME_FIRST_WEEKDAY: ::libc::c_uint = 131176;\n\npub const _NL_TIME_FIRST_WORKDAY: ::libc::c_uint = 131177;\n\npub const 
_NL_TIME_CAL_DIRECTION: ::libc::c_uint = 131178;\n\npub const _NL_TIME_TIMEZONE: ::libc::c_uint = 131179;\n\npub const _DATE_FMT: ::libc::c_uint = 131180;\n\npub const _NL_W_DATE_FMT: ::libc::c_uint = 131181;\n\npub const _NL_TIME_CODESET: ::libc::c_uint = 131182;\n\npub const _NL_NUM_LC_TIME: ::libc::c_uint = 131183;\n\npub const _NL_COLLATE_NRULES: ::libc::c_uint = 196608;\n\npub const _NL_COLLATE_RULESETS: ::libc::c_uint = 196609;\n\npub const _NL_COLLATE_TABLEMB: ::libc::c_uint = 196610;\n\npub const _NL_COLLATE_WEIGHTMB: ::libc::c_uint = 196611;\n\npub const _NL_COLLATE_EXTRAMB: ::libc::c_uint = 196612;\n\npub const _NL_COLLATE_INDIRECTMB: ::libc::c_uint = 196613;\n", "file_path": "src/linux/ffi.rs", "rank": 50, "score": 3.409202354152744 }, { "content": " _NL_MONETARY_DUO_N_SEP_BY_SPACE = ffi::_NL_MONETARY_DUO_N_SEP_BY_SPACE as isize,\n\n _NL_MONETARY_DUO_INT_P_CS_PRECEDES = ffi::_NL_MONETARY_DUO_INT_P_CS_PRECEDES as isize,\n\n _NL_MONETARY_DUO_INT_P_SEP_BY_SPACE = ffi::_NL_MONETARY_DUO_INT_P_SEP_BY_SPACE as isize,\n\n _NL_MONETARY_DUO_INT_N_CS_PRECEDES = ffi::_NL_MONETARY_DUO_INT_N_CS_PRECEDES as isize,\n\n _NL_MONETARY_DUO_INT_N_SEP_BY_SPACE = ffi::_NL_MONETARY_DUO_INT_N_SEP_BY_SPACE as isize,\n\n _NL_MONETARY_DUO_P_SIGN_POSN = ffi::_NL_MONETARY_DUO_P_SIGN_POSN as isize,\n\n _NL_MONETARY_DUO_N_SIGN_POSN = ffi::_NL_MONETARY_DUO_N_SIGN_POSN as isize,\n\n _NL_MONETARY_DUO_INT_P_SIGN_POSN = ffi::_NL_MONETARY_DUO_INT_P_SIGN_POSN as isize,\n\n _NL_MONETARY_DUO_INT_N_SIGN_POSN = ffi::_NL_MONETARY_DUO_INT_N_SIGN_POSN as isize,\n\n // Time\n\n _NL_TIME_WEEK_NDAYS = ffi::_NL_TIME_WEEK_NDAYS as isize,\n\n _NL_TIME_WEEK_1STWEEK = ffi::_NL_TIME_WEEK_1STWEEK as isize,\n\n _NL_TIME_FIRST_WEEKDAY = ffi::_NL_TIME_FIRST_WEEKDAY as isize,\n\n _NL_TIME_FIRST_WORKDAY = ffi::_NL_TIME_FIRST_WORKDAY as isize,\n\n _NL_TIME_CAL_DIRECTION = ffi::_NL_TIME_CAL_DIRECTION as isize,\n\n // Measurement\n\n _NL_MEASUREMENT_MEASUREMENT = ffi::_NL_MEASUREMENT_MEASUREMENT as 
isize,\n\n}\n\n\n\nimpl<'a> LanginfoItem<'a> for ByteItems {\n", "file_path": "src/linux/langinfo.rs", "rank": 51, "score": 3.256062506982757 }, { "content": "}\n\n\n\nimpl<'a> LanginfoItem<'a> for IntegralItems {\n\n type Type = u32;\n\n fn needs_iconv() -> Option<CodesetItems> { None }\n\n unsafe fn decode(&self, ptr: *const ::libc::c_char, _: Option<&IConv>) -> u32 {\n\n transmute_copy(&ptr)\n\n }\n\n fn to_ffi(self) -> ffi::nl_item { self as ffi::nl_item }\n\n}\n\n\n\npub use self::IntegralItems::*;\n\n\n\n#[derive(Copy, Clone, Debug)]\n\npub enum CharacterItems {\n\n // CType\n\n _NL_CTYPE_OUTDIGIT0_WC = ffi::_NL_CTYPE_OUTDIGIT0_WC as isize,\n\n _NL_CTYPE_OUTDIGIT1_WC = ffi::_NL_CTYPE_OUTDIGIT1_WC as isize,\n\n _NL_CTYPE_OUTDIGIT2_WC = ffi::_NL_CTYPE_OUTDIGIT2_WC as isize,\n\n _NL_CTYPE_OUTDIGIT3_WC = ffi::_NL_CTYPE_OUTDIGIT3_WC as isize,\n", "file_path": "src/linux/langinfo.rs", "rank": 52, "score": 3.1984896231913487 }, { "content": "/* originally generated by rust-bindgen */\n\n\n\n//! Bindings for GNU LibC localization functions.\n\n//!\n\n//! This was generated by rust-bindgen from `<locale.h>`, `<langinfo.h>` and `<iconv.h>`. Iconv is\n\n//! included for converting output from nl_langinfo_l to utf-8 in case the user-selected locale is\n\n//! not utf-8. Thankfully in Linux all these functions are part of the libc itself, so they are\n\n//! already available and we don't need to do any additional linking and so there is no need to put\n\n//! this in separate crate either.\n\n\n\n#![allow(non_camel_case_types)]\n\n\n\n// Note: rust-bindgen does not generate defines, so these had to be cone manually. 
Fortunately the\n\n// parameters for nl_langinfo are in anonymous enum and were generated.\n\npub const LC_CTYPE: ::libc::c_int = 0;\n\npub const LC_NUMERIC: ::libc::c_int = 1;\n\npub const LC_TIME: ::libc::c_int = 2;\n\npub const LC_COLLATE: ::libc::c_int = 3;\n\npub const LC_MONETARY: ::libc::c_int = 4;\n\npub const LC_MESSAGES: ::libc::c_int = 5;\n", "file_path": "src/linux/ffi.rs", "rank": 53, "score": 3.0965509149543373 }, { "content": " \"Thu\".to_string(), \"Fri\".to_string(), \"Sat\".to_string(),\n\n ],\n\n long_day_names: vec![\n\n \"Sunday\".to_string(),\n\n \"Monday\".to_string(), \"Tuesday\".to_string(), \"Wednesday\".to_string(),\n\n \"Thursday\".to_string(), \"Friday\".to_string(), \"Saturday\".to_string(),\n\n ],\n\n }\n\n }\n\n\n\n pub fn long_month_name(&self, months_from_january: usize) -> String {\n\n self.long_month_names[months_from_january].clone()\n\n }\n\n\n\n pub fn short_month_name(&self, months_from_january: usize) -> String {\n\n self.month_names[months_from_january].clone()\n\n }\n\n\n\n pub fn long_day_name(&self, days_from_sunday: usize) -> String {\n\n self.day_names[days_from_sunday].clone()\n", "file_path": "src/lib.rs", "rank": 54, "score": 2.9682809697614747 }, { "content": "impl<'a> LanginfoItem<'a> for ByteArrayItems {\n\n type Type = &'a [i8];\n\n fn needs_iconv() -> Option<CodesetItems> { None }\n\n unsafe fn decode(&self, ptr: *const ::libc::c_char, _: Option<&IConv>) -> &'a [i8] {\n\n decode_bytes(ptr)\n\n }\n\n fn to_ffi(self) -> ffi::nl_item { self as ffi::nl_item }\n\n}\n\n\n\npub use self::ByteArrayItems::*;\n\n\n\n#[derive(Copy, Clone, Debug)]\n\npub enum IntegralItems {\n\n // Collate\n\n _NL_COLLATE_NRULES = ffi::_NL_COLLATE_NRULES as isize,\n\n _NL_COLLATE_SYMB_HASH_SIZEMB = ffi::_NL_COLLATE_SYMB_HASH_SIZEMB as isize,\n\n // CType\n\n _NL_CTYPE_MB_CUR_MAX = ffi::_NL_CTYPE_MB_CUR_MAX as isize,\n\n _NL_CTYPE_CLASS_OFFSET = ffi::_NL_CTYPE_CLASS_OFFSET as isize,\n\n _NL_CTYPE_MAP_OFFSET = ffi::_NL_CTYPE_MAP_OFFSET as 
isize,\n", "file_path": "src/linux/langinfo.rs", "rank": 55, "score": 2.9292800896649216 }, { "content": " show(&f, langinfo::_NL_MONETARY_DUO_INT_N_SEP_BY_SPACE);\n\n show(&f, langinfo::_NL_MONETARY_DUO_P_SIGN_POSN);\n\n show(&f, langinfo::_NL_MONETARY_DUO_N_SIGN_POSN);\n\n show(&f, langinfo::_NL_MONETARY_DUO_INT_P_SIGN_POSN);\n\n show(&f, langinfo::_NL_MONETARY_DUO_INT_N_SIGN_POSN);\n\n show(&f, langinfo::_NL_TIME_WEEK_NDAYS);\n\n show(&f, langinfo::_NL_TIME_WEEK_1STWEEK);\n\n show(&f, langinfo::_NL_TIME_FIRST_WEEKDAY);\n\n show(&f, langinfo::_NL_TIME_FIRST_WORKDAY);\n\n show(&f, langinfo::_NL_TIME_CAL_DIRECTION);\n\n show(&f, langinfo::_NL_MEASUREMENT_MEASUREMENT);\n\n show(&f, langinfo::_NL_CTYPE_WIDTH);\n\n show(&f, langinfo::__MON_GROUPING);\n\n show(&f, langinfo::__GROUPING);\n\n show(&f, langinfo::_NL_COLLATE_NRULES);\n\n show(&f, langinfo::_NL_COLLATE_SYMB_HASH_SIZEMB);\n\n show(&f, langinfo::_NL_CTYPE_MB_CUR_MAX);\n\n show(&f, langinfo::_NL_CTYPE_CLASS_OFFSET);\n\n show(&f, langinfo::_NL_CTYPE_MAP_OFFSET);\n\n show(&f, langinfo::_NL_CTYPE_INDIGITS_MB_LEN);\n", "file_path": "examples/localeinfo.rs", "rank": 56, "score": 2.926103318620224 }, { "content": " _NL_CTYPE_OUTDIGIT4_WC = ffi::_NL_CTYPE_OUTDIGIT4_WC as isize,\n\n _NL_CTYPE_OUTDIGIT5_WC = ffi::_NL_CTYPE_OUTDIGIT5_WC as isize,\n\n _NL_CTYPE_OUTDIGIT6_WC = ffi::_NL_CTYPE_OUTDIGIT6_WC as isize,\n\n _NL_CTYPE_OUTDIGIT7_WC = ffi::_NL_CTYPE_OUTDIGIT7_WC as isize,\n\n _NL_CTYPE_OUTDIGIT8_WC = ffi::_NL_CTYPE_OUTDIGIT8_WC as isize,\n\n _NL_CTYPE_OUTDIGIT9_WC = ffi::_NL_CTYPE_OUTDIGIT9_WC as isize,\n\n // Monetary\n\n _NL_MONETARY_DECIMAL_POINT_WC = ffi::_NL_MONETARY_DECIMAL_POINT_WC as isize,\n\n _NL_MONETARY_THOUSANDS_SEP_WC = ffi::_NL_MONETARY_THOUSANDS_SEP_WC as isize,\n\n // Numeric\n\n _NL_NUMERIC_DECIMAL_POINT_WC = ffi::_NL_NUMERIC_DECIMAL_POINT_WC as isize,\n\n _NL_NUMERIC_THOUSANDS_SEP_WC = ffi::_NL_NUMERIC_THOUSANDS_SEP_WC as isize,\n\n}\n\n\n\nimpl<'a> LanginfoItem<'a> for CharacterItems {\n\n type 
Type = char;\n\n fn needs_iconv() -> Option<CodesetItems> { None }\n\n unsafe fn decode(&self, ptr: *const ::libc::c_char, _: Option<&IConv>) -> char {\n\n transmute_copy(&ptr)\n\n }\n", "file_path": "src/linux/langinfo.rs", "rank": 57, "score": 2.9135094528992207 }, { "content": " }\n\n fn to_ffi(self) -> ffi::nl_item { self as ffi::nl_item }\n\n}\n\n\n\npub use self::CTypeStringListItems::*;\n\n\n\n#[derive(Copy, Clone, Debug)]\n\npub enum MonetaryStringItems {\n\n INT_CURR_SYMBOL = ffi::__INT_CURR_SYMBOL as isize,\n\n CURRENCY_SYMBOL = ffi::__CURRENCY_SYMBOL as isize,\n\n MON_DECIMAL_POINT = ffi::__MON_DECIMAL_POINT as isize,\n\n MON_THOUSANDS_SEP = ffi::__MON_THOUSANDS_SEP as isize,\n\n POSITIVE_SIGN = ffi::__POSITIVE_SIGN as isize,\n\n NEGATIVE_SIGN = ffi::__NEGATIVE_SIGN as isize,\n\n _NL_MONETARY_CRNCYSTR = ffi::_NL_MONETARY_CRNCYSTR as isize,\n\n _NL_MONETARY_DUO_INT_CURR_SYMBOL = ffi::_NL_MONETARY_DUO_INT_CURR_SYMBOL as isize,\n\n _NL_MONETARY_DUO_CURRENCY_SYMBOL = ffi::_NL_MONETARY_DUO_CURRENCY_SYMBOL as isize,\n\n}\n\n\n\nimpl<'a> LanginfoItem<'a> for MonetaryStringItems {\n", "file_path": "src/linux/langinfo.rs", "rank": 58, "score": 2.849339305557953 }, { "content": " MON_3 = ffi::MON_3 as isize,\n\n MON_4 = ffi::MON_4 as isize,\n\n MON_5 = ffi::MON_5 as isize,\n\n MON_6 = ffi::MON_6 as isize,\n\n MON_7 = ffi::MON_7 as isize,\n\n MON_8 = ffi::MON_8 as isize,\n\n MON_9 = ffi::MON_9 as isize,\n\n MON_10 = ffi::MON_10 as isize,\n\n MON_11 = ffi::MON_11 as isize,\n\n MON_12 = ffi::MON_12 as isize,\n\n AM_STR = ffi::AM_STR as isize,\n\n PM_STR = ffi::PM_STR as isize,\n\n D_T_FMT = ffi::D_T_FMT as isize,\n\n D_FMT = ffi::D_FMT as isize,\n\n T_FMT = ffi::T_FMT as isize,\n\n T_FMT_AMPM = ffi::T_FMT_AMPM as isize,\n\n ERA_YEAR = ffi::__ERA_YEAR as isize,\n\n ERA_D_FMT = ffi::ERA_D_FMT as isize,\n\n ERA_D_T_FMT = ffi::ERA_D_T_FMT as isize,\n\n ERA_T_FMT = ffi::ERA_T_FMT as isize,\n", "file_path": "src/linux/langinfo.rs", "rank": 59, "score": 2.7742404124748243 
}, { "content": "pub const _NL_MONETARY_UNO_VALID_FROM: ::libc::c_uint = 262182;\n\npub const _NL_MONETARY_UNO_VALID_TO: ::libc::c_uint = 262183;\n\npub const _NL_MONETARY_DUO_VALID_FROM: ::libc::c_uint = 262184;\n\npub const _NL_MONETARY_DUO_VALID_TO: ::libc::c_uint = 262185;\n\npub const _NL_MONETARY_CONVERSION_RATE: ::libc::c_uint = 262186;\n\npub const _NL_MONETARY_DECIMAL_POINT_WC: ::libc::c_uint = 262187;\n\npub const _NL_MONETARY_THOUSANDS_SEP_WC: ::libc::c_uint = 262188;\n\npub const _NL_MONETARY_CODESET: ::libc::c_uint = 262189;\n\npub const _NL_NUM_LC_MONETARY: ::libc::c_uint = 262190;\n\npub const __DECIMAL_POINT: ::libc::c_uint = 65536;\n\npub const RADIXCHAR: ::libc::c_uint = 65536;\n\npub const __THOUSANDS_SEP: ::libc::c_uint = 65537;\n\npub const THOUSEP: ::libc::c_uint = 65537;\n\npub const __GROUPING: ::libc::c_uint = 65538;\n\npub const _NL_NUMERIC_DECIMAL_POINT_WC: ::libc::c_uint = 65539;\n\npub const _NL_NUMERIC_THOUSANDS_SEP_WC: ::libc::c_uint = 65540;\n\npub const _NL_NUMERIC_CODESET: ::libc::c_uint = 65541;\n\npub const _NL_NUM_LC_NUMERIC: ::libc::c_uint = 65542;\n\npub const __YESEXPR: ::libc::c_uint = 327680;\n\npub const __NOEXPR: ::libc::c_uint = 327681;\n", "file_path": "src/linux/ffi.rs", "rank": 60, "score": 2.7662822574268717 }, { "content": " _NL_CTYPE_INDIGITS_MB_LEN = ffi::_NL_CTYPE_INDIGITS_MB_LEN as isize,\n\n _NL_CTYPE_INDIGITS_WC_LEN = ffi::_NL_CTYPE_INDIGITS_WC_LEN as isize,\n\n _NL_CTYPE_TRANSLIT_TAB_SIZE = ffi::_NL_CTYPE_TRANSLIT_TAB_SIZE as isize,\n\n _NL_CTYPE_TRANSLIT_DEFAULT_MISSING_LEN = ffi::_NL_CTYPE_TRANSLIT_DEFAULT_MISSING_LEN as isize,\n\n _NL_CTYPE_TRANSLIT_IGNORE_LEN = ffi::_NL_CTYPE_TRANSLIT_IGNORE_LEN as isize,\n\n _NL_CTYPE_MAP_TO_NONASCII = ffi::_NL_CTYPE_MAP_TO_NONASCII as isize,\n\n _NL_CTYPE_NONASCII_CASE = ffi::_NL_CTYPE_NONASCII_CASE as isize,\n\n // Monetary\n\n _NL_MONETARY_UNO_VALID_FROM = ffi::_NL_MONETARY_UNO_VALID_FROM as isize,\n\n _NL_MONETARY_UNO_VALID_TO = ffi::_NL_MONETARY_UNO_VALID_TO as 
isize,\n\n _NL_MONETARY_DUO_VALID_FROM = ffi::_NL_MONETARY_DUO_VALID_FROM as isize,\n\n _NL_MONETARY_DUO_VALID_TO = ffi::_NL_MONETARY_DUO_VALID_TO as isize,\n\n // Time\n\n _NL_TIME_ERA_NUM_ENTRIES = ffi::_NL_TIME_ERA_NUM_ENTRIES as isize,\n\n _NL_TIME_WEEK_1STDAY = ffi::_NL_TIME_WEEK_1STDAY as isize,\n\n // Paper\n\n _NL_PAPER_HEIGHT = ffi::_NL_PAPER_HEIGHT as isize,\n\n _NL_PAPER_WIDTH = ffi::_NL_PAPER_WIDTH as isize,\n\n // Address\n\n _NL_ADDRESS_COUNTRY_NUM = ffi::_NL_ADDRESS_COUNTRY_NUM as isize,\n", "file_path": "src/linux/langinfo.rs", "rank": 61, "score": 2.655252873413019 }, { "content": " }\n\n}\n\n\n\nimpl LocaleFactory for InvariantLocaleFactory {\n\n // NOTE: Yep, it's empty. This just returns nothing and the Locale constructor will take care\n\n // of the actual defaults.\n\n}\n\n\n\n#[cfg(target_os = \"linux\")]\n\npub mod linux;\n\n\n\n#[cfg(target_os = \"linux\")]\n\npub use linux::LibCLocaleFactory as SystemLocaleFactory;\n\n\n\n// FIXME: #[cfg(target_os = \"macos\")], but for the moment I need to test whether it compiles, don't\n\n// have MacOS box nor cross-compiler and it does not actually contain anything system-specific yet\n\npub mod macos;\n\n\n\n#[cfg(target_os = \"macos\")]\n\npub use macos::MacOSLocaleFactory as SystemLocaleFactory;\n\n\n\n#[cfg(not(any(target_os = \"linux\", target_os = \"macos\")))]\n\npub use InvariantLocaleFactory as SystemLocaleFactory;\n\n\n", "file_path": "src/lib.rs", "rank": 62, "score": 2.6498431138991636 }, { "content": " }\n\n }\n\n return String::from_utf8_lossy(cres.to_bytes());\n\n}\n\n\n\nunsafe fn decode_strings<'a>(mut ptr: *const ::libc::c_char, iconv: Option<&IConv>, max: usize) -> Vec<Cow<'a, str>> {\n\n let mut res = Vec::with_capacity(max);\n\n while max > 0 && !ptr.is_null() && *ptr != 0 {\n\n let len = CStr::from_ptr(ptr).to_bytes_with_nul().len();\n\n let s = decode_string(ptr, iconv);\n\n ptr = ptr.offset(len as isize);\n\n res.push(s);\n\n }\n\n return res;\n\n}\n\n\n\nunsafe fn 
decode_bytes<'a>(ptr: *const ::libc::c_char) -> &'a [i8] {\n\n if ptr.is_null() {\n\n &[]\n\n } else {\n", "file_path": "src/linux/langinfo.rs", "rank": 63, "score": 2.6042687266900604 }, { "content": " fn to_ffi(self) -> ffi::nl_item { self as ffi::nl_item }\n\n}\n\n\n\npub use self::CharacterItems::*;\n\n\n\n#[derive(Copy, Clone, Debug)]\n\npub enum IdentificationStringItems {\n\n _NL_IDENTIFICATION_TITLE = ffi::_NL_IDENTIFICATION_TITLE as isize,\n\n _NL_IDENTIFICATION_SOURCE = ffi::_NL_IDENTIFICATION_SOURCE as isize,\n\n _NL_IDENTIFICATION_ADDRESS = ffi::_NL_IDENTIFICATION_ADDRESS as isize,\n\n _NL_IDENTIFICATION_CONTACT = ffi::_NL_IDENTIFICATION_CONTACT as isize,\n\n _NL_IDENTIFICATION_EMAIL = ffi::_NL_IDENTIFICATION_EMAIL as isize,\n\n _NL_IDENTIFICATION_TEL = ffi::_NL_IDENTIFICATION_TEL as isize,\n\n _NL_IDENTIFICATION_FAX = ffi::_NL_IDENTIFICATION_FAX as isize,\n\n _NL_IDENTIFICATION_LANGUAGE = ffi::_NL_IDENTIFICATION_LANGUAGE as isize,\n\n _NL_IDENTIFICATION_TERRITORY = ffi::_NL_IDENTIFICATION_TERRITORY as isize,\n\n _NL_IDENTIFICATION_AUDIENCE = ffi::_NL_IDENTIFICATION_AUDIENCE as isize,\n\n _NL_IDENTIFICATION_APPLICATION = ffi::_NL_IDENTIFICATION_APPLICATION as isize,\n\n _NL_IDENTIFICATION_ABBREVIATION = ffi::_NL_IDENTIFICATION_ABBREVIATION as isize,\n\n _NL_IDENTIFICATION_REVISION = ffi::_NL_IDENTIFICATION_REVISION as isize,\n", "file_path": "src/linux/langinfo.rs", "rank": 64, "score": 2.5816179179253917 }, { "content": " show(&f, langinfo::ABMON_8);\n\n show(&f, langinfo::ABMON_9);\n\n show(&f, langinfo::ABMON_10);\n\n show(&f, langinfo::ABMON_11);\n\n show(&f, langinfo::ABMON_12);\n\n show(&f, langinfo::MON_1);\n\n show(&f, langinfo::MON_2);\n\n show(&f, langinfo::MON_3);\n\n show(&f, langinfo::MON_4);\n\n show(&f, langinfo::MON_5);\n\n show(&f, langinfo::MON_6);\n\n show(&f, langinfo::MON_7);\n\n show(&f, langinfo::MON_8);\n\n show(&f, langinfo::MON_9);\n\n show(&f, langinfo::MON_10);\n\n show(&f, langinfo::MON_11);\n\n show(&f, 
langinfo::MON_12);\n\n show(&f, langinfo::AM_STR);\n\n show(&f, langinfo::PM_STR);\n\n show(&f, langinfo::D_T_FMT);\n", "file_path": "examples/localeinfo.rs", "rank": 65, "score": 2.5401717648414794 }, { "content": "\n\n pub fn english() -> Time {\n\n Time {\n\n month_names: vec![\n\n \"Jan\".to_string(), \"Feb\".to_string(), \"Mar\".to_string(),\n\n \"Apr\".to_string(), \"May\".to_string(), \"Jun\".to_string(),\n\n \"Jul\".to_string(), \"Aug\".to_string(), \"Sep\".to_string(),\n\n \"Oct\".to_string(), \"Nov\".to_string(), \"Dec\".to_string(),\n\n ],\n\n long_month_names: vec![\n\n \"January\".to_string(), \"February\".to_string(),\n\n \"March\".to_string(), \"April\".to_string(),\n\n \"May\".to_string(), \"June\".to_string(),\n\n \"July\".to_string(), \"August\".to_string(),\n\n \"September\".to_string(), \"October\".to_string(),\n\n \"November\".to_string(), \"December\".to_string(),\n\n ],\n\n day_names: vec![\n\n \"Sun\".to_string(),\n\n \"Mon\".to_string(), \"Tue\".to_string(), \"Wed\".to_string(),\n", "file_path": "src/lib.rs", "rank": 66, "score": 2.511614266602068 }, { "content": " /// moment).\n\n /// 2. Number of bytes processed from `src`.\n\n /// 3. Number of bytes written to `dst`.\n\n ///\n\n /// The C interface returns the remaining buffers instead, but that is actually hard to work\n\n /// with in Rust.\n\n pub fn convert(&self, src: &[u8], dst: &mut [u8]) -> (isize, usize, usize) {\n\n let mut inptr: *const ::libc::c_char = src.as_ptr() as *const ::libc::c_char;\n\n let mut insize: ::libc::size_t = src.len() as ::libc::size_t;\n\n let mut outptr: *mut ::libc::c_char = dst.as_ptr() as *mut ::libc::c_char;\n\n let mut outsize: ::libc::size_t = dst.len() as ::libc::size_t;\n\n // XXX: Do we need error handling? 
We don't expect errors and can't do much about them here.\n\n let res = unsafe {\n\n ffi::iconv(self.iconv,\n\n &mut inptr, &mut insize,\n\n &mut outptr, &mut outsize)\n\n };\n\n (res as isize, src.len() - (insize as usize), dst.len() - (outsize as usize))\n\n }\n\n}\n", "file_path": "src/linux/mod.rs", "rank": 67, "score": 2.385383241100333 }, { "content": " show(&f, langinfo::D_FMT);\n\n show(&f, langinfo::T_FMT);\n\n show(&f, langinfo::T_FMT_AMPM);\n\n show(&f, langinfo::ERA_YEAR);\n\n show(&f, langinfo::ERA_D_FMT);\n\n show(&f, langinfo::ERA_D_T_FMT);\n\n show(&f, langinfo::ERA_T_FMT);\n\n show(&f, langinfo::_NL_TIME_ERA_ENTRIES);\n\n show(&f, langinfo::_NL_TIME_TIMEZONE);\n\n show(&f, langinfo::_DATE_FMT);\n\n show(&f, langinfo::ERA);\n\n show(&f, langinfo::ALT_DIGITS);\n\n show(&f, langinfo::YESEXPR);\n\n show(&f, langinfo::NOEXPR);\n\n show(&f, langinfo::YESSTR);\n\n show(&f, langinfo::NOSTR);\n\n show(&f, langinfo::_NL_NAME_NAME_FMT);\n\n show(&f, langinfo::_NL_NAME_NAME_GEN);\n\n show(&f, langinfo::_NL_NAME_NAME_MR);\n\n show(&f, langinfo::_NL_NAME_NAME_MRS);\n", "file_path": "examples/localeinfo.rs", "rank": 68, "score": 2.283434571885022 }, { "content": "\n\n/// Trait defining how to obtain various components of a locale.\n\n///\n\n/// Use implementation of this trait to construct parts of the `Locale` object.\n\n///\n\n/// There may be various methods for obtaining locale data. The lowest common denominator is\n\n/// standard C library. It is however quite limited and some systems (notably Android) don't\n\n/// actually contain the corresponding data. Many systems also provide additional configurability\n\n/// for the locale setting (Windows, KDE, etc.) that are only accessible via that system's specific\n\n/// interface. 
So this trait exists to allow combining the methods for obtaining the data.\n\n///\n\n/// The implementations for individual locale categories are returned boxed, because they may need\n\n/// to be polymorphic _and_ in options to allow combining partial implementations. Creating locale\n\n/// data is not a performance critical operation, so dynamic polymrphism is used for sake of\n\n/// simplicity.\n\n///\n\n/// All methods default to simply returning None, again so partial implementations that delegate to\n\n/// another factory are possible. See `CompositeLocaleFactory`.\n", "file_path": "src/lib.rs", "rank": 69, "score": 2.199954534762501 }, { "content": " }\n\n\n\n pub fn format_int<I: Display>(&self, input: I) -> String {\n\n let s = input.to_string();\n\n let mut buf = String::new();\n\n\n\n for (i, c) in s.chars().enumerate() {\n\n buf.push(c);\n\n if (s.len() - i - 1) % 3 == 0 && i != s.len() - 1 {\n\n buf.push_str(&self.thousands_sep[..]);\n\n }\n\n }\n\n\n\n buf\n\n }\n\n\n\n pub fn format_float<F: Display>(&self, input: F, decimal_places: usize) -> String {\n\n format!(\"{:.*}\", decimal_places, input).replace(\".\", &self.decimal_sep)\n\n }\n\n}\n", "file_path": "src/lib.rs", "rank": 70, "score": 2.1285940862308252 }, { "content": "pub const _NL_WMON_1: ::libc::c_uint = 131150;\n\npub const _NL_WMON_2: ::libc::c_uint = 131151;\n\npub const _NL_WMON_3: ::libc::c_uint = 131152;\n\npub const _NL_WMON_4: ::libc::c_uint = 131153;\n\npub const _NL_WMON_5: ::libc::c_uint = 131154;\n\npub const _NL_WMON_6: ::libc::c_uint = 131155;\n\npub const _NL_WMON_7: ::libc::c_uint = 131156;\n\npub const _NL_WMON_8: ::libc::c_uint = 131157;\n\npub const _NL_WMON_9: ::libc::c_uint = 131158;\n\npub const _NL_WMON_10: ::libc::c_uint = 131159;\n\npub const _NL_WMON_11: ::libc::c_uint = 131160;\n\npub const _NL_WMON_12: ::libc::c_uint = 131161;\n\npub const _NL_WAM_STR: ::libc::c_uint = 131162;\n\npub const _NL_WPM_STR: ::libc::c_uint = 131163;\n\npub const _NL_WD_T_FMT: 
::libc::c_uint = 131164;\n\npub const _NL_WD_FMT: ::libc::c_uint = 131165;\n\npub const _NL_WT_FMT: ::libc::c_uint = 131166;\n\npub const _NL_WT_FMT_AMPM: ::libc::c_uint = 131167;\n\npub const _NL_WERA_YEAR: ::libc::c_uint = 131168;\n\npub const _NL_WERA_D_FMT: ::libc::c_uint = 131169;\n", "file_path": "src/linux/ffi.rs", "rank": 71, "score": 2.054693375191361 }, { "content": "\n\nimpl Drop for IConv {\n\n fn drop(&mut self) {\n\n if !self.iconv.is_null() {\n\n unsafe { ffi::iconv_close(self.iconv); }\n\n }\n\n }\n\n}\n\n\n\n// FIXME FIXME FIXME #[derive(Clone)]\n\n#[derive(Debug)]\n\npub struct LibCLocaleFactory {\n\n locale: Arc<CLocale>,\n\n iconv: [Option<Arc<IConv>>; 12],\n\n}\n\n\n\nimpl LibCLocaleFactory {\n\n fn codeset_index(item: langinfo::CodesetItems) -> usize {\n\n match item {\n\n langinfo::_NL_COLLATE_CODESET => 0,\n", "file_path": "src/linux/mod.rs", "rank": 72, "score": 2.029809170779191 }, { "content": " show(&f, langinfo::_NL_CTYPE_INDIGITS_WC_LEN);\n\n show(&f, langinfo::_NL_CTYPE_TRANSLIT_TAB_SIZE);\n\n show(&f, langinfo::_NL_CTYPE_TRANSLIT_DEFAULT_MISSING_LEN);\n\n show(&f, langinfo::_NL_CTYPE_TRANSLIT_IGNORE_LEN);\n\n show(&f, langinfo::_NL_CTYPE_MAP_TO_NONASCII);\n\n show(&f, langinfo::_NL_CTYPE_NONASCII_CASE);\n\n show(&f, langinfo::_NL_MONETARY_UNO_VALID_FROM);\n\n show(&f, langinfo::_NL_MONETARY_UNO_VALID_TO);\n\n show(&f, langinfo::_NL_MONETARY_DUO_VALID_FROM);\n\n show(&f, langinfo::_NL_MONETARY_DUO_VALID_TO);\n\n show(&f, langinfo::_NL_TIME_ERA_NUM_ENTRIES);\n\n show(&f, langinfo::_NL_TIME_WEEK_1STDAY);\n\n show(&f, langinfo::_NL_PAPER_HEIGHT);\n\n show(&f, langinfo::_NL_PAPER_WIDTH);\n\n show(&f, langinfo::_NL_ADDRESS_COUNTRY_NUM);\n\n show(&f, langinfo::_NL_CTYPE_OUTDIGIT0_WC);\n\n show(&f, langinfo::_NL_CTYPE_OUTDIGIT1_WC);\n\n show(&f, langinfo::_NL_CTYPE_OUTDIGIT2_WC);\n\n show(&f, langinfo::_NL_CTYPE_OUTDIGIT3_WC);\n\n show(&f, langinfo::_NL_CTYPE_OUTDIGIT4_WC);\n", "file_path": "examples/localeinfo.rs", "rank": 73, "score": 
1.9981681062924266 }, { "content": " show(&f, langinfo::_NL_CTYPE_OUTDIGIT5_WC);\n\n show(&f, langinfo::_NL_CTYPE_OUTDIGIT6_WC);\n\n show(&f, langinfo::_NL_CTYPE_OUTDIGIT7_WC);\n\n show(&f, langinfo::_NL_CTYPE_OUTDIGIT8_WC);\n\n show(&f, langinfo::_NL_CTYPE_OUTDIGIT9_WC);\n\n show(&f, langinfo::_NL_MONETARY_DECIMAL_POINT_WC);\n\n show(&f, langinfo::_NL_MONETARY_THOUSANDS_SEP_WC);\n\n show(&f, langinfo::_NL_NUMERIC_DECIMAL_POINT_WC);\n\n show(&f, langinfo::_NL_NUMERIC_THOUSANDS_SEP_WC);\n\n show(&f, langinfo::_NL_IDENTIFICATION_TITLE);\n\n show(&f, langinfo::_NL_IDENTIFICATION_SOURCE);\n\n show(&f, langinfo::_NL_IDENTIFICATION_ADDRESS);\n\n show(&f, langinfo::_NL_IDENTIFICATION_CONTACT);\n\n show(&f, langinfo::_NL_IDENTIFICATION_EMAIL);\n\n show(&f, langinfo::_NL_IDENTIFICATION_TEL);\n\n show(&f, langinfo::_NL_IDENTIFICATION_FAX);\n\n show(&f, langinfo::_NL_IDENTIFICATION_LANGUAGE);\n\n show(&f, langinfo::_NL_IDENTIFICATION_TERRITORY);\n\n show(&f, langinfo::_NL_IDENTIFICATION_AUDIENCE);\n\n show(&f, langinfo::_NL_IDENTIFICATION_APPLICATION);\n\n show(&f, langinfo::_NL_IDENTIFICATION_ABBREVIATION);\n\n show(&f, langinfo::_NL_IDENTIFICATION_REVISION);\n\n show(&f, langinfo::_NL_IDENTIFICATION_DATE);\n\n show(&f, langinfo::_NL_IDENTIFICATION_CATEGORY);\n\n}\n\n\n", "file_path": "examples/localeinfo.rs", "rank": 74, "score": 1.9496132197783402 }, { "content": " return Some(\n\n Box::new(\n\n Time {\n\n month_names: vec![\n\n self.langinfo(langinfo::ABMON_1).into_owned(),\n\n self.langinfo(langinfo::ABMON_2).into_owned(),\n\n self.langinfo(langinfo::ABMON_3).into_owned(),\n\n self.langinfo(langinfo::ABMON_4).into_owned(),\n\n self.langinfo(langinfo::ABMON_5).into_owned(),\n\n self.langinfo(langinfo::ABMON_6).into_owned(),\n\n self.langinfo(langinfo::ABMON_7).into_owned(),\n\n self.langinfo(langinfo::ABMON_8).into_owned(),\n\n self.langinfo(langinfo::ABMON_9).into_owned(),\n\n self.langinfo(langinfo::ABMON_10).into_owned(),\n\n 
self.langinfo(langinfo::ABMON_11).into_owned(),\n\n self.langinfo(langinfo::ABMON_12).into_owned(),\n\n ],\n\n long_month_names: vec![\n\n self.langinfo(langinfo::MON_1).into_owned(),\n\n self.langinfo(langinfo::MON_2).into_owned(),\n", "file_path": "src/linux/mod.rs", "rank": 75, "score": 1.8173579689024835 }, { "content": "[![TravisCI Build Status](https://travis-ci.org/rust-locale/rust-locale.svg?branch=master)](https://travis-ci.org/rust-locale/rust-locale)\n\n[![AppVeyor Build Status](https://ci.appveyor.com/api/projects/status/xb23rxc48wrmwq6q/branch/master?svg=true)](https://ci.appveyor.com/project/jan-hudec/rust-locale/branch/master)\n\n[![Crates.io Version](https://img.shields.io/crates/v/locale.svg)](https://crates.io/crates/locale)\n\n[![Docs.rs](https://docs.rs/locale/badge.svg)](https://docs.rs/locale/)\n\n\n\n# `rust-locale`\n\n\n\n**WORK IN PROGRESS**\n\n\n\nWill implement basic localization support.\n\n\n\n**Warning**: This version is mostly useless. Major rewrite is pending for version 0.3.\n\n\n\n## Documentation\n\n\n\nOn [![Docs.rs](https://docs.rs/locale/badge.svg)](https://docs.rs/locale/) or [github](https://rust-locale.github.io/rust-locale/locale/).\n\n\n\n## Installation\n\n\n\nIt uses [Cargo](http://crates.io/), Rust's package manager. 
You can\n\ndepend on this library by adding `locale` to your Cargo dependencies:\n\n\n\n```toml\n\n[dependencies]\n\nlocale = \"0.2\"\n\n```\n\n\n\nOr, to use the Git repo directly:\n\n\n\n```toml\n\n[dependencies.locale]\n\ngit = \"https://github.com/rust-locale/rust-locale.git\"\n\n```\n", "file_path": "README.md", "rank": 76, "score": 1.784370807337273 }, { "content": "pub type nl_item = ::libc::c_uint;\n\npub type Enum_Unnamed1 = ::libc::c_uint;\n\npub const ABDAY_1: ::libc::c_uint = 131072;\n\npub const ABDAY_2: ::libc::c_uint = 131073;\n\npub const ABDAY_3: ::libc::c_uint = 131074;\n\npub const ABDAY_4: ::libc::c_uint = 131075;\n\npub const ABDAY_5: ::libc::c_uint = 131076;\n\npub const ABDAY_6: ::libc::c_uint = 131077;\n\npub const ABDAY_7: ::libc::c_uint = 131078;\n\npub const DAY_1: ::libc::c_uint = 131079;\n\npub const DAY_2: ::libc::c_uint = 131080;\n\npub const DAY_3: ::libc::c_uint = 131081;\n\npub const DAY_4: ::libc::c_uint = 131082;\n\npub const DAY_5: ::libc::c_uint = 131083;\n\npub const DAY_6: ::libc::c_uint = 131084;\n\npub const DAY_7: ::libc::c_uint = 131085;\n\npub const ABMON_1: ::libc::c_uint = 131086;\n\npub const ABMON_2: ::libc::c_uint = 131087;\n\npub const ABMON_3: ::libc::c_uint = 131088;\n\npub const ABMON_4: ::libc::c_uint = 131089;\n", "file_path": "src/linux/ffi.rs", "rank": 77, "score": 1.7456346495040025 }, { "content": "}\n\nimpl ::std::default::Default for Struct_lconv {\n\n fn default() -> Struct_lconv { unsafe { ::std::mem::zeroed() } }\n\n}\n\npub enum Struct___locale_data { }\n\n#[repr(C)]\n\n#[derive(Copy, Clone)]\n\npub struct Struct___locale_struct {\n\n pub __locales: [*mut Struct___locale_data; 13usize],\n\n pub __ctype_b: *const ::libc::c_ushort,\n\n pub __ctype_tolower: *const ::libc::c_int,\n\n pub __ctype_toupper: *const ::libc::c_int,\n\n pub __names: [*const ::libc::c_char; 13usize],\n\n}\n\nimpl ::std::default::Default for Struct___locale_struct {\n\n fn default() -> Struct___locale_struct { unsafe { 
::std::mem::zeroed() } }\n\n}\n\npub type __locale_t = *mut Struct___locale_struct;\n\npub type locale_t = __locale_t;\n\npub type nl_catd = *mut ::libc::c_void;\n", "file_path": "src/linux/ffi.rs", "rank": 78, "score": 1.6269084293030223 }, { "content": " _NL_CTYPE_INDIGITS5_MB = ffi::_NL_CTYPE_INDIGITS5_MB as isize,\n\n _NL_CTYPE_INDIGITS6_MB = ffi::_NL_CTYPE_INDIGITS6_MB as isize,\n\n _NL_CTYPE_INDIGITS7_MB = ffi::_NL_CTYPE_INDIGITS7_MB as isize,\n\n _NL_CTYPE_INDIGITS8_MB = ffi::_NL_CTYPE_INDIGITS8_MB as isize,\n\n _NL_CTYPE_INDIGITS9_MB = ffi::_NL_CTYPE_INDIGITS9_MB as isize,\n\n _NL_CTYPE_OUTDIGIT0_MB = ffi::_NL_CTYPE_OUTDIGIT0_MB as isize,\n\n _NL_CTYPE_OUTDIGIT1_MB = ffi::_NL_CTYPE_OUTDIGIT1_MB as isize,\n\n _NL_CTYPE_OUTDIGIT2_MB = ffi::_NL_CTYPE_OUTDIGIT2_MB as isize,\n\n _NL_CTYPE_OUTDIGIT3_MB = ffi::_NL_CTYPE_OUTDIGIT3_MB as isize,\n\n _NL_CTYPE_OUTDIGIT4_MB = ffi::_NL_CTYPE_OUTDIGIT4_MB as isize,\n\n _NL_CTYPE_OUTDIGIT5_MB = ffi::_NL_CTYPE_OUTDIGIT5_MB as isize,\n\n _NL_CTYPE_OUTDIGIT6_MB = ffi::_NL_CTYPE_OUTDIGIT6_MB as isize,\n\n _NL_CTYPE_OUTDIGIT7_MB = ffi::_NL_CTYPE_OUTDIGIT7_MB as isize,\n\n _NL_CTYPE_OUTDIGIT8_MB = ffi::_NL_CTYPE_OUTDIGIT8_MB as isize,\n\n _NL_CTYPE_OUTDIGIT9_MB = ffi::_NL_CTYPE_OUTDIGIT9_MB as isize,\n\n _NL_CTYPE_TRANSLIT_IGNORE = ffi::_NL_CTYPE_TRANSLIT_IGNORE as isize,\n\n}\n\n\n\nimpl<'a> LanginfoItem<'a> for CTypeStringItems {\n\n type Type = Cow<'a, str>;\n", "file_path": "src/linux/langinfo.rs", "rank": 79, "score": 1.0353237634088184 } ]
Rust
src/shape.rs
magnusstrale/raytracer
58ea8e85380de87a9abffa5e376dd01fc4c2901c
use std::any::Any; use std::fmt; use super::tuple::Tuple; use super::ray::Ray; use super::intersection::Intersections; use super::material::Material; use super::matrix::{Matrix, IDENTITY_MATRIX}; pub trait Shape: Any + fmt::Debug { fn box_clone(&self) -> BoxShape; fn box_eq(&self, other: &dyn Any) -> bool; fn as_any(&self) -> &dyn Any; fn inner_intersect(&self, object_ray: Ray) -> Intersections; fn inner_normal_at(&self, object_point: Tuple) -> Tuple; fn material(&self) -> &Material; fn transformation(&self) -> Matrix; fn inverse_transformation(&self) -> Matrix; fn intersect(&self, world_ray: Ray) -> Intersections { self.inner_intersect(world_ray.transform(self.inverse_transformation())) } fn normal_at(&self, world_point: Tuple) -> Tuple { let object_normal = self.inner_normal_at(self.inverse_transformation() * world_point); let mut world_normal = self.inverse_transformation().transpose() * object_normal; world_normal.w = 0.; world_normal.normalize() } } pub type BoxShape = Box<dyn Shape>; pub fn inverse_transform_parameter(transform: Option<Matrix>) -> Matrix { match transform { None => IDENTITY_MATRIX, Some(t) => t.inverse().unwrap() } } impl Clone for BoxShape { fn clone(&self) -> Self { self.box_clone() } } impl PartialEq for BoxShape { fn eq(&self, other: &BoxShape) -> bool { self.box_eq(other.as_any()) } } #[cfg(test)] mod tests { use super::*; use std::f64::consts::{PI, SQRT_2}; use crate::color::GREEN; use crate::tuple::{ORIGO, VECTOR_Y_UP}; use crate::material::DEFAULT_MATERIAL; static mut SAVED_RAY: Ray = Ray { origin: ORIGO, direction: VECTOR_Y_UP }; #[derive(Clone, Debug, PartialEq)] struct TestShape { material: Material, inverse_transform: Matrix, transform: Matrix } impl Shape for TestShape { fn as_any(&self) -> &dyn Any { self } fn box_eq(&self, other: &dyn Any) -> bool { other.downcast_ref::<Self>().map_or(false, |a| self == a) } fn box_clone(&self) -> BoxShape { Box::new((*self).clone()) } fn inner_intersect(&self, object_ray: Ray) -> Intersections 
{ unsafe { SAVED_RAY = object_ray; } Intersections::new(vec![]) } fn inner_normal_at(&self, object_point: Tuple) -> Tuple { Tuple::vector(object_point.x, object_point.y, object_point.z) } fn material(&self) -> &Material { &self.material } fn transformation(&self) -> Matrix { self.transform } fn inverse_transformation(&self) -> Matrix { self.inverse_transform } } impl TestShape { fn new(material: Option<Material>, transform: Option<Matrix>) -> Self { Self { material: material.unwrap_or_default(), transform: transform.unwrap_or_default(), inverse_transform: inverse_transform_parameter(transform) } } } #[test] fn default_transformation() { let s = TestShape::new(None, None); assert_eq!(s.transformation(), IDENTITY_MATRIX); } #[test] fn assign_transformation() { let tr = Matrix::translation(2., 3., 4.); let s = TestShape::new(None, Some(tr)); assert_eq!(s.transformation(), tr); } #[test] fn default_material() { let s = TestShape::new(None, None); let m = s.material(); assert_eq!(*m, DEFAULT_MATERIAL); } #[test] fn assign_material() { let m = Material::new(GREEN, 0.1, 0.2, 0.3, 0.4, None); let s = TestShape::new(Some(m.clone()), None); assert_eq!(*s.material(), m); } #[test] fn intersect_scaled_shape_with_ray() { let r = Ray::new(Tuple::point(0., 0., -5.), Tuple::vector(0., 0., 1.)); let tr = Matrix::scaling(2., 2., 2.); let s = TestShape::new(None, Some(tr)); s.intersect(r); unsafe { assert_eq!(SAVED_RAY.origin, Tuple::point(0., 0., -2.5)); assert_eq!(SAVED_RAY.direction, Tuple::vector(0., 0., 0.5)); } } #[test] fn intersect_translated_shape_with_ray() { let r = Ray::new(Tuple::point(0., 0., -5.), Tuple::vector(0., 0., 1.)); let tr = Matrix::translation(5., 0., 0.); let s = TestShape::new(None, Some(tr)); s.intersect(r); unsafe { assert_eq!(SAVED_RAY.origin, Tuple::point(-5., 0., -5.)); assert_eq!(SAVED_RAY.direction, Tuple::vector(0., 0., 1.)); } } #[test] fn compute_normal_on_translated_shape() { let tr = Matrix::translation(0., 1., 0.); let s = TestShape::new(None, 
Some(tr)); let n = s.normal_at(Tuple::point(0., 1.70711, -0.70711)); assert_eq!(n, Tuple::vector(0., 0.70711, -0.70711)); } #[test] fn compute_normal_on_transformed_shape() { let tr = Matrix::scaling(1., 0.5, 1.) * Matrix::rotation_z(PI / 5.); let s = TestShape::new(None, Some(tr)); let n = s.normal_at(Tuple::point(0., SQRT_2 / 2., -SQRT_2 / 2.)); assert_eq!(n, Tuple::vector(0., 0.97014, -0.24254)); } }
use std::any::Any; use std::fmt; use super::tuple::Tuple; use super::ray::Ray; use super::intersection::Intersections; use super::material::Material; use super::matrix::{Matrix, IDENTITY_MATRIX}; pub trait Shape: Any + fmt::Debug { fn box_clone(&self) -> BoxShape; fn box_eq(&self, other: &dyn Any) -> bool; fn as_any(&self) -> &dyn Any; fn inner_intersect(&self, object_ray: Ray) -> Intersections; fn inner_normal_at(&self, object_point: Tuple) -> Tuple; fn material(&self) -> &Material; fn transformation(&self) -> Matrix; fn inverse_transformation(&self) -> Matrix; fn intersect(&self, world_ray: Ray) -> Intersections { self.inner_intersect(world_ray.transform(self.inverse_transformation())) } fn normal_at(&self, world_point: Tuple) -> Tuple { let object_normal = self.inner_normal_at(self.inverse_transformation() * world_point); let mut world_normal = self.inverse_transformation().transpose() * object_normal; world_normal.w = 0.; world_normal.normalize() } } pub type BoxShape = Box<dyn Shape>; pub fn inverse_transform_parameter(transform: Option<Matrix>) -> Matrix { match transform { None => IDENTITY_MATRIX, Some(t) => t.inverse().unwrap() } } impl Clone for BoxShape { fn clone(&self) -> Self { self.box_clone() } } impl PartialEq for BoxShape { fn eq(&self, other: &BoxShape) -> bool { self.box_eq(other.as_any()) } } #[cfg(test)] mod tests { use super::*; use std::f64::consts::{PI, SQRT_2}; use crate::color::GREEN; use crate::tuple::{ORIGO, VECTOR_Y_UP}; use crate::material::DEFAULT_MATERIAL; static mut SAVED_RAY: Ray = Ray { origin: ORIGO, direction: VECTOR_Y_UP }; #[derive(Clone, Debug, PartialEq)] struct TestShape { material: Material, inverse_transform: Matrix, transform: Matrix } impl Shape for TestShape { fn as_any(&self) -> &dyn Any { self } fn box_eq(&self, other: &dyn Any) -> bool { other.downcast_ref::<Self>().map_or(false, |a| self == a) } fn box_clone(&self) -> BoxShape { Box::new((*self).clone()) } fn inner_intersect(&self, object_ray: Ray) -> Intersections 
{ unsafe { SAVED_RAY = object_ray; } Intersections::new(vec![]) } fn inner_normal_at(&self, object_point: Tuple) -> Tuple { Tuple::vector(object_point.x, object_point.y, object_point.z) } fn material(&self) -> &Material { &self.material } fn transformation(&self) -> Matrix { self.transform } fn inverse_transf
) { let s = TestShape::new(None, None); assert_eq!(s.transformation(), IDENTITY_MATRIX); } #[test] fn assign_transformation() { let tr = Matrix::translation(2., 3., 4.); let s = TestShape::new(None, Some(tr)); assert_eq!(s.transformation(), tr); } #[test] fn default_material() { let s = TestShape::new(None, None); let m = s.material(); assert_eq!(*m, DEFAULT_MATERIAL); } #[test] fn assign_material() { let m = Material::new(GREEN, 0.1, 0.2, 0.3, 0.4, None); let s = TestShape::new(Some(m.clone()), None); assert_eq!(*s.material(), m); } #[test] fn intersect_scaled_shape_with_ray() { let r = Ray::new(Tuple::point(0., 0., -5.), Tuple::vector(0., 0., 1.)); let tr = Matrix::scaling(2., 2., 2.); let s = TestShape::new(None, Some(tr)); s.intersect(r); unsafe { assert_eq!(SAVED_RAY.origin, Tuple::point(0., 0., -2.5)); assert_eq!(SAVED_RAY.direction, Tuple::vector(0., 0., 0.5)); } } #[test] fn intersect_translated_shape_with_ray() { let r = Ray::new(Tuple::point(0., 0., -5.), Tuple::vector(0., 0., 1.)); let tr = Matrix::translation(5., 0., 0.); let s = TestShape::new(None, Some(tr)); s.intersect(r); unsafe { assert_eq!(SAVED_RAY.origin, Tuple::point(-5., 0., -5.)); assert_eq!(SAVED_RAY.direction, Tuple::vector(0., 0., 1.)); } } #[test] fn compute_normal_on_translated_shape() { let tr = Matrix::translation(0., 1., 0.); let s = TestShape::new(None, Some(tr)); let n = s.normal_at(Tuple::point(0., 1.70711, -0.70711)); assert_eq!(n, Tuple::vector(0., 0.70711, -0.70711)); } #[test] fn compute_normal_on_transformed_shape() { let tr = Matrix::scaling(1., 0.5, 1.) * Matrix::rotation_z(PI / 5.); let s = TestShape::new(None, Some(tr)); let n = s.normal_at(Tuple::point(0., SQRT_2 / 2., -SQRT_2 / 2.)); assert_eq!(n, Tuple::vector(0., 0.97014, -0.24254)); } }
ormation(&self) -> Matrix { self.inverse_transform } } impl TestShape { fn new(material: Option<Material>, transform: Option<Matrix>) -> Self { Self { material: material.unwrap_or_default(), transform: transform.unwrap_or_default(), inverse_transform: inverse_transform_parameter(transform) } } } #[test] fn default_transformation(
random
[ { "content": "pub trait Pattern: Any + fmt::Debug {\n\n fn box_clone(&self) -> BoxPattern;\n\n fn box_eq(&self, other: &dyn Any) -> bool;\n\n fn as_any(&self) -> &dyn Any;\n\n fn transformation(&self) -> Matrix;\n\n fn inverse_transformation(&self) -> Matrix;\n\n fn inner_pattern_at(&self, pattern_point: Tuple) -> Color;\n\n fn pattern_at_shape(&self, object: &dyn Shape, world_point: Tuple) -> Color {\n\n let object_point = object.inverse_transformation() * world_point;\n\n let pattern_point = self.inverse_transformation() * object_point;\n\n self.inner_pattern_at(pattern_point)\n\n }\n\n}\n\n\n\npub type BoxPattern = Box<dyn Pattern>;\n\n\n\nimpl Clone for BoxPattern {\n\n fn clone(&self) -> Self {\n\n self.box_clone()\n\n }\n", "file_path": "src/pattern.rs", "rank": 2, "score": 93739.83330958692 }, { "content": "pub fn approx_eq(a: f64, b: f64) -> bool {\n\n (a - b).abs() < EPSILON\n\n}", "file_path": "src/lib.rs", "rank": 3, "score": 80893.48875268504 }, { "content": "fn main()\n\n{\n\n canvas_to_file(\"black.png\");\n\n circle_shadow(\"shadow.png\");\n\n rendered_sphere(\"sphere.png\");\n\n camera_render_world(\"three_spheres.png\");\n\n}\n", "file_path": "src/main.rs", "rank": 4, "score": 34986.738521881496 }, { "content": "fn canvas_to_file(filename: &str)\n\n{\n\n let mut c = Canvas::new(100, 100);\n\n c.write_pixel(1, 1, RED);\n\n c.write_pixel(99, 0, GREEN);\n\n c.write_pixel(99, 99, WHITE);\n\n c.save(filename).unwrap();\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 5, "score": 29088.284435262012 }, { "content": "fn circle_shadow(filename: &str)\n\n{\n\n const CANVAS_PIXELS: usize = 1000;\n\n const WALL_SIZE:usize = 7;\n\n let ray_origin = Tuple::point(0., 0., -5.);\n\n let wall_z = 10.0;\n\n let pixel_size = WALL_SIZE as f64 / CANVAS_PIXELS as f64;\n\n let half = WALL_SIZE as f64 / 2.0;\n\n\n\n let mut canvas = Canvas::new(CANVAS_PIXELS, CANVAS_PIXELS);\n\n let color = Color::new(1., 0., 0.);\n\n let tr = Matrix::shearing(1., 0., 0.5, 0., 0., 0.) 
* Matrix::scaling(1., 0.5, 1.);\n\n let shape = Sphere::new(None, Some(tr));\n\n for y in 0..CANVAS_PIXELS {\n\n let world_y = half - pixel_size * (y as f64);\n\n for x in 0..CANVAS_PIXELS {\n\n let world_x = -half + pixel_size * (x as f64);\n\n let position = Tuple::point(world_x, world_y, wall_z);\n\n let r = Ray::new(ray_origin, (position - ray_origin).normalize());\n\n let xs = shape.intersect(r);\n\n match xs.hit() {\n\n Some(_i) => canvas.write_pixel(x, y, color),\n\n None => ()\n\n }\n\n }\n\n }\n\n canvas.save(filename).unwrap();\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 6, "score": 29088.284435262012 }, { "content": "fn rendered_sphere(filename: &str)\n\n{\n\n const CANVAS_PIXELS: usize = 1000;\n\n const WALL_SIZE:usize = 7;\n\n let ray_origin = Tuple::point(0., 0., -5.);\n\n let wall_z = 10.0;\n\n let pixel_size = WALL_SIZE as f64 / CANVAS_PIXELS as f64;\n\n let half = WALL_SIZE as f64 / 2.0;\n\n\n\n let mut canvas = Canvas::new(CANVAS_PIXELS, CANVAS_PIXELS);\n\n let mut m = Material::default();\n\n m.color = Color::new(1., 0.2, 1.);\n\n m.pattern = Some(StripePattern::new_boxed(GREEN, RED, Some(Matrix::scaling(0.1, 0.1, 0.1))));\n\n let tr = Matrix::shearing(1., 0., 0.5, 0., 0., 0.) 
* Matrix::scaling(1., 0.5, 1.);\n\n let shape = Sphere::new(Some(m), Some(tr));\n\n let light_position = Tuple::point(-10., 10., -10.);\n\n let light_color = WHITE;\n\n let light = PointLight::new(light_position, light_color);\n\n\n\n for y in 0..CANVAS_PIXELS {\n", "file_path": "src/main.rs", "rank": 7, "score": 29088.284435262012 }, { "content": "use super::matrix::Matrix;\n\nuse super::tuple::Tuple;\n\n\n\n#[derive(Debug, Copy, Clone)]\n\npub struct Ray {\n\n pub origin: Tuple,\n\n pub direction: Tuple\n\n}\n\n\n\nimpl Ray {\n\n pub fn new(origin: Tuple, direction: Tuple) -> Self {\n\n if !origin.is_point() { panic!(\"origin should be a point\"); }\n\n if !direction.is_vector() { panic!(\"direction should be a vector\"); }\n\n Ray { origin, direction }\n\n }\n\n\n\n pub fn position(&self, t: f64) -> Tuple {\n\n self.origin + self.direction * t\n\n }\n\n\n", "file_path": "src/ray.rs", "rank": 8, "score": 28513.963217334847 }, { "content": " pub fn transform(&self, m: Matrix) -> Ray {\n\n Ray::new(m * self.origin, m * self.direction)\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn creating_querying_ray()\n\n {\n\n let origin = Tuple::point(1., 2., 3.);\n\n let direction = Tuple::vector(4., 5., 6.);\n\n let r = Ray::new(origin, direction);\n\n\n\n assert_eq!(r.origin, origin);\n\n assert_eq!(r.direction, direction);\n\n }\n\n\n", "file_path": "src/ray.rs", "rank": 9, "score": 28510.390042607894 }, { "content": " #[should_panic]\n\n #[test]\n\n fn creating_ray_invalid_origin()\n\n {\n\n let origin = Tuple::vector(1., 2., 3.);\n\n let direction = Tuple::vector(4., 5., 6.);\n\n Ray::new(origin, direction);\n\n }\n\n\n\n #[should_panic]\n\n #[test]\n\n fn creating_ray_invalid_direction()\n\n {\n\n let origin = Tuple::point(1., 2., 3.);\n\n let direction = Tuple::point(4., 5., 6.);\n\n Ray::new(origin, direction);\n\n }\n\n\n\n #[test]\n\n fn computing_point_from_distance()\n", "file_path": "src/ray.rs", "rank": 10, "score": 
28499.17947843302 }, { "content": " {\n\n let r = Ray::new(Tuple::point(2., 3., 4.), Tuple::vector(1., 0., 0.));\n\n assert_eq!(r.position(0.), Tuple::point(2., 3., 4.));\n\n assert_eq!(r.position(1.), Tuple::point(3., 3., 4.));\n\n assert_eq!(r.position(-1.), Tuple::point(1., 3., 4.));\n\n assert_eq!(r.position(2.5), Tuple::point(4.5, 3., 4.));\n\n }\n\n\n\n #[test]\n\n fn translating_ray() {\n\n let r = Ray::new(Tuple::point(1., 2., 3.), Tuple::vector(0., 1., 0.));\n\n let m = Matrix::translation(3., 4., 5.);\n\n let r2 = r.transform(m);\n\n\n\n assert_eq!(r2.origin, Tuple::point(4., 6., 8.));\n\n assert_eq!(r2.direction, Tuple::vector(0., 1., 0.));\n\n }\n\n\n\n #[test]\n\n fn scaling_ray() {\n\n let r = Ray::new(Tuple::point(1., 2., 3.), Tuple::vector(0., 1., 0.));\n\n let m = Matrix::scaling(2., 3., 4.);\n\n let r2 = r.transform(m);\n\n\n\n assert_eq!(r2.origin, Tuple::point(2., 6., 12.));\n\n assert_eq!(r2.direction, Tuple::vector(0., 3., 0.));\n\n }\n\n}\n", "file_path": "src/ray.rs", "rank": 11, "score": 28498.48311783224 }, { "content": "use super::color::{Color, BLACK, WHITE};\n\nuse super::tuple::Tuple;\n\nuse super::light::PointLight;\n\nuse super::pattern::BoxPattern;\n\nuse super::shape::Shape;\n\n\n\n#[derive(Debug, Clone, PartialEq)]\n\npub struct Material {\n\n pub color: Color,\n\n pub ambient: f64,\n\n pub diffuse: f64,\n\n pub specular: f64,\n\n pub shininess: f64,\n\n pub pattern: Option<BoxPattern>\n\n}\n\n\n\npub const DEFAULT_AMBIENT: f64 = 0.1;\n\npub const DEFAULT_DIFFUSE: f64 = 0.9;\n\npub const DEFAULT_SPECULAR: f64 = 0.9;\n\npub const DEFAULT_SHININESS: f64 = 200.0;\n", "file_path": "src/material.rs", "rank": 12, "score": 28341.487552341492 }, { "content": "pub const DEFAULT_MATERIAL: Material = Material {\n\n color: WHITE, \n\n ambient: DEFAULT_AMBIENT, \n\n diffuse: DEFAULT_DIFFUSE, \n\n specular: DEFAULT_SPECULAR, \n\n shininess: DEFAULT_SHININESS,\n\n pattern: None };\n\n\n\nimpl Default for Material {\n\n fn default() -> Self {\n\n 
Material::new(WHITE, DEFAULT_AMBIENT, DEFAULT_DIFFUSE, DEFAULT_SPECULAR, DEFAULT_SHININESS, None)\n\n }\n\n}\n\n\n\nimpl Material {\n\n pub fn new(color: Color, ambient: f64, diffuse: f64, specular: f64, shininess: f64, pattern: Option<BoxPattern>) -> Material {\n\n Material { color, ambient, diffuse, specular, shininess, pattern }\n\n }\n\n\n\n pub fn lighting(&self, object: &dyn Shape, light: &PointLight, point: Tuple, eyev: Tuple, normalv: Tuple, in_shadow: bool) -> Color {\n", "file_path": "src/material.rs", "rank": 13, "score": 28339.947979900244 }, { "content": " let factor = reflect_dot_eye.powf(self.shininess);\n\n light.intensity * self.specular * factor\n\n }\n\n )\n\n };\n\n ambient + if in_shadow { BLACK } else { diffuse + specular }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::tuple::ORIGO;\n\n use crate::sphere::Sphere;\n\n use crate::pattern::StripePattern;\n\n\n\n #[test]\n\n fn default_material() {\n\n let m = Material::default();\n\n assert_eq!(m.ambient, 0.1);\n", "file_path": "src/material.rs", "rank": 14, "score": 28338.62545160637 }, { "content": " fn lighing_light_behind_surface() {\n\n let object = Sphere::new(None, None);\n\n let m = Material::default();\n\n let position = ORIGO;\n\n let eyev = Tuple::vector(0., 0., -1.0 );\n\n let normalv = Tuple::vector(0., 0., -1.);\n\n let light = PointLight::new(Tuple::point(0., 0., 10.), WHITE);\n\n let result = m.lighting(&object, &light, position, eyev, normalv, false);\n\n\n\n assert_eq!(result, Color::new(0.1, 0.1, 0.1));\n\n }\n\n\n\n #[test]\n\n fn lighing_with_surface_in_shadow() {\n\n let object = Sphere::new(None, None);\n\n let m = Material::default();\n\n let position = ORIGO;\n\n let eyev = Tuple::vector(0., 0., -1.);\n\n let normalv = Tuple::vector(0., 0., -1.);\n\n let light = PointLight::new(Tuple::point(0., 0., -10.), WHITE);\n", "file_path": "src/material.rs", "rank": 15, "score": 28334.52963287339 }, { "content": " let object = Sphere::new(None, 
None);\n\n let m = Material::default();\n\n let position = ORIGO;\n\n let pv = 2.0f64.sqrt() / 2.0;\n\n let eyev = Tuple::vector(0., pv, -pv);\n\n let normalv = Tuple::vector(0., 0., -1.);\n\n let light = PointLight::new(Tuple::point(0., 0., -10.), WHITE);\n\n let result = m.lighting(&object, &light, position, eyev, normalv, false);\n\n\n\n assert_eq!(result, Color::new(1., 1., 1.));\n\n }\n\n\n\n #[test]\n\n fn lighing_eye_opposite_surface_light_offset_45_degrees() {\n\n let object = Sphere::new(None, None);\n\n let m = Material::default();\n\n let position = ORIGO;\n\n let eyev = Tuple::vector(0., 0., -1.0 );\n\n let normalv = Tuple::vector(0., 0., -1.);\n\n let light = PointLight::new(Tuple::point(0., 10., -10.), WHITE);\n", "file_path": "src/material.rs", "rank": 16, "score": 28334.167528617087 }, { "content": " assert_eq!(m.diffuse, 0.9);\n\n assert_eq!(m.specular, 0.9);\n\n assert_eq!(m.shininess, 200.);\n\n }\n\n\n\n #[test]\n\n fn lighing_eye_between_light_and_surface() {\n\n let object = Sphere::new(None, None);\n\n let m = Material::default();\n\n let position = ORIGO;\n\n let eyev = Tuple::vector(0., 0., -1.);\n\n let normalv = Tuple::vector(0., 0., -1.);\n\n let light = PointLight::new(Tuple::point(0., 0., -10.), WHITE);\n\n let result = m.lighting(&object, &light, position, eyev, normalv, false);\n\n\n\n assert_eq!(result, Color::new(1.9, 1.9, 1.9));\n\n }\n\n\n\n #[test]\n\n fn lighing_eye_between_light_and_surface_eye_offset_45_degrees() {\n", "file_path": "src/material.rs", "rank": 17, "score": 28332.665363792512 }, { "content": " let result = m.lighting(&object, &light, position, eyev, normalv, false);\n\n\n\n assert_eq!(result, Color::new(0.7364, 0.7364, 0.7364));\n\n }\n\n\n\n #[test]\n\n fn lighing_eye_in_path_of_reflection_vector() {\n\n let object = Sphere::new(None, None);\n\n let m = Material::default();\n\n let position = ORIGO;\n\n let pv = -2.0f64.sqrt() / 2.0;\n\n let eyev = Tuple::vector(0., pv, pv);\n\n let normalv = Tuple::vector(0., 
0., -1.);\n\n let light = PointLight::new(Tuple::point(0., 10., -10.), WHITE);\n\n let result = m.lighting(&object, &light, position, eyev, normalv, false);\n\n\n\n assert_eq!(result, Color::new(1.6364, 1.6364, 1.6364));\n\n }\n\n\n\n #[test]\n", "file_path": "src/material.rs", "rank": 18, "score": 28332.49785737414 }, { "content": " let in_shadow = true;\n\n let result = m.lighting(&object, &light, position, eyev, normalv, in_shadow);\n\n\n\n assert_eq!(result, Color::new(0.1, 0.1, 0.1));\n\n }\n\n\n\n #[test]\n\n fn lighting_with_pattern_applied() {\n\n let object = Sphere::new(None, None);\n\n let m = Material::new(WHITE, 1., 0., 0., DEFAULT_SHININESS, Some(StripePattern::new_boxed(WHITE, BLACK, None)));\n\n let eyev = Tuple::vector(0., 0., -1.);\n\n let normalv = Tuple::vector(0., 0., -1.);\n\n let light = PointLight::new(Tuple::point(0., 0., -10.), WHITE);\n\n let c1 = m.lighting(&object, &light, Tuple::point(0.9, 0., 0.), eyev, normalv, false);\n\n let c2 = m.lighting(&object, &light, Tuple::point(1.1, 0., 0.), eyev, normalv, false);\n\n\n\n assert_eq!(c1, WHITE);\n\n assert_eq!(c2, BLACK);\n\n }\n\n}", "file_path": "src/material.rs", "rank": 20, "score": 28330.400496347807 }, { "content": " let color = match &self.pattern {\n\n Some(p) => p.pattern_at_shape(object, point),\n\n None => self.color\n\n };\n\n let effective_color = color * light.intensity;\n\n let lightv = (light.position - point).normalize();\n\n let ambient = effective_color * self.ambient;\n\n let light_dot_normal = lightv.dot(&normalv);\n\n let (diffuse, specular) = \n\n if light_dot_normal < 0.0 {\n\n (BLACK, BLACK)\n\n }\n\n else {\n\n let reflectv = (-lightv).reflect(normalv);\n\n let reflect_dot_eye = reflectv.dot(&eyev);\n\n (effective_color * self.diffuse * light_dot_normal, \n\n if reflect_dot_eye <= 0.0 { \n\n BLACK\n\n }\n\n else {\n", "file_path": "src/material.rs", "rank": 21, "score": 28327.410064547574 }, { "content": "use core::ops;\n\nuse super::EPSILON;\n\nuse 
super::shape::*;\n\nuse super::ray::Ray;\n\nuse super::precomputed_data::PrecomputedData;\n\n\n\n#[derive(Debug, Clone)]\n\npub struct Intersection {\n\n pub t: f64,\n\n pub object: BoxShape\n\n}\n\n\n\nimpl PartialEq for Intersection {\n\n fn eq(&self, other: &Intersection) -> bool {\n\n self.t == other.t &&\n\n &self.object == &other.object\n\n }\n\n}\n\n\n\nimpl Intersection {\n", "file_path": "src/intersection.rs", "rank": 29, "score": 28220.993739945065 }, { "content": "#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::matrix::Matrix;\n\n use crate::tuple::Tuple;\n\n use crate::sphere::Sphere;\n\n\n\n #[test]\n\n fn intersection_encapsulates_t_and_object() {\n\n let s = Sphere::default_boxed();\n\n let i = Intersection::new(3.5, s.clone());\n\n\n\n assert_eq!(i.t, 3.5);\n\n assert_eq!(&i.object, &s);\n\n }\n\n\n\n #[test]\n\n fn aggregate_intersections() {\n\n let s = Sphere::default_boxed();\n\n let i1 = Intersection::new(1., s.clone());\n", "file_path": "src/intersection.rs", "rank": 30, "score": 28213.6608029232 }, { "content": " pub fn new(t: f64, object: BoxShape) -> Self {\n\n Intersection { t, object }\n\n }\n\n\n\n pub fn prepare_computations(&self, ray: Ray) -> PrecomputedData {\n\n let point = ray.position(self.t);\n\n let eyev = -ray.direction;\n\n let mut normalv = self.object.normal_at(point);\n\n let inside = if normalv.dot(&eyev) < 0. 
{\n\n normalv = -normalv;\n\n true\n\n } else {\n\n false\n\n };\n\n let over_point = point + normalv * EPSILON;\n\n\n\n PrecomputedData::new(\n\n self.t,\n\n self.object.clone(),\n\n point,\n", "file_path": "src/intersection.rs", "rank": 31, "score": 28210.251711278146 }, { "content": "\n\n assert!(!comps.inside);\n\n }\n\n\n\n #[test]\n\n fn hit_when_intersection_on_inside() {\n\n let r = Ray::new(Tuple::point(0., 0., 0.), Tuple::vector(0., 0., 1.));\n\n let shape = Sphere::default_boxed();\n\n let i = Intersection::new(1., shape);\n\n let comps = i.prepare_computations(r);\n\n\n\n assert_eq!(comps.point, Tuple::point(0., 0., 1.));\n\n assert_eq!(comps.eyev, Tuple::vector(0., 0., -1.));\n\n assert!(comps.inside);\n\n assert_eq!(comps.normalv, Tuple::vector(0., 0., -1.));\n\n }\n\n\n\n #[test]\n\n fn hit_should_offset_point() {\n\n let r = Ray::new(Tuple::point(0., 0., -5.), Tuple::vector(0., 0., 1.));\n\n let transform = Matrix::translation(0., 0., 1.);\n\n let shape = Sphere::new_boxed(None, Some(transform));\n\n let i = Intersection::new(5., shape);\n\n let comps = i.prepare_computations(r);\n\n assert!(comps.over_point.z < - EPSILON / 2.);\n\n assert!(comps.point.z > comps.over_point.z);\n\n }\n\n}\n", "file_path": "src/intersection.rs", "rank": 32, "score": 28209.197073389838 }, { "content": "\n\nimpl Intersections {\n\n\n\n pub fn new(range: Vec<Intersection>) -> Intersections {\n\n let mut xs = Intersections { inner: range, current_hit: None };\n\n xs.inner.sort_by(|a, b| a.t.partial_cmp(&b.t).unwrap());\n\n for i in xs.inner.iter() {\n\n if i.t >= 0. 
{ \n\n xs.current_hit = Some(i.clone());\n\n break;\n\n };\n\n }\n\n xs\n\n }\n\n\n\n pub fn extend(&mut self, range: Intersections) {\n\n self.inner.extend(range.inner);\n\n match range.current_hit {\n\n Some(range_hit) =>\n\n match &self.current_hit {\n", "file_path": "src/intersection.rs", "rank": 33, "score": 28208.9564977681 }, { "content": " }\n\n\n\n #[test]\n\n fn precompute_state_of_intersection() {\n\n let r = Ray::new(Tuple::point(0., 0., -5.), Tuple::vector(0., 0., 1.));\n\n let shape = Sphere::default_boxed();\n\n let i = Intersection::new(4., shape);\n\n let comps = i.prepare_computations(r);\n\n\n\n assert_eq!(comps.t, i.t);\n\n assert_eq!(comps.point, Tuple::point(0., 0., -1.));\n\n assert_eq!(comps.eyev, Tuple::vector(0., 0., -1.));\n\n }\n\n\n\n #[test]\n\n fn hit_when_intersection_on_outside() {\n\n let r = Ray::new(Tuple::point(0., 0., -5.), Tuple::vector(0., 0., 1.));\n\n let shape = Sphere::default_boxed();\n\n let i = Intersection::new(4., shape);\n\n let comps = i.prepare_computations(r);\n", "file_path": "src/intersection.rs", "rank": 34, "score": 28206.577466125633 }, { "content": " None => self.current_hit = Some(range_hit.clone()),\n\n Some(i) => if i.t > range_hit.t { self.current_hit = Some(range_hit.clone());}\n\n }\n\n _ => ()\n\n }\n\n self.inner.sort_by(|a, b| a.t.partial_cmp(&b.t).unwrap());\n\n }\n\n\n\n pub fn len(&self) -> usize {\n\n self.inner.len()\n\n }\n\n\n\n pub fn hit(&self) -> Option<&Intersection> {\n\n match &self.current_hit {\n\n None => None,\n\n Some(i) => Some(i).clone()\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/intersection.rs", "rank": 35, "score": 28206.40950818451 }, { "content": " eyev,\n\n normalv,\n\n inside,\n\n over_point\n\n )\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct Intersections {\n\n inner: Vec<Intersection>,\n\n current_hit: Option<Intersection>\n\n}\n\n\n\nimpl ops::Index<usize> for Intersections {\n\n type Output = Intersection;\n\n fn index(&self, i: usize) -> &Self::Output {\n\n 
&self.inner[i]\n\n }\n\n}\n", "file_path": "src/intersection.rs", "rank": 36, "score": 28205.570020809475 }, { "content": " assert_eq!(3., xs[2].t);\n\n assert_eq!(4., xs[3].t);\n\n }\n\n\n\n #[test]\n\n fn intersect_sets_object_on_intersection() {\n\n let r = Ray::new(Tuple::point(0., 0., -5.), Tuple::vector(0., 0., 1.));\n\n let s = Sphere::default_boxed();\n\n let xs = s.intersect(r);\n\n\n\n assert_eq!(2, xs.len());\n\n assert_eq!(&xs[0].object, &s);\n\n assert_eq!(&xs[1].object, &s);\n\n }\n\n\n\n #[test]\n\n fn hit_all_intersections_positive_t() {\n\n let s = Sphere::default_boxed();\n\n let i1 = Intersection::new(1., s.clone());\n\n let i2 = Intersection::new(2., s);\n", "file_path": "src/intersection.rs", "rank": 37, "score": 28204.26011595856 }, { "content": " let i1 = Intersection::new(-2., s.clone());\n\n let i2 = Intersection::new(-1., s);\n\n let xs = Intersections::new(vec![i2, i1]);\n\n let i = xs.hit();\n\n\n\n assert_eq!(i, None);\n\n }\n\n\n\n #[test]\n\n fn hit_lowest_non_negative_intersection() {\n\n let s = Sphere::default_boxed();\n\n let i1 = Intersection::new(5., s.clone());\n\n let i2 = Intersection::new(7., s.clone());\n\n let i3 = Intersection::new(-3., s.clone());\n\n let i4 = Intersection::new(2., s);\n\n let xs = Intersections::new(vec![i1, i2, i3, i4.clone()]);\n\n let i = xs.hit().unwrap();\n\n\n\n assert_eq!(*i, i4);\n\n }\n", "file_path": "src/intersection.rs", "rank": 38, "score": 28202.827580614616 }, { "content": "\n\n #[test]\n\n fn extend_intersections_gets_union() {\n\n let s1 = Sphere::default_boxed();\n\n let i1 = Intersection::new(5., s1.clone());\n\n let i2 = Intersection::new(7., s1.clone());\n\n let i3 = Intersection::new(-3., s1.clone());\n\n let i4 = Intersection::new(2., s1);\n\n let mut xs1 = Intersections::new(vec![i1, i2, i3, i4]);\n\n\n\n let s2 = Sphere::default_boxed();\n\n let i5 = Intersection::new(-1., s2.clone());\n\n let i6 = Intersection::new(1., s2.clone());\n\n let i7 = Intersection::new(2., s2);\n\n 
let xs2 = Intersections::new(vec![i5, i6.clone(), i7]);\n\n\n\n xs1.extend(xs2); // xs2 is moved\n\n\n\n assert_eq!(xs1.len(), 7);\n\n assert_eq!(*xs1.hit().unwrap(), i6);\n", "file_path": "src/intersection.rs", "rank": 39, "score": 28202.11013903615 }, { "content": " let xs = Intersections::new(vec![i2, i1.clone()]);\n\n let i = xs.hit().unwrap();\n\n\n\n assert_eq!(*i, i1);\n\n }\n\n\n\n #[test]\n\n fn hit_some_intersections_negative_t() {\n\n let s = Sphere::default_boxed();\n\n let i1 = Intersection::new(-1., s.clone());\n\n let i2 = Intersection::new(1., s);\n\n let xs = Intersections::new(vec![i2.clone(), i1]);\n\n let i = xs.hit().unwrap();\n\n\n\n assert_eq!(*i, i2);\n\n }\n\n\n\n #[test]\n\n fn hit_all_intersections_negative_t() {\n\n let s = Sphere::default_boxed();\n", "file_path": "src/intersection.rs", "rank": 40, "score": 28201.537625331504 }, { "content": " let i2 = Intersection::new(2., s);\n\n let xs = Intersections::new(vec![i1, i2]);\n\n\n\n assert_eq!(2, xs.len());\n\n assert_eq!(1., xs[0].t);\n\n assert_eq!(2., xs[1].t);\n\n }\n\n\n\n #[test]\n\n fn aggregate_intersections_with_add() {\n\n let s = Sphere::default_boxed();\n\n let i1 = Intersection::new(1., s.clone());\n\n let i2 = Intersection::new(2., s.clone());\n\n let i3 = Intersection::new(3., s.clone());\n\n let i4 = Intersection::new(4., s);\n\n let xs = Intersections::new(vec![i1, i2, i3, i4]);\n\n\n\n assert_eq!(4, xs.len());\n\n assert_eq!(1., xs[0].t);\n\n assert_eq!(2., xs[1].t);\n", "file_path": "src/intersection.rs", "rank": 41, "score": 28200.497809596964 }, { "content": "use super::matrix::{Matrix, IDENTITY_MATRIX};\n\nuse super::tuple::Tuple;\n\n\n\nimpl Matrix {\n\n pub fn translation(x: f64, y: f64, z: f64) -> Matrix {\n\n let mut m = IDENTITY_MATRIX;\n\n m.set(0, 3, x);\n\n m.set(1, 3, y);\n\n m.set(2, 3, z);\n\n m\n\n }\n\n\n\n pub fn scaling(x: f64, y: f64, z: f64) -> Matrix {\n\n let mut m = IDENTITY_MATRIX;\n\n m.set(0, 0, x);\n\n m.set(1, 1, y);\n\n m.set(2, 2, z);\n\n 
m\n\n }\n\n\n", "file_path": "src/transform.rs", "rank": 42, "score": 28079.283070406334 }, { "content": " let left = forward.cross(&up.normalize());\n\n let true_up = left.cross(&forward);\n\n let orientation = Matrix::new(\n\n [ left.x, left.y, left.z, 0.],\n\n [ true_up.x, true_up.y, true_up.z, 0.],\n\n [-forward.x, -forward.y, -forward.z, 0.],\n\n [ 0., 0., 0., 1.]);\n\n orientation * Matrix::translation(-from.x, -from.y, -from.z)\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use std::f64::consts::*;\n\n use crate::tuple::ORIGO;\n\n\n\n #[test]\n\n fn multiply_by_translation_matrix() {\n\n let transform = Matrix::translation(5., -3., 2.);\n", "file_path": "src/transform.rs", "rank": 43, "score": 28077.62066714225 }, { "content": " let actual = t * p;\n\n assert_eq!(actual, Tuple::point(15., 0., 7.));\n\n }\n\n\n\n #[test]\n\n fn view_transformation_matrix_for_default_orientation() {\n\n let from = ORIGO;\n\n let to = Tuple::point(0., 0., -1.);\n\n let up = Tuple::vector(0., 1., 0.);\n\n let t = Matrix::view_transform(from, to, up);\n\n\n\n assert_eq!(t, IDENTITY_MATRIX);\n\n }\n\n\n\n #[test]\n\n fn view_transformation_matrix_looking_positive_z_direction() {\n\n let from = ORIGO;\n\n let to = Tuple::point(0., 0., 1.);\n\n let up = Tuple::vector(0., 1., 0.);\n\n let t = Matrix::view_transform(from, to, up);\n", "file_path": "src/transform.rs", "rank": 44, "score": 28076.891402952355 }, { "content": "\n\n assert_eq!(t, Matrix::scaling(-1., 1., -1.));\n\n }\n\n\n\n #[test]\n\n fn view_transformation_matrix_moves_world() {\n\n let from = Tuple::point(0., 0., 8.);\n\n let to = ORIGO;\n\n let up = Tuple::vector(0., 1., 0.);\n\n let t = Matrix::view_transform(from, to, up);\n\n\n\n assert_eq!(t, Matrix::translation(0., 0., -8.));\n\n }\n\n\n\n #[test]\n\n fn arbitrary_view_transformation_matrix() {\n\n let from = Tuple::point(1., 3., 2.);\n\n let to = Tuple::point(4., -2., 8.);\n\n let up = Tuple::vector(1., 1., 0.);\n\n let t = 
Matrix::view_transform(from, to, up);\n\n let expected = Matrix::new(\n\n [-0.50709, 0.50709, 0.67612, -2.36643],\n\n [ 0.76772, 0.60609, 0.12122, -2.82842],\n\n [-0.35857, 0.59761, -0.71714, 0.00000],\n\n [ 0.00000, 0.00000, 0.00000, 1.00000]);\n\n\n\n assert_eq!(t, expected);\n\n }\n\n}", "file_path": "src/transform.rs", "rank": 45, "score": 28074.00810805786 }, { "content": " m.set(0, 0, rad.cos());\n\n m.set(0, 1, -rad.sin());\n\n m.set(1, 0, rad.sin());\n\n m.set(1, 1, rad.cos());\n\n m\n\n }\n\n\n\n pub fn shearing(x_to_y: f64, x_to_z: f64, y_to_x: f64, y_to_z: f64, z_to_x: f64, z_to_y: f64) -> Matrix {\n\n let mut m = IDENTITY_MATRIX;\n\n m.set(0, 1, x_to_y);\n\n m.set(0, 2, x_to_z);\n\n m.set(1, 0, y_to_x);\n\n m.set(1, 2, y_to_z);\n\n m.set(2, 0, z_to_x);\n\n m.set(2, 1, z_to_y);\n\n m\n\n }\n\n\n\n pub fn view_transform(from: Tuple, to: Tuple, up: Tuple) -> Self {\n\n let forward = (to - from).normalize();\n", "file_path": "src/transform.rs", "rank": 46, "score": 28073.44673042472 }, { "content": " let transform = Matrix::translation(5., -3., 2.);\n\n let v = Tuple::vector(-3., 4., 5.);\n\n let actual = transform * v;\n\n\n\n assert_eq!(actual, v);\n\n }\n\n\n\n #[test]\n\n fn scaling_matrix_applied_to_point() {\n\n let transform = Matrix::scaling(2., 3., 4.);\n\n let p = Tuple::point(-4., 6., 8.);\n\n let actual = transform * p;\n\n let expected = Tuple::point(-8., 18., 32.);\n\n\n\n assert_eq!(actual, expected);\n\n }\n\n\n\n #[test]\n\n fn scaling_matrix_applied_to_vector() {\n\n let transform = Matrix::scaling(2., 3., 4.);\n", "file_path": "src/transform.rs", "rank": 47, "score": 28071.9898555122 }, { "content": " let expected = Tuple::point(2., 5., 4.);\n\n\n\n assert_eq!(actual, expected);\n\n }\n\n\n\n #[test]\n\n fn shearing_transformation_moves_y_in_proportion_to_z() {\n\n let transform = Matrix::shearing(0., 0., 0., 1., 0., 0.);\n\n let p = Tuple::point(2., 3., 4.);\n\n let actual = transform * p;\n\n let expected = Tuple::point(2., 7., 
4.);\n\n\n\n assert_eq!(actual, expected);\n\n }\n\n\n\n #[test]\n\n fn shearing_transformation_moves_z_in_proportion_to_x() {\n\n let transform = Matrix::shearing(0., 0., 0., 0., 1., 0.);\n\n let p = Tuple::point(2., 3., 4.);\n\n let actual = transform * p;\n", "file_path": "src/transform.rs", "rank": 48, "score": 28071.437835327408 }, { "content": " let expected = Tuple::point(5., 3., 4.);\n\n\n\n assert_eq!(actual, expected);\n\n }\n\n\n\n #[test]\n\n fn shearing_transformation_moves_x_in_proportion_to_z() {\n\n let transform = Matrix::shearing(0., 1., 0., 0., 0., 0.);\n\n let p = Tuple::point(2., 3., 4.);\n\n let actual = transform * p;\n\n let expected = Tuple::point(6., 3., 4.);\n\n\n\n assert_eq!(actual, expected);\n\n }\n\n\n\n #[test]\n\n fn shearing_transformation_moves_y_in_proportion_to_x() {\n\n let transform = Matrix::shearing(0., 0., 1., 0., 0., 0.);\n\n let p = Tuple::point(2., 3., 4.);\n\n let actual = transform * p;\n", "file_path": "src/transform.rs", "rank": 49, "score": 28071.437835327408 }, { "content": " let expected = Tuple::point(2., 3., 6.);\n\n\n\n assert_eq!(actual, expected);\n\n }\n\n\n\n #[test]\n\n fn shearing_transformation_moves_z_in_proportion_to_y() {\n\n let transform = Matrix::shearing(0., 0., 0., 0., 0., 1.);\n\n let p = Tuple::point(2., 3., 4.);\n\n let actual = transform * p;\n\n let expected = Tuple::point(2., 3., 7.);\n\n\n\n assert_eq!(actual, expected);\n\n }\n\n\n\n #[test]\n\n fn individual_transformations_applied_in_sequence() {\n\n let p = Tuple::point(1., 0., 1.);\n\n let a = Matrix::rotation_x(FRAC_PI_2);\n\n let b = Matrix::scaling(5., 5., 5.);\n", "file_path": "src/transform.rs", "rank": 50, "score": 28071.43019612279 }, { "content": " let p = Tuple::point(-3., 4., 5.);\n\n let actual = transform * p;\n\n let expected = Tuple::point(2., 1., 7.);\n\n \n\n assert_eq!(actual, expected);\n\n }\n\n\n\n #[test]\n\n fn multiply_by_inverted_translation_matrix() {\n\n let transform = Matrix::translation(5., -3., 2.);\n\n 
let inv = transform.inverse().unwrap();\n\n let p = Tuple::point(-3., 4., 5.);\n\n let actual = inv * p;\n\n let expected = Tuple::point(-8., 7., 3.);\n\n\n\n assert_eq!(actual, expected);\n\n }\n\n\n\n #[test]\n\n fn translation_does_not_change_vector() {\n", "file_path": "src/transform.rs", "rank": 51, "score": 28070.795392349915 }, { "content": " let v = Tuple::vector(-4., 6., 8.);\n\n let actual = transform * v;\n\n let expected = Tuple::vector(-8., 18., 32.);\n\n\n\n assert_eq!(actual, expected);\n\n }\n\n\n\n #[test]\n\n fn inverted_scaling_matrix_applied_to_vector() {\n\n let transform = Matrix::scaling(2., 3., 4.);\n\n let inv = transform.inverse().unwrap();\n\n let v = Tuple::vector(-4., 6., 8.);\n\n let actual = inv * v;\n\n let expected = Tuple::vector(-2., 2., 2.);\n\n\n\n assert_eq!(actual, expected);\n\n }\n\n\n\n #[test]\n\n fn reflection_is_scaling_by_negative_value() {\n", "file_path": "src/transform.rs", "rank": 52, "score": 28070.628481368287 }, { "content": " assert_eq!(actual_full_quarter, Tuple::point(1., 0., 0.));\n\n }\n\n\n\n #[test]\n\n fn rotate_point_around_z_axis() {\n\n let p = Tuple::point(0., 1., 0.);\n\n let half_quarter = Matrix::rotation_z(FRAC_PI_4);\n\n let full_quarter = Matrix::rotation_z(FRAC_PI_2);\n\n let actual_half_quarter = half_quarter * p;\n\n let actual_full_quarter = full_quarter * p;\n\n\n\n assert_eq!(actual_half_quarter, Tuple::point(-2.0_f64.sqrt()/2., 2.0_f64.sqrt()/2., 0.));\n\n assert_eq!(actual_full_quarter, Tuple::point(-1., 0., 0.));\n\n }\n\n\n\n #[test]\n\n fn shearing_transformation_moves_x_in_proportion_to_y() {\n\n let transform = Matrix::shearing(1., 0., 0., 0., 0., 0.);\n\n let p = Tuple::point(2., 3., 4.);\n\n let actual = transform * p;\n", "file_path": "src/transform.rs", "rank": 53, "score": 28069.50770954925 }, { "content": " let c = Matrix::translation(10., 5., 7.);\n\n\n\n let p2 = a * p;\n\n assert_eq!(p2, Tuple::point(1., -1., 0.));\n\n\n\n let p3 = b * p2;\n\n assert_eq!(p3, 
Tuple::point(5., -5., 0.));\n\n\n\n let p4 = c * p3;\n\n assert_eq!(p4, Tuple::point(15., 0., 7.));\n\n }\n\n\n\n #[test]\n\n fn chained_transformations_applied_in_reverse_order() {\n\n let p = Tuple::point(1., 0., 1.);\n\n let a = Matrix::rotation_x(FRAC_PI_2);\n\n let b = Matrix::scaling(5., 5., 5.);\n\n let c = Matrix::translation(10., 5., 7.);\n\n\n\n let t = c * b * a;\n", "file_path": "src/transform.rs", "rank": 54, "score": 28069.484921370047 }, { "content": " pub fn rotation_x(rad: f64) -> Matrix {\n\n let mut m = IDENTITY_MATRIX;\n\n m.set(1, 1, rad.cos());\n\n m.set(1, 2, -rad.sin());\n\n m.set(2, 1, rad.sin());\n\n m.set(2, 2, rad.cos());\n\n m\n\n }\n\n\n\n pub fn rotation_y(rad: f64) -> Matrix {\n\n let mut m = IDENTITY_MATRIX;\n\n m.set(0, 0, rad.cos());\n\n m.set(0, 2, rad.sin());\n\n m.set(2, 0, -rad.sin());\n\n m.set(2, 2, rad.cos());\n\n m\n\n }\n\n\n\n pub fn rotation_z(rad: f64) -> Matrix {\n\n let mut m = IDENTITY_MATRIX;\n", "file_path": "src/transform.rs", "rank": 55, "score": 28069.173736057743 }, { "content": " let transform = Matrix::scaling(-1., 1., 1.);\n\n let p = Tuple::point(2., 3., 4.);\n\n let actual = transform * p;\n\n let expected = Tuple::point(-2., 3., 4.);\n\n\n\n assert_eq!(actual, expected);\n\n }\n\n\n\n #[test]\n\n fn rotate_point_around_x_axis() {\n\n let p = Tuple::point(0., 1., 0.);\n\n let half_quarter = Matrix::rotation_x(FRAC_PI_4);\n\n let full_quarter = Matrix::rotation_x(FRAC_PI_2);\n\n let actual_half_quarter = half_quarter * p;\n\n let actual_full_quarter = full_quarter * p;\n\n\n\n assert_eq!(actual_half_quarter, Tuple::point(0., 2.0_f64.sqrt()/2., 2.0_f64.sqrt()/2.));\n\n assert_eq!(actual_full_quarter, Tuple::point(0., 0., 1.));\n\n }\n\n\n", "file_path": "src/transform.rs", "rank": 56, "score": 28068.627501750572 }, { "content": " #[test]\n\n fn inverse_rotate_point_around_x_axis() {\n\n let p = Tuple::point(0., 1., 0.);\n\n let half_quarter = Matrix::rotation_x(FRAC_PI_4);\n\n let inv = 
half_quarter.inverse().unwrap();\n\n let actual = inv * p;\n\n let expected = Tuple::point(0., 2.0_f64.sqrt()/2., -2.0_f64.sqrt()/2.);\n\n\n\n assert_eq!(actual, expected);\n\n }\n\n\n\n #[test]\n\n fn rotate_point_around_y_axis() {\n\n let p = Tuple::point(0., 0., 1.);\n\n let half_quarter = Matrix::rotation_y(FRAC_PI_4);\n\n let full_quarter = Matrix::rotation_y(FRAC_PI_2);\n\n let actual_half_quarter = half_quarter * p;\n\n let actual_full_quarter = full_quarter * p;\n\n\n\n assert_eq!(actual_half_quarter, Tuple::point(2.0_f64.sqrt()/2., 0., 2.0_f64.sqrt()/2.));\n", "file_path": "src/transform.rs", "rank": 57, "score": 28066.387426024205 }, { "content": "use core::ops;\n\n\n\n#[derive(Debug, Copy, Clone)]\n\npub struct Tuple {\n\n pub x: f64,\n\n pub y: f64,\n\n pub z: f64,\n\n pub w: f64\n\n}\n\n\n\npub const ORIGO: Tuple = Tuple { x: 0., y: 0., z: 0., w: 1. };\n\npub const VECTOR_Y_UP: Tuple = Tuple { x: 0., y: 1., z: 0., w: 0. };\n\n\n\nimpl PartialEq for Tuple {\n\n fn eq(&self, other: &Self) -> bool {\n\n super::approx_eq(self.x, other.x) &&\n\n super::approx_eq(self.y, other.y) &&\n\n super::approx_eq(self.z, other.z) &&\n\n self.w == other.w\n\n }\n", "file_path": "src/tuple.rs", "rank": 58, "score": 28015.650224699948 }, { "content": " self.y * t.y +\n\n self.z * t.z +\n\n self.w * t.w\n\n }\n\n\n\n pub fn cross(&self, t: &Tuple) -> Self {\n\n Tuple::vector(\n\n self.y * t.z - self.z * t.y,\n\n self.z * t.x - self.x * t.z,\n\n self.x * t.y - self.y * t.x)\n\n }\n\n\n\n pub fn reflect(&self, normal: Tuple) -> Self {\n\n *self - normal * 2. 
* self.dot(&normal)\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n", "file_path": "src/tuple.rs", "rank": 59, "score": 28012.701506674457 }, { "content": "}\n\n\n\nimpl From<[f64; 4]> for Tuple {\n\n fn from(item: [f64; 4]) -> Self {\n\n Tuple { x: item[0], y: item[1], z: item[2], w: item[3] }\n\n }\n\n}\n\n\n\nimpl Tuple {\n\n pub fn new(x: f64, y: f64, z: f64, w: f64) -> Self {\n\n Self {x, y, z, w}\n\n }\n\n\n\n pub fn point(x: f64, y: f64, z: f64) -> Self {\n\n Tuple::new(x, y, z, 1.)\n\n }\n\n\n\n pub fn vector(x: f64, y: f64, z: f64) -> Self {\n\n Tuple::new(x, y, z, 0.)\n\n }\n", "file_path": "src/tuple.rs", "rank": 60, "score": 28005.128477779774 }, { "content": "\n\n pub fn is_point(&self) -> bool {\n\n self.w == 1.\n\n }\n\n\n\n pub fn is_vector(&self) -> bool {\n\n self.w == 0.\n\n }\n\n\n\n pub fn magnitude(&self) -> f64 {\n\n (self.x * self.x + self.y * self.y + self.z * self.z).sqrt()\n\n }\n\n\n\n pub fn normalize(&self) -> Tuple {\n\n let m = self.magnitude();\n\n Tuple::vector(self.x / m, self.y / m, self.z / m)\n\n }\n\n\n\n pub fn dot(&self, t: &Tuple) -> f64 {\n\n self.x * t.x +\n", "file_path": "src/tuple.rs", "rank": 61, "score": 28002.840795098466 }, { "content": " assert_eq!(v.w, 0.);\n\n assert!(!v.is_point());\n\n assert!(v.is_vector());\n\n }\n\n\n\n #[test]\n\n fn origo_is_point() {\n\n assert!(ORIGO.is_point());\n\n }\n\n \n\n #[test]\n\n fn point_creates_tuple_with_w_1() {\n\n let p = Tuple::point(4., -4., 3.);\n\n let pt = Tuple { x: 4., y: -4., z: 3., w: 1. 
};\n\n\n\n assert_eq!(p, pt);\n\n }\n\n\n\n #[test]\n\n fn vector_creates_tuple_with_w_0() {\n", "file_path": "src/tuple.rs", "rank": 62, "score": 28001.49769532151 }, { "content": " z: self.z - rhs.z,\n\n w: self.w - rhs.w\n\n }\n\n }\n\n}\n\n\n\nimpl ops::Neg for Tuple {\n\n type Output = Tuple;\n\n fn neg(self) -> Tuple {\n\n Tuple { \n\n x: -self.x ,\n\n y: -self.y,\n\n z: -self.z,\n\n w: -self.w\n\n }\n\n }\n\n}\n\n\n\nimpl ops::Mul<f64> for Tuple {\n\n type Output = Tuple;\n", "file_path": "src/tuple.rs", "rank": 63, "score": 27999.107879652085 }, { "content": "}\n\n\n\nimpl ops::Add<Tuple> for Tuple {\n\n type Output = Tuple;\n\n fn add(self, rhs: Tuple) -> Tuple {\n\n Tuple { \n\n x: self.x + rhs.x,\n\n y: self.y + rhs.y,\n\n z: self.z + rhs.z,\n\n w: self.w + rhs.w\n\n }\n\n }\n\n}\n\n\n\nimpl ops::Sub<Tuple> for Tuple {\n\n type Output = Tuple;\n\n fn sub(self, rhs: Tuple) -> Tuple {\n\n Tuple { \n\n x: self.x - rhs.x,\n\n y: self.y - rhs.y,\n", "file_path": "src/tuple.rs", "rank": 64, "score": 27998.84323343316 }, { "content": "\n\n #[test]\n\n fn negating_a_tuple() {\n\n let a = Tuple { x: 1., y: -2., z: 3., w: -4. };\n\n\n\n let expected = Tuple { x: -1., y: 2., z: -3., w: 4. };\n\n let actual = -a;\n\n\n\n assert_eq!(expected, actual);\n\n }\n\n\n\n #[test]\n\n fn multiplying_tuple_by_scalar() {\n\n let a = Tuple { x: 1., y: -2., z: 3., w: -4. };\n\n\n\n let expected = Tuple { x: 3.5, y: -7., z: 10.5, w: -14. };\n\n let actual = a * 3.5;\n\n\n\n assert_eq!(expected, actual);\n\n }\n", "file_path": "src/tuple.rs", "rank": 65, "score": 27998.38890545219 }, { "content": "\n\n #[test]\n\n fn multiplying_tuple_by_fraction() {\n\n let a = Tuple { x: 1., y: -2., z: 3., w: -4. };\n\n\n\n let expected = Tuple { x: 0.5, y: -1., z: 1.5, w: -2. };\n\n let actual = a * 0.5;\n\n\n\n assert_eq!(expected, actual);\n\n }\n\n\n\n #[test]\n\n fn dividing_tuple_by_scalar() {\n\n let a = Tuple { x: 1., y: -2., z: 3., w: -4. 
};\n\n\n\n let expected = Tuple { x: 0.5, y: -1., z: 1.5, w: -2. };\n\n let actual = a / 2.0;\n\n\n\n assert_eq!(expected, actual);\n\n }\n", "file_path": "src/tuple.rs", "rank": 66, "score": 27998.343984029507 }, { "content": " assert_eq!(expected, v.normalize());\n\n }\n\n\n\n #[test]\n\n fn normalizing_vector_1_2_3() {\n\n let v = Tuple::vector(1., 2., 3.);\n\n let expected = Tuple::vector(0.26726, 0.53452, 0.80178);\n\n assert_eq!(expected, v.normalize());\n\n }\n\n\n\n #[test]\n\n fn magnitude_of_normalized_vector_is_1() {\n\n let v = Tuple::vector(1., 2., 3.);\n\n let norm = v.normalize();\n\n assert_eq!(1., norm.magnitude());\n\n }\n\n\n\n #[test]\n\n fn dot_product_of_two_tuples() {\n\n let a = Tuple::vector(1., 2., 3.);\n", "file_path": "src/tuple.rs", "rank": 67, "score": 27998.20144666368 }, { "content": " let v = Tuple::vector(4., -4., 3.);\n\n let vt = Tuple { x: 4., y: -4., z: 3., w: 0. };\n\n\n\n assert_eq!(v, vt);\n\n }\n\n\n\n #[test]\n\n fn adding_two_tuples() {\n\n let a1 = Tuple { x: 3., y: -2., z: 5., w: 1. };\n\n let a2 = Tuple { x: -2., y: 3., z: 1., w: 0. };\n\n\n\n let expected = Tuple { x: 1., y: 1., z: 6., w: 1. 
};\n\n let actual = a1 + a2;\n\n\n\n assert_eq!(expected, actual);\n\n }\n\n\n\n #[test]\n\n fn subtracting_two_points() {\n\n let p1 = Tuple::point(3., 2., 1.);\n", "file_path": "src/tuple.rs", "rank": 68, "score": 27998.138381994773 }, { "content": "\n\n #[test]\n\n fn computing_magnitude_of_vector_1_0_0() {\n\n let v = Tuple::vector(1., 0., 0.);\n\n\n\n assert_eq!(1., v.magnitude());\n\n }\n\n\n\n #[test]\n\n fn computing_magnitude_of_vector_0_1_0() {\n\n let v = Tuple::vector(0., 1., 0.);\n\n\n\n assert_eq!(1., v.magnitude());\n\n }\n\n\n\n #[test]\n\n fn computing_magnitude_of_vector_0_0_1() {\n\n let v = Tuple::vector(0., 0., 1.);\n\n\n\n assert_eq!(1., v.magnitude());\n", "file_path": "src/tuple.rs", "rank": 69, "score": 27998.095165745708 }, { "content": "\n\n #[test]\n\n fn tuple_is_point() {\n\n let p = Tuple { x: 4.3, y: -4.2, z: 3.1, w: 1. };\n\n\n\n assert_eq!(p.x, 4.3);\n\n assert_eq!(p.y, -4.2);\n\n assert_eq!(p.z, 3.1);\n\n assert_eq!(p.w, 1.);\n\n assert!(p.is_point());\n\n assert!(!p.is_vector());\n\n }\n\n\n\n #[test]\n\n fn tuple_is_vector() {\n\n let v = Tuple { x: 4.3, y: -4.2, z: 3.1, w: 0. 
};\n\n\n\n assert_eq!(v.x, 4.3);\n\n assert_eq!(v.y, -4.2);\n\n assert_eq!(v.z, 3.1);\n", "file_path": "src/tuple.rs", "rank": 70, "score": 27997.99780224984 }, { "content": " let p2 = Tuple::point(5., 6., 7.);\n\n\n\n let expected = Tuple::vector(-2., -4., -6.);\n\n let actual = p1 - p2;\n\n\n\n assert_eq!(expected, actual);\n\n }\n\n\n\n #[test]\n\n fn subtracting_a_vector_from_a_point() {\n\n let p = Tuple::point(3., 2., 1.);\n\n let v = Tuple::vector(5., 6., 7.);\n\n\n\n let expected = Tuple::point(-2., -4., -6.);\n\n let actual = p - v;\n\n\n\n assert_eq!(expected, actual);\n\n }\n\n\n\n #[test]\n", "file_path": "src/tuple.rs", "rank": 71, "score": 27997.97339490865 }, { "content": " }\n\n\n\n #[test]\n\n fn computing_magnitude_of_vector_1_2_3() {\n\n let v = Tuple::vector(1., 2., 3.);\n\n let expected = 14_f64.sqrt();\n\n assert_eq!(expected, v.magnitude());\n\n }\n\n\n\n #[test]\n\n fn computing_magnitude_of_vector_1_2_3_neg() {\n\n let v = Tuple::vector(-1., -2., -3.);\n\n let expected = 14_f64.sqrt();\n\n assert_eq!(expected, v.magnitude());\n\n }\n\n\n\n #[test]\n\n fn normalizing_vector_4_0_0_gives_1_0_0() {\n\n let v = Tuple::vector(4., 0., 0.);\n\n let expected = Tuple::vector(1., 0., 0.);\n", "file_path": "src/tuple.rs", "rank": 72, "score": 27997.955240578925 }, { "content": " fn mul(self, rhs: f64) -> Tuple {\n\n Tuple { \n\n x: self.x * rhs,\n\n y: self.y * rhs,\n\n z: self.z * rhs,\n\n w: self.w * rhs\n\n }\n\n }\n\n}\n\n\n\nimpl ops::Div<f64> for Tuple {\n\n type Output = Tuple;\n\n fn div(self, rhs: f64) -> Tuple {\n\n Tuple { \n\n x: self.x / rhs,\n\n y: self.y / rhs,\n\n z: self.z / rhs,\n\n w: self.w / rhs\n\n }\n\n }\n", "file_path": "src/tuple.rs", "rank": 73, "score": 27997.920403111144 }, { "content": " let b = Tuple::vector(2., 3., 4.);\n\n let actual = a.dot(&b);\n\n assert_eq!(20., actual);\n\n }\n\n\n\n #[test]\n\n fn cross_product_of_two_vectors() {\n\n let a = Tuple::vector(1., 2., 3.);\n\n let b = Tuple::vector(2., 3., 4.);\n\n\n\n 
let actual_ab = a.cross(&b);\n\n assert_eq!(Tuple::vector(-1., 2., -1.), actual_ab);\n\n let actual_ba = b.cross(&a);\n\n assert_eq!(Tuple::vector(1., -2., 1.), actual_ba);\n\n }\n\n\n\n #[test]\n\n fn reflecting_vector_approaching_45_degrees() {\n\n let v = Tuple::vector(1., -1., 0.);\n\n let n = Tuple::vector(0., 1., 0.);\n", "file_path": "src/tuple.rs", "rank": 74, "score": 27997.748579401195 }, { "content": " let r = v.reflect(n);\n\n\n\n assert_eq!(r, Tuple::vector(1., 1., 0.));\n\n }\n\n\n\n #[test]\n\n fn reflecting_vector_off_slanted_surface() {\n\n let v = Tuple::vector(0., -1., 0.);\n\n let pv = 2.0f64.sqrt() / 2.0;\n\n let n = Tuple::vector(pv, pv, 0.);\n\n let r = v.reflect(n);\n\n\n\n assert_eq!(r, Tuple::vector(1., 0., 0.));\n\n }\n\n}", "file_path": "src/tuple.rs", "rank": 75, "score": 27997.38733173659 }, { "content": " fn subtracting_a_vector_from_a_vector() {\n\n let v1 = Tuple::vector(3., 2., 1.);\n\n let v2 = Tuple::vector(5., 6., 7.);\n\n\n\n let expected = Tuple::vector(-2., -4., -6.);\n\n let actual = v1 - v2;\n\n\n\n assert_eq!(expected, actual);\n\n }\n\n\n\n #[test]\n\n fn subtracting_a_vector_from_the_zero_vector() {\n\n let zero = Tuple::vector(0., 0., 0.);\n\n let v = Tuple::vector(1., -2., 3.);\n\n\n\n let expected = Tuple::vector(-1., 2., -3.);\n\n let actual = zero - v;\n\n\n\n assert_eq!(expected, actual);\n\n }\n", "file_path": "src/tuple.rs", "rank": 76, "score": 27997.063621877227 }, { "content": "fn camera_render_world(filename: &str) {\n\n let floor_material = Material::new(Color::new(1., 0.9, 0.9), DEFAULT_AMBIENT, DEFAULT_DIFFUSE, 0., DEFAULT_SHININESS, None);\n\n let floor_transform = Matrix::scaling(10., 0.01, 10.);\n\n let floor = Sphere::new_boxed(Some(floor_material.clone()), Some(floor_transform));\n\n\n\n let left_wall_transform = \n\n Matrix::translation(0., 0., 5.) 
* \n\n Matrix::rotation_y(-FRAC_PI_4) * \n\n Matrix::rotation_x(FRAC_PI_2) * \n\n Matrix::scaling(10., 0.01, 10.);\n\n let left_wall = Sphere::new_boxed(Some(floor_material.clone()), Some(left_wall_transform));\n\n\n\n let right_wall_transform = \n\n Matrix::translation(0., 0., 5.) *\n\n Matrix::rotation_y(FRAC_PI_4) *\n\n Matrix::rotation_x(FRAC_PI_2) *\n\n Matrix::scaling(10., 0.01, 10.);\n\n let right_wall = Sphere::new_boxed(Some(floor_material), Some(right_wall_transform));\n\n\n\n let middle_transform = Matrix::translation(-0.5, 1., 0.5);\n", "file_path": "src/main.rs", "rank": 77, "score": 27928.26786413931 }, { "content": "use core::ops;\n\nuse super::tuple::Tuple;\n\n\n\n#[derive(Debug, Copy, Clone)]\n\npub struct Row {\n\n inner: [f64; 4],\n\n size: usize\n\n}\n\n\n\nimpl ops::Index<usize> for Row {\n\n type Output = f64;\n\n fn index(&self, col: usize) -> &Self::Output {\n\n if col >= self.size { panic!(\"Index out-of-bounds\") }\n\n &self.inner[col]\n\n }\n\n}\n\n\n\nimpl PartialEq for Row {\n\n fn eq(&self, other: &Self) -> bool {\n\n (0..self.size).all(|col| super::approx_eq(self[col], other[col]))\n", "file_path": "src/matrix.rs", "rank": 78, "score": 27624.034149684216 }, { "content": " fn empty(&self) -> Self {\n\n match self.size {\n\n 2 => Matrix::new_empty2(),\n\n 3 => Matrix::new_empty3(),\n\n 4 => Matrix::new_empty4(),\n\n _ => { panic!(\"bad dimension\") }\n\n }\n\n }\n\n\n\n pub fn set(&mut self, row: usize, col: usize, value: f64) {\n\n self.inner[row].inner[col] = value;\n\n }\n\n\n\n fn tuple(&self, row: usize) -> Tuple {\n\n let r = &self[row];\n\n Tuple::new(r[0], r[1], r[2], r[3])\n\n }\n\n\n\n pub fn transpose(&self) -> Self {\n\n let mut m = self.empty();\n", "file_path": "src/matrix.rs", "rank": 79, "score": 27620.5405320051 }, { "content": " inverse.set(col, row, self.cofactor(row, col) / det);\n\n }\n\n }\n\n Option::Some(inverse)\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn 
construct_4x4_matrix()\n\n {\n\n let m = Matrix::new(\n\n [1., 2., 3., 4.],\n\n [5.5, 6.5, 7.5, 8.5],\n\n [9., 10., 11., 12.],\n\n [13.5, 14.5, 15.5, 16.5]);\n\n assert_eq!(1., m[0][0]);\n", "file_path": "src/matrix.rs", "rank": 80, "score": 27618.117860064685 }, { "content": " }\n\n}\n\n\n\n#[derive(Debug, Copy, Clone)]\n\npub struct Matrix {\n\n inner: [Row; 4],\n\n pub size: usize\n\n}\n\n\n\npub const IDENTITY_MATRIX: Matrix = Matrix { \n\n inner: [\n\n Row { inner: [1., 0., 0., 0.], size: 4 }, \n\n Row { inner: [0., 1., 0., 0.], size: 4 },\n\n Row { inner: [0., 0., 1., 0.], size: 4 },\n\n Row { inner: [0., 0., 0., 1.], size: 4 } ], \n\n size: 4 };\n\n\n\nimpl ops::Mul<Matrix> for Matrix {\n\n type Output = Matrix;\n\n fn mul(self, rhs: Matrix) -> Matrix {\n", "file_path": "src/matrix.rs", "rank": 81, "score": 27617.53328207296 }, { "content": " }\n\n}\n\n\n\nimpl ops::Index<usize> for Matrix {\n\n type Output = Row;\n\n fn index(&self, row: usize) -> &Self::Output {\n\n if row >= self.size { panic!(\"Index out-of-bounds\") }\n\n &self.inner[row]\n\n }\n\n}\n\n\n\nimpl PartialEq for Matrix {\n\n fn eq(&self, other: &Self) -> bool {\n\n self.size == other.size &&\n\n (0..self.size).all(|row| self[row] == other[row])\n\n }\n\n}\n\n\n\nimpl Default for Matrix {\n\n fn default() -> Self { IDENTITY_MATRIX }\n", "file_path": "src/matrix.rs", "rank": 82, "score": 27616.555577000145 }, { "content": " }\n\n\n\n fn submatrix(&self, row: usize, col: usize) -> Self {\n\n let size = self.size;\n\n let mut m = match size {\n\n 4 => Matrix::new_empty3(),\n\n 3 => Matrix::new_empty2(),\n\n _ => { panic!(\"Invalid matrix size, only 3x3 and 4x4 supported\") }\n\n };\n\n let mut r_new = 0;\n\n \n\n for r in 0..size {\n\n if r == row { continue; }\n\n let mut c_new = 0;\n\n for c in 0..size {\n\n if c == col { continue; }\n\n m.set(r_new, c_new, self[r][c]);\n\n c_new += 1;\n\n }\n\n r_new += 1;\n", "file_path": "src/matrix.rs", "rank": 83, "score": 27615.083852806176 }, { 
"content": " [16., 26., 46., 42.]);\n\n\n\n let result = a * b;\n\n assert_eq!(expected, result);\n\n }\n\n\n\n #[test]\n\n fn tuple_from_matrix_row()\n\n {\n\n let a = Matrix::new (\n\n [1., 2., 3., 4.],\n\n [5., 6., 7., 8.],\n\n [9., 8., 7., 6.],\n\n [5., 4., 3., 2.]);\n\n let b = Tuple::new(5., 4., 3., 2.);\n\n\n\n assert_eq!(b, a.tuple(3));\n\n } \n\n\n\n #[test]\n", "file_path": "src/matrix.rs", "rank": 84, "score": 27614.24793359424 }, { "content": " let mut m = self.empty();\n\n let size = self.size;\n\n for row in 0..size {\n\n for col in 0..size {\n\n let a = (0..size).map(|i| self[row][i] * rhs[i][col]).sum();\n\n m.set(row, col, a);\n\n }\n\n }\n\n m\n\n }\n\n}\n\n\n\nimpl ops::Mul<Tuple> for Matrix {\n\n type Output = Tuple;\n\n fn mul(self, rhs: Tuple) -> Tuple {\n\n Tuple::new(\n\n self.tuple(0).dot(&rhs),\n\n self.tuple(1).dot(&rhs),\n\n self.tuple(2).dot(&rhs),\n\n self.tuple(3).dot(&rhs))\n", "file_path": "src/matrix.rs", "rank": 85, "score": 27614.20643993077 }, { "content": " pub fn new_empty4() -> Self {\n\n Matrix::new(Matrix::EMPTY_ROW, Matrix::EMPTY_ROW, Matrix::EMPTY_ROW, Matrix::EMPTY_ROW)\n\n }\n\n\n\n pub fn new_empty3() -> Self {\n\n Matrix::new3(Matrix::EMPTY_ROW3, Matrix::EMPTY_ROW3, Matrix::EMPTY_ROW3)\n\n }\n\n\n\n pub fn new_empty2() -> Self {\n\n Matrix::new2(Matrix::EMPTY_ROW2, Matrix::EMPTY_ROW2)\n\n }\n\n\n\n fn coerce_array2(arr: [f64; 2]) -> Row {\n\n Row { inner: [arr[0], arr[1], 0., 0.], size: 2 }\n\n }\n\n\n\n fn coerce_array3(arr: [f64; 3]) -> Row {\n\n Row { inner: [arr[0], arr[1], arr[2], 0.], size: 3 }\n\n }\n\n\n", "file_path": "src/matrix.rs", "rank": 86, "score": 27614.008826402533 }, { "content": " fn multiply_matrix_by_tuple()\n\n {\n\n let a = Matrix::new(\n\n [1., 2., 3., 4.],\n\n [2., 4., 4., 2.],\n\n [8., 6., 4., 1.],\n\n [0., 0., 0., 1.]);\n\n let b = Tuple::new(1., 2., 3., 1.);\n\n let expected = Tuple::new(18., 24., 33., 1.);\n\n \n\n assert_eq!(expected, a * b);\n\n }\n\n\n\n #[test]\n\n fn 
multiply_matrix_by_identity_matrix()\n\n {\n\n let a = Matrix::new(\n\n [0., 1., 2., 8.],\n\n [1., 2., 4., 8.],\n\n [2., 4., 8., 16.],\n", "file_path": "src/matrix.rs", "rank": 87, "score": 27613.992537492548 }, { "content": " assert_eq!(-4071., a.determinant());\n\n }\n\n\n\n #[test]\n\n fn matrix_is_invertible()\n\n {\n\n let a = Matrix::new(\n\n [6., 4., 4., 4.],\n\n [5., 5., 7., 6.],\n\n [4., -9., 3., -7.],\n\n [9., 1., 7., -6.]);\n\n assert_eq!(-2120., a.determinant());\n\n assert_ne!(Option::None, a.inverse());\n\n }\n\n\n\n #[test]\n\n fn matrix_is_not_invertible()\n\n {\n\n let a = Matrix::new(\n\n [-4., 2., -2., -3.],\n", "file_path": "src/matrix.rs", "rank": 88, "score": 27613.792578267505 }, { "content": "}\n\n\n\nimpl Matrix {\n\n const EMPTY_ROW: [f64; 4] = [0., 0., 0., 0.];\n\n const EMPTY_ROW3: [f64; 3] = [0., 0., 0.];\n\n const EMPTY_ROW2: [f64; 2] = [0., 0.];\n\n\n\n pub fn new(row0: [f64; 4], row1: [f64; 4], row2 : [f64; 4], row3 : [f64; 4]) -> Self\n\n {\n\n Self { \n\n inner: [ \n\n Row { inner: row0, size: 4 }, \n\n Row { inner: row1, size: 4 }, \n\n Row { inner: row2, size: 4 }, \n\n Row { inner: row3, size: 4 }], \n\n size: 4}\n\n }\n\n\n\n pub fn new3(row0: [f64; 3], row1: [f64; 3], row2 : [f64; 3]) -> Self\n\n {\n", "file_path": "src/matrix.rs", "rank": 89, "score": 27613.625063502965 }, { "content": " Self { \n\n inner: [ \n\n Matrix::coerce_array3(row0), \n\n Matrix::coerce_array3(row1), \n\n Matrix::coerce_array3(row2), \n\n Matrix::coerce_array3(Matrix::EMPTY_ROW3)], \n\n size: 3}\n\n }\n\n\n\n pub fn new2(row0: [f64; 2], row1: [f64; 2]) -> Self\n\n {\n\n Self { \n\n inner: [ \n\n Matrix::coerce_array2(row0), \n\n Matrix::coerce_array2(row1), \n\n Matrix::coerce_array2(Matrix::EMPTY_ROW2), \n\n Matrix::coerce_array2(Matrix::EMPTY_ROW2)], \n\n size: 2 }\n\n }\n\n\n", "file_path": "src/matrix.rs", "rank": 90, "score": 27613.316882838353 }, { "content": " }\n\n m\n\n }\n\n\n\n fn minor(&self, row: usize, col: usize) -> f64 {\n\n 
self.submatrix(row, col).determinant()\n\n }\n\n\n\n fn cofactor(&self, row: usize, col: usize) -> f64 {\n\n let minor = self.minor(row, col);\n\n if (row + col) & 1 == 1 { -minor } else { minor }\n\n }\n\n\n\n pub fn inverse(&self) -> Option<Matrix> {\n\n let det = self.determinant();\n\n if det == 0.0 { return Option::None; }\n\n let size = self.size;\n\n let mut inverse = self.empty();\n\n for row in 0..size {\n\n for col in 0..size {\n", "file_path": "src/matrix.rs", "rank": 91, "score": 27612.187949618852 }, { "content": " [9., 6., 2., 6.],\n\n [0., -5., 1., -5.],\n\n [0., 0., 0., 0.]);\n\n assert_eq!(0., a.determinant());\n\n assert_eq!(Option::None, a.inverse());\n\n }\n\n\n\n #[test]\n\n fn inverse_of_matrix() {\n\n let a = Matrix::new(\n\n [-5., 2., 6., -8.],\n\n [1., -5., 1., 8.],\n\n [7., 7., -6., -7.],\n\n [1., -3., 7., 4.]);\n\n let b = a.inverse().unwrap();\n\n assert_eq!(532., a.determinant());\n\n assert_eq!(-160., a.cofactor(2, 3));\n\n assert_eq!(-160.0 / 532., b[3][2]);\n\n assert_eq!(105., a.cofactor(3, 2));\n\n assert_eq!(105.0 / 532., b[2][3]);\n", "file_path": "src/matrix.rs", "rank": 92, "score": 27611.834378561656 }, { "content": " }\n\n\n\n #[test]\n\n fn transpose_identity_matrix()\n\n {\n\n assert_eq!(IDENTITY_MATRIX, IDENTITY_MATRIX.transpose());\n\n }\n\n\n\n #[test]\n\n fn determinant_2x2_matrix()\n\n {\n\n let a = Matrix::new2([1., 5.], [-3., 2.]);\n\n assert_eq!(17., a.determinant());\n\n }\n\n\n\n #[test]\n\n fn submatrix_of_3x3_is_2x2_matrix() {\n\n let a = Matrix::new3(\n\n [1., 5., 0.],\n\n [-3., 2., 7.],\n", "file_path": "src/matrix.rs", "rank": 93, "score": 27611.73058749323 }, { "content": " #[test]\n\n fn construct_2x2_matrix()\n\n {\n\n let m: Matrix = Matrix::new2(\n\n [-3., 5.],\n\n [1., -2.]);\n\n assert_eq!(-3., m[0][0]);\n\n assert_eq!(5., m[0][1]);\n\n assert_eq!(1., m[1][0]);\n\n assert_eq!(-2., m[1][1]);\n\n }\n\n\n\n #[test]\n\n fn matrix_equality_identical_matrices() {\n\n let a = Matrix::new(\n\n [1., 2., 3., 
4.],\n\n [5., 6., 7., 8.],\n\n [9., 8., 7., 6.],\n\n [5., 4., 3., 2.]);\n\n let b = Matrix::new(\n", "file_path": "src/matrix.rs", "rank": 94, "score": 27611.404826404505 }, { "content": " [0., 6., -3.]);\n\n let expected = Matrix::new2([-3., 2.], [0., 6.]);\n\n assert_eq!(expected, a.submatrix(0, 2));\n\n }\n\n\n\n #[test]\n\n fn submatrix_of_4x4_is_3x3_matrix() {\n\n let a = Matrix::new(\n\n [-6., 1., 1., 6.],\n\n [-8., 5., 8., 6.],\n\n [-1., 0., 8., 2.],\n\n [-7., 1., -1., 1.]);\n\n let expected = Matrix::new3(\n\n [-6., 1., 6.], \n\n [-8., 8., 6.], \n\n [-7., -1., 1.]);\n\n assert_eq!(expected, a.submatrix(2, 1));\n\n }\n\n\n\n #[test]\n", "file_path": "src/matrix.rs", "rank": 95, "score": 27611.244051793536 }, { "content": " }\n\n\n\n #[test]\n\n fn cofactor_of_3x3_matrix() {\n\n let a = Matrix::new3(\n\n [3., 5., 0.],\n\n [2., -1., -7.],\n\n [6., -1., 5.]\n\n );\n\n assert_eq!(-12., a.minor(0, 0));\n\n assert_eq!(-12., a.cofactor(0, 0));\n\n assert_eq!(25., a.minor(1, 0));\n\n assert_eq!(-25., a.cofactor(1, 0));\n\n }\n\n\n\n #[test]\n\n fn determinant_of_3x3_matrix() {\n\n let a = Matrix::new3(\n\n [1., 2., 6.],\n\n [-5., 8., -4.],\n", "file_path": "src/matrix.rs", "rank": 96, "score": 27611.141254404414 }, { "content": " [1., 2., 3., 4.],\n\n [5., 6., 7., 8.],\n\n [9., 8., 7., 6.],\n\n [5., 4., 3., 2.]);\n\n\n\n assert_eq!(a, b);\n\n }\n\n\n\n #[test]\n\n fn matrix_equality_different_matrices() {\n\n let a = Matrix::new(\n\n [1., 2., 3., 4.],\n\n [5., 6., 7., 8.],\n\n [9., 8., 7., 6.],\n\n [5., 4., 3., 2.]);\n\n let b = Matrix::new(\n\n [1., 2., 3., 4.],\n\n [5., 6., 7., 8.],\n\n [9., 8., 7., 6.],\n\n [5., 4., 3., 1.]);\n", "file_path": "src/matrix.rs", "rank": 97, "score": 27611.086432989177 }, { "content": " let size = self.size;\n\n for row in 0..size {\n\n for col in 0..size {\n\n m.set(col, row, self[row][col]);\n\n }\n\n }\n\n m\n\n }\n\n\n\n fn determinant(&self) -> f64 {\n\n let size = self.size;\n\n match size {\n\n 2 => self[0][0] * self[1][1] - 
self[0][1] * self[1][0],\n\n 3..=4 => {\n\n let r = &self[0].inner;\n\n let mut col = 0;\n\n r.iter().map(|c| { let v = c * self.cofactor(0, col); col += 1; v } ).sum()\n\n }\n\n _ => { panic!(\"Invalid matrix size, only 2x2, 3x3 and 4x4 supported\") }\n\n }\n", "file_path": "src/matrix.rs", "rank": 98, "score": 27611.009241356423 }, { "content": "\n\n assert_ne!(a, b);\n\n }\n\n\n\n #[test]\n\n fn multiplying_two_matrices() {\n\n let a = Matrix::new (\n\n [1., 2., 3., 4.],\n\n [5., 6., 7., 8.],\n\n [9., 8., 7., 6.],\n\n [5., 4., 3., 2.]);\n\n let b = Matrix::new(\n\n [-2., 1., 2., 3.],\n\n [3., 2., 1., -1.],\n\n [4., 3., 6., 5.],\n\n [1., 2., 7., 8.]);\n\n let expected = Matrix::new(\n\n [20., 22., 50., 48.],\n\n [44., 54., 114., 108.],\n\n [40., 58., 110., 102.],\n", "file_path": "src/matrix.rs", "rank": 99, "score": 27610.92935498841 } ]
Rust
src/lib.rs
kchmck/uhttp_content_encoding.rs
683fff450707ae509a4d3a4a863108364d3e643a
#![feature(conservative_impl_trait)] use std::ascii::AsciiExt; pub fn content_encodings<'a>(s: &'a str) -> impl Iterator<Item = ContentEncoding<'a>> { s.split(',').rev().map(ContentEncoding::new) } #[derive(Copy, Clone, Eq, PartialEq, Debug, Hash)] pub enum ContentEncoding<'a> { Std(StdContentEncoding), Other(&'a str), } impl<'a> ContentEncoding<'a> { pub fn new(s: &'a str) -> Self { let s = s.trim(); match s.parse() { Ok(enc) => ContentEncoding::Std(enc), Err(_) => ContentEncoding::Other(s), } } } #[derive(Copy, Clone, Eq, PartialEq, Debug, Hash)] pub enum StdContentEncoding { Brottli, Compress, Deflate, EfficientXML, Gzip, Identity, Pack200Gzip, } impl std::str::FromStr for StdContentEncoding { type Err = (); fn from_str(s: &str) -> std::result::Result<Self, Self::Err> { use self::StdContentEncoding::*; if s.eq_ignore_ascii_case("br") { Ok(Brottli) } else if s.eq_ignore_ascii_case("compress") { Ok(Compress) } else if s.eq_ignore_ascii_case("deflate") { Ok(Deflate) } else if s.eq_ignore_ascii_case("exi") { Ok(EfficientXML) } else if s.eq_ignore_ascii_case("gzip") { Ok(Gzip) } else if s.eq_ignore_ascii_case("identity") { Ok(Identity) } else if s.eq_ignore_ascii_case("pack200-gzip") { Ok(Pack200Gzip) } else if s.is_empty() { Ok(Identity) } else { Err(()) } } } #[cfg(test)] mod test { use super::*; #[test] fn test_ce() { use self::StdContentEncoding::*; use self::ContentEncoding::*; assert_eq!(ContentEncoding::new("br"), Std(Brottli)); assert_eq!(ContentEncoding::new("\t\t\rBr "), Std(Brottli)); assert_eq!(ContentEncoding::new("compress"), Std(Compress)); assert_eq!(ContentEncoding::new(" COMpress "), Std(Compress)); assert_eq!(ContentEncoding::new("deflate"), Std(Deflate)); assert_eq!(ContentEncoding::new("\t\n dEFLAte "), Std(Deflate)); assert_eq!(ContentEncoding::new("exi"), Std(EfficientXML)); assert_eq!(ContentEncoding::new("\tEXI\t"), Std(EfficientXML)); assert_eq!(ContentEncoding::new("gzip"), Std(Gzip)); assert_eq!(ContentEncoding::new(" \tgZIP"), Std(Gzip)); 
assert_eq!(ContentEncoding::new("identity"), Std(Identity)); assert_eq!(ContentEncoding::new("\niDENtiTY\r\r\r "), Std(Identity)); assert_eq!(ContentEncoding::new(""), Std(Identity)); assert_eq!(ContentEncoding::new(" \t "), Std(Identity)); assert_eq!(ContentEncoding::new("pack200-gzip"), Std(Pack200Gzip)); assert_eq!(ContentEncoding::new(" PaCK200-GZip "), Std(Pack200Gzip)); assert_eq!(ContentEncoding::new("ÆØБД❤"), Other("ÆØБД❤")); } #[test] fn test_ces() { use self::StdContentEncoding::*; use self::ContentEncoding::*; let mut ce = content_encodings("deflate, br, identity"); assert_eq!(ce.next().unwrap(), Std(Identity)); assert_eq!(ce.next().unwrap(), Std(Brottli)); assert_eq!(ce.next().unwrap(), Std(Deflate)); assert!(ce.next().is_none()); let mut ce = content_encodings("identity"); assert_eq!(ce.next().unwrap(), Std(Identity)); assert!(ce.next().is_none()); let mut ce = content_encodings(""); assert_eq!(ce.next().unwrap(), Std(Identity)); assert!(ce.next().is_none()); let mut ce = content_encodings("\t\t,, , ,"); assert_eq!(ce.next().unwrap(), Std(Identity)); assert_eq!(ce.next().unwrap(), Std(Identity)); assert_eq!(ce.next().unwrap(), Std(Identity)); assert_eq!(ce.next().unwrap(), Std(Identity)); assert_eq!(ce.next().unwrap(), Std(Identity)); assert!(ce.next().is_none()); let mut ce = content_encodings("Br, exi,pack200-GZip "); assert_eq!(ce.next().unwrap(), Std(Pack200Gzip)); assert_eq!(ce.next().unwrap(), Std(EfficientXML)); assert_eq!(ce.next().unwrap(), Std(Brottli)); assert!(ce.next().is_none()); let mut ce = content_encodings("\t\t\t gzip"); assert_eq!(ce.next().unwrap(), Std(Gzip)); assert!(ce.next().is_none()); let mut ce = content_encodings("\tabc\t\t, def "); assert_eq!(ce.next().unwrap(), Other("def")); assert_eq!(ce.next().unwrap(), Other("abc")); assert!(ce.next().is_none()); } }
#![feature(conservative_impl_trait)] use std::ascii::AsciiExt; pub fn content_encodings<'a>(s: &'a str) -> impl Iterator<Item = ContentEncoding<'a>> { s.split(',').rev().map(ContentEncoding::new) } #[derive(Copy, Clone, Eq, PartialEq, Debug, Hash)] pub enum ContentEncoding<'a> { Std(StdContentEncoding), Other(&'a str), } impl<'a> ContentEncoding<'a> {
} #[derive(Copy, Clone, Eq, PartialEq, Debug, Hash)] pub enum StdContentEncoding { Brottli, Compress, Deflate, EfficientXML, Gzip, Identity, Pack200Gzip, } impl std::str::FromStr for StdContentEncoding { type Err = (); fn from_str(s: &str) -> std::result::Result<Self, Self::Err> { use self::StdContentEncoding::*; if s.eq_ignore_ascii_case("br") { Ok(Brottli) } else if s.eq_ignore_ascii_case("compress") { Ok(Compress) } else if s.eq_ignore_ascii_case("deflate") { Ok(Deflate) } else if s.eq_ignore_ascii_case("exi") { Ok(EfficientXML) } else if s.eq_ignore_ascii_case("gzip") { Ok(Gzip) } else if s.eq_ignore_ascii_case("identity") { Ok(Identity) } else if s.eq_ignore_ascii_case("pack200-gzip") { Ok(Pack200Gzip) } else if s.is_empty() { Ok(Identity) } else { Err(()) } } } #[cfg(test)] mod test { use super::*; #[test] fn test_ce() { use self::StdContentEncoding::*; use self::ContentEncoding::*; assert_eq!(ContentEncoding::new("br"), Std(Brottli)); assert_eq!(ContentEncoding::new("\t\t\rBr "), Std(Brottli)); assert_eq!(ContentEncoding::new("compress"), Std(Compress)); assert_eq!(ContentEncoding::new(" COMpress "), Std(Compress)); assert_eq!(ContentEncoding::new("deflate"), Std(Deflate)); assert_eq!(ContentEncoding::new("\t\n dEFLAte "), Std(Deflate)); assert_eq!(ContentEncoding::new("exi"), Std(EfficientXML)); assert_eq!(ContentEncoding::new("\tEXI\t"), Std(EfficientXML)); assert_eq!(ContentEncoding::new("gzip"), Std(Gzip)); assert_eq!(ContentEncoding::new(" \tgZIP"), Std(Gzip)); assert_eq!(ContentEncoding::new("identity"), Std(Identity)); assert_eq!(ContentEncoding::new("\niDENtiTY\r\r\r "), Std(Identity)); assert_eq!(ContentEncoding::new(""), Std(Identity)); assert_eq!(ContentEncoding::new(" \t "), Std(Identity)); assert_eq!(ContentEncoding::new("pack200-gzip"), Std(Pack200Gzip)); assert_eq!(ContentEncoding::new(" PaCK200-GZip "), Std(Pack200Gzip)); assert_eq!(ContentEncoding::new("ÆØБД❤"), Other("ÆØБД❤")); } #[test] fn test_ces() { use self::StdContentEncoding::*; use 
self::ContentEncoding::*; let mut ce = content_encodings("deflate, br, identity"); assert_eq!(ce.next().unwrap(), Std(Identity)); assert_eq!(ce.next().unwrap(), Std(Brottli)); assert_eq!(ce.next().unwrap(), Std(Deflate)); assert!(ce.next().is_none()); let mut ce = content_encodings("identity"); assert_eq!(ce.next().unwrap(), Std(Identity)); assert!(ce.next().is_none()); let mut ce = content_encodings(""); assert_eq!(ce.next().unwrap(), Std(Identity)); assert!(ce.next().is_none()); let mut ce = content_encodings("\t\t,, , ,"); assert_eq!(ce.next().unwrap(), Std(Identity)); assert_eq!(ce.next().unwrap(), Std(Identity)); assert_eq!(ce.next().unwrap(), Std(Identity)); assert_eq!(ce.next().unwrap(), Std(Identity)); assert_eq!(ce.next().unwrap(), Std(Identity)); assert!(ce.next().is_none()); let mut ce = content_encodings("Br, exi,pack200-GZip "); assert_eq!(ce.next().unwrap(), Std(Pack200Gzip)); assert_eq!(ce.next().unwrap(), Std(EfficientXML)); assert_eq!(ce.next().unwrap(), Std(Brottli)); assert!(ce.next().is_none()); let mut ce = content_encodings("\t\t\t gzip"); assert_eq!(ce.next().unwrap(), Std(Gzip)); assert!(ce.next().is_none()); let mut ce = content_encodings("\tabc\t\t, def "); assert_eq!(ce.next().unwrap(), Other("def")); assert_eq!(ce.next().unwrap(), Other("abc")); assert!(ce.next().is_none()); } }
pub fn new(s: &'a str) -> Self { let s = s.trim(); match s.parse() { Ok(enc) => ContentEncoding::Std(enc), Err(_) => ContentEncoding::Other(s), } }
function_block-full_function
[ { "content": "# uhttp\\_content\\_encoding -- Parser for HTTP Content-Encoding header\n\n\n\n[Documentation](https://docs.rs/uhttp_content_encoding)\n\n\n\nThis crate provides a zero-allocation, iterator/slice-based parser for extracting HTTP\n\n[content encoding](https://tools.ietf.org/html/rfc7231#section-3.1.2) types as they\n\nappear in the `Content-Encoding` request header. [Standard\n\nencodings](http://www.iana.org/assignments/http-parameters/http-parameters.xhtml#content-coding)\n\nare extracted as enum values, and unknown encodings are extracted as slices\n\nfor further processing.\n\n\n\n## Example\n\n\n\n```rust\n\nuse uhttp_content_encoding::{content_encodings, ContentEncoding, StdContentEncoding};\n\n\n\nlet mut encs = content_encodings(\" gzip, identity, custom-enc\");\n\nassert_eq!(encs.next(), Some(ContentEncoding::Other(\"custom-enc\")));\n\nassert_eq!(encs.next(), Some(ContentEncoding::Std(StdContentEncoding::Identity)));\n\nassert_eq!(encs.next(), Some(ContentEncoding::Std(StdContentEncoding::Gzip)));\n\nassert_eq!(encs.next(), None);\n\n```\n\n\n\n## Usage\n\n\n\nThis [crate](https://crates.io/crates/uhttp_content_encoding) can be used through cargo by\n\nadding it as a dependency in `Cargo.toml`:\n\n\n\n```toml\n\n[dependencies]\n\nuhttp_content_encoding = \"0.5.1\"\n\n```\n\nand importing it in the crate root:\n\n\n\n```rust\n\nextern crate uhttp_content_encoding;\n\n```\n", "file_path": "Readme.md", "rank": 5, "score": 1.6920933350108684 } ]
Rust
src/writer/header.rs
diegodox/ply_rs
0bdce2456117d278b2c176b9b46d5e0363dd3f2f
use std::io::{BufWriter, Write}; use crate::{Comment, Element, Format, GenericElement, PLYFile, Property, PropertyList}; const MAGIC_NUMBER: &str = "ply"; const END_HEADER: &str = "end_header"; pub(crate) trait PlyWriteHeader<T: Write> { fn write_header(&self, writer: &mut BufWriter<T>) -> std::io::Result<()>; } impl<T: Write> PlyWriteHeader<T> for PLYFile { fn write_header(&self, writer: &mut BufWriter<T>) -> std::io::Result<()> { writeln!(writer, "{}", MAGIC_NUMBER)?; self.format.write_header(writer)?; for comment in self.comments.iter() { comment.write_header(writer)?; } for element in self.elements.iter() { match element { Element::Element(e) => e.write_header(writer), Element::ListElement(e) => e.write_header(writer), }?; } writeln!(writer, "{}", END_HEADER)?; Ok(()) } } impl<T: Write> PlyWriteHeader<T> for Format { fn write_header(&self, writer: &mut BufWriter<T>) -> std::io::Result<()> { match self { crate::Format::Ascii { version } => writeln!(writer, "format ascii {}", version), crate::Format::BinaryBigEndian { version } => { writeln!(writer, "format binary_big_endian {}", version) } crate::Format::BinaryLittleEndian { version } => { writeln!(writer, "format binary_little_endian {}", version) } } } } #[test] fn test_write_format() { let mut writer = BufWriter::new(Vec::new()); let format = Format::Ascii { version: "1.0".to_string(), }; format.write_header(&mut writer).unwrap(); assert_eq!( writer.into_inner().unwrap(), r#"format ascii 1.0 "# .as_bytes(), ) } impl<T: Write> PlyWriteHeader<T> for Comment { fn write_header(&self, writer: &mut BufWriter<T>) -> std::io::Result<()> { writeln!(writer, "comment {}", self.0.join(" ")) } } #[test] fn test_write_comment() { let mut writer = BufWriter::new(Vec::new()); let comment = Comment(vec!["test".to_string(), "comment".to_string()]); comment.write_header(&mut writer).unwrap(); assert_eq!( writer.into_inner().unwrap(), r#"comment test comment "# .as_bytes(), ) } impl<T: Write, P: PlyWriteHeader<T>> 
PlyWriteHeader<T> for GenericElement<P> { fn write_header(&self, writer: &mut BufWriter<T>) -> std::io::Result<()> { writeln!(writer, "element {} {}", self.name, self.count)?; self.property().write_header(writer) } } #[test] fn test_write_element_header() { use crate::*; let mut writer = BufWriter::new(Vec::new()); let element = GenericElement { name: "vertex".to_string(), count: 20, props: Property { props: vec![ PLYValueTypeName::Float, PLYValueTypeName::Float, PLYValueTypeName::Float, PLYValueTypeName::Uchar, PLYValueTypeName::Uchar, PLYValueTypeName::Uchar, ], names: vec![ "x".to_string(), "y".to_string(), "z".to_string(), "red".to_string(), "green".to_string(), "blue".to_string(), ], }, payloads: Vec::<Payload>::with_capacity(20), }; element.write_header(&mut writer).unwrap(); assert_eq!( writer.into_inner().unwrap(), r#"element vertex 20 property float x property float y property float z property uchar red property uchar green property uchar blue "# .as_bytes(), ) } impl<T: Write> PlyWriteHeader<T> for Property { fn write_header(&self, writer: &mut BufWriter<T>) -> std::io::Result<()> { for (name, ply_type) in self.iter() { writeln!(writer, "property {} {}", ply_type.to_str(), name)? 
} Ok(()) } } #[test] fn test_write_property() { use crate::*; let mut writer = BufWriter::new(Vec::new()); let property = Property { props: vec![ PLYValueTypeName::Float, PLYValueTypeName::Float, PLYValueTypeName::Float, PLYValueTypeName::Uchar, PLYValueTypeName::Uchar, PLYValueTypeName::Uchar, ], names: vec![ "x".to_string(), "y".to_string(), "z".to_string(), "red".to_string(), "green".to_string(), "blue".to_string(), ], }; property.write_header(&mut writer).unwrap(); assert_eq!( writer.into_inner().unwrap(), r#"property float x property float y property float z property uchar red property uchar green property uchar blue "# .as_bytes(), ) } impl<T: Write> PlyWriteHeader<T> for PropertyList { fn write_header(&self, writer: &mut BufWriter<T>) -> std::io::Result<()> { writeln!( writer, "property list {} {} {}", self.count.to_str(), self.prop.to_str(), self.name ) } } #[test] fn test_write_property_list() { use crate::*; let mut writer = BufWriter::new(Vec::new()); let property = PropertyList { name: "vertex".to_string(), count: PLYValueTypeName::Uchar, prop: PLYValueTypeName::Float, }; property.write_header(&mut writer).unwrap(); assert_eq!( writer.into_inner().unwrap(), r#"property list uchar float vertex "# .as_bytes(), ) }
use std::io::{BufWriter, Write}; use crate::{Comment, Element, Format, GenericElement, PLYFile, Property, PropertyLi
rite_property_list() { use crate::*; let mut writer = BufWriter::new(Vec::new()); let property = PropertyList { name: "vertex".to_string(), count: PLYValueTypeName::Uchar, prop: PLYValueTypeName::Float, }; property.write_header(&mut writer).unwrap(); assert_eq!( writer.into_inner().unwrap(), r#"property list uchar float vertex "# .as_bytes(), ) }
st}; const MAGIC_NUMBER: &str = "ply"; const END_HEADER: &str = "end_header"; pub(crate) trait PlyWriteHeader<T: Write> { fn write_header(&self, writer: &mut BufWriter<T>) -> std::io::Result<()>; } impl<T: Write> PlyWriteHeader<T> for PLYFile { fn write_header(&self, writer: &mut BufWriter<T>) -> std::io::Result<()> { writeln!(writer, "{}", MAGIC_NUMBER)?; self.format.write_header(writer)?; for comment in self.comments.iter() { comment.write_header(writer)?; } for element in self.elements.iter() { match element { Element::Element(e) => e.write_header(writer), Element::ListElement(e) => e.write_header(writer), }?; } writeln!(writer, "{}", END_HEADER)?; Ok(()) } } impl<T: Write> PlyWriteHeader<T> for Format { fn write_header(&self, writer: &mut BufWriter<T>) -> std::io::Result<()> { match self { crate::Format::Ascii { version } => writeln!(writer, "format ascii {}", version), crate::Format::BinaryBigEndian { version } => { writeln!(writer, "format binary_big_endian {}", version) } crate::Format::BinaryLittleEndian { version } => { writeln!(writer, "format binary_little_endian {}", version) } } } } #[test] fn test_write_format() { let mut writer = BufWriter::new(Vec::new()); let format = Format::Ascii { version: "1.0".to_string(), }; format.write_header(&mut writer).unwrap(); assert_eq!( writer.into_inner().unwrap(), r#"format ascii 1.0 "# .as_bytes(), ) } impl<T: Write> PlyWriteHeader<T> for Comment { fn write_header(&self, writer: &mut BufWriter<T>) -> std::io::Result<()> { writeln!(writer, "comment {}", self.0.join(" ")) } } #[test] fn test_write_comment() { let mut writer = BufWriter::new(Vec::new()); let comment = Comment(vec!["test".to_string(), "comment".to_string()]); comment.write_header(&mut writer).unwrap(); assert_eq!( writer.into_inner().unwrap(), r#"comment test comment "# .as_bytes(), ) } impl<T: Write, P: PlyWriteHeader<T>> PlyWriteHeader<T> for GenericElement<P> { fn write_header(&self, writer: &mut BufWriter<T>) -> std::io::Result<()> { 
writeln!(writer, "element {} {}", self.name, self.count)?; self.property().write_header(writer) } } #[test] fn test_write_element_header() { use crate::*; let mut writer = BufWriter::new(Vec::new()); let element = GenericElement { name: "vertex".to_string(), count: 20, props: Property { props: vec![ PLYValueTypeName::Float, PLYValueTypeName::Float, PLYValueTypeName::Float, PLYValueTypeName::Uchar, PLYValueTypeName::Uchar, PLYValueTypeName::Uchar, ], names: vec![ "x".to_string(), "y".to_string(), "z".to_string(), "red".to_string(), "green".to_string(), "blue".to_string(), ], }, payloads: Vec::<Payload>::with_capacity(20), }; element.write_header(&mut writer).unwrap(); assert_eq!( writer.into_inner().unwrap(), r#"element vertex 20 property float x property float y property float z property uchar red property uchar green property uchar blue "# .as_bytes(), ) } impl<T: Write> PlyWriteHeader<T> for Property { fn write_header(&self, writer: &mut BufWriter<T>) -> std::io::Result<()> { for (name, ply_type) in self.iter() { writeln!(writer, "property {} {}", ply_type.to_str(), name)? } Ok(()) } } #[test] fn test_write_property() { use crate::*; let mut writer = BufWriter::new(Vec::new()); let property = Property { props: vec![ PLYValueTypeName::Float, PLYValueTypeName::Float, PLYValueTypeName::Float, PLYValueTypeName::Uchar, PLYValueTypeName::Uchar, PLYValueTypeName::Uchar, ], names: vec![ "x".to_string(), "y".to_string(), "z".to_string(), "red".to_string(), "green".to_string(), "blue".to_string(), ], }; property.write_header(&mut writer).unwrap(); assert_eq!( writer.into_inner().unwrap(), r#"property float x property float y property float z property uchar red property uchar green property uchar blue "# .as_bytes(), ) } impl<T: Write> PlyWriteHeader<T> for PropertyList { fn write_header(&self, writer: &mut BufWriter<T>) -> std::io::Result<()> { writeln!( writer, "property list {} {} {}", self.count.to_str(), self.prop.to_str(), self.name ) } } #[test] fn test_w
random
[ { "content": "#[test]\n\nfn test_write_element_payload_ascii() {\n\n use crate::*;\n\n let mut writer = BufWriter::new(Vec::new());\n\n let element = GenericElement {\n\n name: \"vertex\".to_string(),\n\n count: 8,\n\n props: Property {\n\n props: vec![\n\n PLYValueTypeName::Uchar,\n\n PLYValueTypeName::Uchar,\n\n PLYValueTypeName::Uchar,\n\n ],\n\n names: vec![\"red\".to_string(), \"green\".to_string(), \"blue\".to_string()],\n\n },\n\n payloads: vec![\n\n Payload(vec![\n\n PLYValue::Uchar(0),\n\n PLYValue::Uchar(0),\n\n PLYValue::Uchar(0),\n\n ]),\n", "file_path": "src/writer/payload.rs", "rank": 4, "score": 41880.025633534715 }, { "content": "#[test]\n\nfn test_write_ply() {\n\n use crate::*;\n\n let ply = {\n\n let element_vertex = Element::Element(GenericElement {\n\n name: \"vertex\".to_string(),\n\n count: 8,\n\n props: Property {\n\n props: vec![\n\n PLYValueTypeName::Float,\n\n PLYValueTypeName::Float,\n\n PLYValueTypeName::Float,\n\n ],\n\n names: vec![\"x\".to_string(), \"y\".to_string(), \"z\".to_string()],\n\n },\n\n payloads: vec![\n\n Payload(vec![\n\n PLYValue::Float(0f32),\n\n PLYValue::Float(0f32),\n\n PLYValue::Float(0f32),\n\n ]),\n", "file_path": "src/writer.rs", "rank": 5, "score": 24178.17548721973 }, { "content": "#[test]\n\nfn parse_property_line() {\n\n let line = \"property float x\";\n\n assert_eq!(\n\n parse_header_line(line),\n\n HeaderLine::PropertyLine {\n\n name: \"x\".to_string(),\n\n prop_type: PLYValueTypeName::Float\n\n }\n\n );\n\n}\n", "file_path": "src/reader/header.rs", "rank": 6, "score": 23122.66178235918 }, { "content": "#[test]\n\nfn parse_element_line() {\n\n let line = \"element vertex 8\";\n\n assert_eq!(\n\n parse_header_line(line),\n\n HeaderLine::ElementLine {\n\n name: \"vertex\".to_string(),\n\n count: 8\n\n }\n\n );\n\n}\n", "file_path": "src/reader/header.rs", "rank": 8, "score": 22902.25910310499 }, { "content": "#[test]\n\nfn parse_ascii_format_line() {\n\n let line = \"format ascii 1.0\";\n\n 
assert_eq!(\n\n parse_header_line(line),\n\n HeaderLine::FormatLine(Format::Ascii {\n\n version: \"1.0\".to_string()\n\n })\n\n );\n\n}\n", "file_path": "src/reader/header.rs", "rank": 9, "score": 22130.05727400295 }, { "content": "#[test]\n\nfn parse_property_list_line() {\n\n let line = \"property list uchar int vertex_index\";\n\n assert_eq!(\n\n parse_header_line(line),\n\n HeaderLine::PropertyListLine(PropertyList {\n\n name: \"vertex_index\".to_string(),\n\n prop: PLYValueTypeName::Int,\n\n count: PLYValueTypeName::Uchar\n\n })\n\n );\n\n}\n", "file_path": "src/reader/header.rs", "rank": 10, "score": 22059.830357160798 }, { "content": "#[test]\n\nfn test_read_to_element_line() {\n\n let lines = vec![\n\n HeaderLine::CommentLine(Comment(vec![\n\n \"this\".to_string(),\n\n \"is\".to_string(),\n\n \"a\".to_string(),\n\n \"cube\".to_string(),\n\n ])),\n\n HeaderLine::ElementLine {\n\n name: \"vertex\".to_string(),\n\n count: 8,\n\n },\n\n ];\n\n let mut iter = lines.into_iter();\n\n let mut comments = Vec::new();\n\n let next = read_to_element_line(&mut iter, &mut comments);\n\n assert_eq!(\n\n comments,\n\n vec![Comment(vec![\n\n \"this\".to_string(),\n\n \"is\".to_string(),\n\n \"a\".to_string(),\n\n \"cube\".to_string()\n\n ])]\n\n );\n\n assert_eq!(next, Some((r#\"vertex\"#.to_string(), 8)))\n\n}\n\n\n", "file_path": "src/reader/header.rs", "rank": 11, "score": 21849.558470629097 }, { "content": "#[test]\n\nfn test_read_element_props() {\n\n let mut lines = r#\"property float x\n\nproperty float y\n\nproperty float z\n\ncomment color\n\nproperty uchar red\n\nproperty uchar green\n\nproperty uchar blue\"#\n\n .lines()\n\n .map(|line| parse_header_line(line));\n\n let mut comments = Vec::new();\n\n\n\n let (element, next) = read_element_props(&mut lines, &mut comments, (\"vertex\".to_string(), 20));\n\n\n\n assert_eq!(\n\n element,\n\n Element::Element(GenericElement {\n\n name: \"vertex\".to_string(),\n\n count: 20,\n\n props: Property {\n", "file_path": 
"src/reader/header.rs", "rank": 12, "score": 21849.558470629097 }, { "content": "#[test]\n\nfn test_read_element_payload_be() {\n\n use crate::*;\n\n let mut element = GenericElement {\n\n name: \"vertex\".to_string(),\n\n count: 8,\n\n props: Property {\n\n props: vec![\n\n PLYValueTypeName::Uchar,\n\n PLYValueTypeName::Uchar,\n\n PLYValueTypeName::Uchar,\n\n ],\n\n names: vec![\"red\".to_string(), \"green\".to_string(), \"blue\".to_string()],\n\n },\n\n payloads: Vec::with_capacity(8),\n\n };\n\n let bytes = [\n\n //0\n\n 0u8.to_be(),\n\n 0u8.to_be(),\n\n 0u8.to_be(),\n", "file_path": "src/reader/payload.rs", "rank": 13, "score": 21849.558470629097 }, { "content": "#[test]\n\nfn test_read_element_payload_le() {\n\n use crate::*;\n\n let mut element = GenericElement {\n\n name: \"vertex\".to_string(),\n\n count: 8,\n\n props: Property {\n\n props: vec![\n\n PLYValueTypeName::Uchar,\n\n PLYValueTypeName::Uchar,\n\n PLYValueTypeName::Uchar,\n\n ],\n\n names: vec![\"red\".to_string(), \"green\".to_string(), \"blue\".to_string()],\n\n },\n\n payloads: Vec::with_capacity(8),\n\n };\n\n let bytes = [\n\n //0\n\n 0u8.to_le(),\n\n 0u8.to_le(),\n\n 0u8.to_le(),\n", "file_path": "src/reader/payload.rs", "rank": 14, "score": 20889.379690316382 }, { "content": "#[test]\n\nfn test_read_element_payload_ascii() {\n\n use crate::*;\n\n let mut element = GenericElement {\n\n name: \"vertex\".to_string(),\n\n count: 8,\n\n props: Property {\n\n props: vec![\n\n PLYValueTypeName::Uchar,\n\n PLYValueTypeName::Uchar,\n\n PLYValueTypeName::Uchar,\n\n ],\n\n names: vec![\"red\".to_string(), \"green\".to_string(), \"blue\".to_string()],\n\n },\n\n payloads: Vec::<Payload>::with_capacity(8),\n\n };\n\n let lines = r#\"0 0 0\n\n0 0 1\n\n0 1 1\n\n0 1 0\n\n1 0 0\n", "file_path": "src/reader/payload.rs", "rank": 15, "score": 20889.379690316382 }, { "content": "/// Read element's props, arg `(name, count)` is a name and count of element.\n\n///\n\n/// Return Element and name, usize if they 
found while reading props.\n\nfn read_element_props<I: Iterator<Item = HeaderLine>>(\n\n lines: &mut I,\n\n comments: &mut Vec<Comment>,\n\n (name, count): (String, usize),\n\n) -> (Element, Option<(String, usize)>) {\n\n let mut prop = Property {\n\n props: Vec::new(),\n\n names: Vec::new(),\n\n };\n\n for line in lines {\n\n match line {\n\n HeaderLine::PropertyLine { name, prop_type } => {\n\n prop.props.push(prop_type);\n\n prop.names.push(name);\n\n }\n\n HeaderLine::PropertyListLine(prop_list) => {\n\n assert!(\n\n prop.props.is_empty(),\n\n r#\"\"property\" and \"property lines\" cannot be used at same element\"#\n\n );\n", "file_path": "src/reader/header.rs", "rank": 16, "score": 19205.24635581879 }, { "content": "/// Read headers for find line `element (name) (count)`\n\n///\n\n/// Return Some((name, count)) if found, None otherwise\n\nfn read_to_element_line<I: Iterator<Item = HeaderLine>>(\n\n lines: &mut I,\n\n comments: &mut Vec<Comment>,\n\n) -> Option<(String, usize)> {\n\n for line in lines {\n\n match line {\n\n HeaderLine::ElementLine { name, count } => {\n\n return Some((name, count));\n\n }\n\n HeaderLine::CommentLine(c) => comments.push(c),\n\n HeaderLine::EmptyLine => { /* do nothing */ }\n\n HeaderLine::UnknownLine => { /* do nothing */ }\n\n HeaderLine::PropertyLine { .. 
} => panic!(r#\"keyword \"propety\" cannnot use here\"#),\n\n HeaderLine::PropertyListLine(_) => panic!(r#\"keyword \"propety list\" cannnot use here\"#),\n\n HeaderLine::FormatLine(_) => panic!(r#\"keyword \"format\" cannnot use here\"#),\n\n HeaderLine::FileIdentifierLine => panic!(r#\"keyword \"ply\" cannnot use here\"#),\n\n HeaderLine::EndHeader => panic!(r#\"keyword end_header is not allowed here\"#),\n\n }\n\n }\n\n None\n\n}\n\n\n", "file_path": "src/reader/header.rs", "rank": 17, "score": 19204.3162037982 }, { "content": "use std::io::{BufWriter, Write};\n\n\n\nuse crate::{\n\n ply_value::PlyTryFrom, Format, GenericElement, PLYValue, Payload, Property, PropertyList,\n\n};\n\n\n\npub(crate) fn write_element_payload<T: Write, P: WritePayload<T, Payload = Payload>>(\n\n element: &GenericElement<P>,\n\n writer: &mut BufWriter<T>,\n\n format: &Format,\n\n) -> std::io::Result<()> {\n\n match format {\n\n Format::Ascii { .. } => {\n\n for payload in element.payload() {\n\n element.property().write_payload_ascii(payload, writer)?;\n\n }\n\n }\n\n\n\n Format::BinaryBigEndian { .. } => {\n\n for payload in element.payload() {\n", "file_path": "src/writer/payload.rs", "rank": 18, "score": 13.6901063989597 }, { "content": " element.property().write_payload_be(payload, writer)?;\n\n }\n\n }\n\n Format::BinaryLittleEndian { .. 
} => {\n\n for payload in element.payload() {\n\n element.property().write_payload_le(payload, writer)?;\n\n }\n\n }\n\n };\n\n Ok(())\n\n}\n\n#[test]\n", "file_path": "src/writer/payload.rs", "rank": 19, "score": 11.743799214547769 }, { "content": "use std::io::BufWriter;\n\n\n\nuse ply::{\n\n Comment, Element, Format, GenericElement, PLYFile, PLYValue, PLYValueTypeName, Payload,\n\n Property, PropertyList,\n\n};\n\n\n\nconst PLY: &str = r#\"ply\n\nformat ascii 1.0\n\ncomment test data\n\nelement vertex 8\n\nproperty float x\n\nproperty float y\n\nproperty float z\n\nelement list 3\n\nproperty list uchar char vertex_id\n\nend_header\n\n0 0 0\n\n0 0 1\n\n0 1 1\n", "file_path": "examples/build_ply.rs", "rank": 21, "score": 10.478461777241952 }, { "content": "//! PLY File Writer\n\n\n\nuse std::io::{BufWriter, Write};\n\n\n\nuse crate::{Element, PLYFile};\n\n\n\npub(crate) mod header;\n\nuse header::PlyWriteHeader;\n\n\n\npub(crate) mod payload;\n\nuse payload::write_element_payload;\n\n\n\nimpl PLYFile {\n\n pub fn write<T: Write>(&self, writer: &mut BufWriter<T>) -> std::io::Result<()> {\n\n self.write_header(writer)?;\n\n for element in self.elements.iter() {\n\n match element {\n\n Element::Element(e) => write_element_payload(e, writer, &self.format),\n\n Element::ListElement(e) => write_element_payload(e, writer, &self.format),\n\n }?;\n\n }\n\n Ok(())\n\n }\n\n}\n\n\n\n#[test]\n", "file_path": "src/writer.rs", "rank": 22, "score": 9.965600998535312 }, { "content": " elements: vec![element_vertex, element_list],\n\n }\n\n };\n\n let mut writer = BufWriter::new(Vec::new());\n\n ply.write(&mut writer).unwrap();\n\n assert_eq!(\n\n r#\"ply\n\nformat ascii 1.0\n\ncomment test data\n\nelement vertex 8\n\nproperty float x\n\nproperty float y\n\nproperty float z\n\nelement list 3\n\nproperty list uchar char vertex_id\n\nend_header\n\n0 0 0\n\n0 0 1\n\n0 1 1\n\n0 1 0\n", "file_path": "src/writer.rs", "rank": 25, "score": 8.210103707064503 }, { "content": "use 
crate::{\n\n Comment, Element, Format, GenericElement, PLYFile, PLYValueTypeName, Property, PropertyList,\n\n};\n\nuse std::convert::AsRef;\n\nuse std::str::FromStr;\n\n\n\n/// Read [HeaderLine]s\n\npub(crate) fn from_header_lines<I: Iterator<Item = HeaderLine>>(lines: &mut I) -> PLYFile {\n\n // assert magic number\n\n assert_eq!(\n\n lines\n\n .next()\n\n .expect(\"Not found: first line, magic number must be ply\"),\n\n HeaderLine::FileIdentifierLine\n\n );\n\n\n\n // read format of PLY file\n\n let format = match lines.next().expect(\"Not found: secound line, format style\") {\n\n HeaderLine::FormatLine(format) => format,\n\n _ => panic!(\"Not found: secound line, format style\"),\n", "file_path": "src/reader/header.rs", "rank": 26, "score": 7.633476924907017 }, { "content": "use std::convert::TryInto;\n\n\n\nuse crate::{GenericElement, Payload, Property, PropertyList};\n\n\n\npub(crate) fn read_elemet_payload_ascii<P, I>(element: &mut GenericElement<P>, lines: &mut I)\n\nwhere\n\n P: ReadPayload<Payload = Payload>,\n\n I: Iterator<Item = String>,\n\n{\n\n for _ in 0..element.count {\n\n let line = lines.next().unwrap();\n\n let payload = element.props.read_as_ascii(line);\n\n element.payloads.push(payload);\n\n }\n\n}\n\npub(crate) fn read_element_payload_be_bytes<P, I>(element: &mut GenericElement<P>, bytes: &mut I)\n\nwhere\n\n P: ReadPayload<Payload = Payload>,\n\n I: Iterator<Item = u8>,\n\n{\n", "file_path": "src/reader/payload.rs", "rank": 27, "score": 7.391875655196721 }, { "content": "pub enum Element {\n\n Element(GenericElement<Property>),\n\n ListElement(GenericElement<PropertyList>),\n\n}\n\nimpl TryInto<GenericElement<Property>> for Element {\n\n type Error = PLYError;\n\n\n\n fn try_into(self) -> Result<GenericElement<Property>, Self::Error> {\n\n match self {\n\n Element::Element(e) => Ok(e),\n\n Element::ListElement(_) => Err(PLYError::MissmatchDataType),\n\n }\n\n }\n\n}\n\nimpl TryInto<GenericElement<PropertyList>> for Element {\n\n type Error 
= PLYError;\n\n\n\n fn try_into(self) -> Result<GenericElement<PropertyList>, Self::Error> {\n\n match self {\n\n Element::ListElement(e) => Ok(e),\n", "file_path": "src/lib.rs", "rank": 28, "score": 7.209435892478121 }, { "content": "/// Possible Lines in PLY Header\n\n#[derive(Debug, Eq, PartialEq, Clone)]\n\npub(crate) enum HeaderLine {\n\n /// Line just \"ply\"\n\n FileIdentifierLine,\n\n /// Line like \"format ascii 1.0\"\n\n FormatLine(Format),\n\n /// Line like \"comment this file is a cube\"\n\n CommentLine(Comment),\n\n /// Line like \"element vertex 8\"\n\n ElementLine { name: String, count: usize },\n\n /// Line like \"property float z\"\n\n PropertyLine {\n\n name: String,\n\n prop_type: PLYValueTypeName,\n\n },\n\n /// Line like \"property list uchar int vertex_index\"\n\n PropertyListLine(PropertyList),\n\n /// Empty Line (is leagal??)\n\n EmptyLine,\n", "file_path": "src/reader/header.rs", "rank": 30, "score": 6.6310410379555655 }, { "content": " let count = PLYValueTypeName::from_str(words.next().unwrap()).unwrap();\n\n let prop = PLYValueTypeName::from_str(words.next().unwrap()).unwrap();\n\n let name = words.next().expect(\"property name not found\").to_string();\n\n HeaderLine::PropertyListLine(PropertyList { count, prop, name })\n\n }\n\n prop_type_str => HeaderLine::PropertyLine {\n\n name: words.next().expect(\"property name not found\").to_string(),\n\n prop_type: PLYValueTypeName::from_str(prop_type_str).unwrap(),\n\n },\n\n },\n\n \"element\" => HeaderLine::ElementLine {\n\n name: words.next().expect(\"element name not found\").to_string(),\n\n count: words\n\n .next()\n\n .expect(\"element count not found\")\n\n .parse()\n\n .expect(\"element count must be unsined integer\"),\n\n },\n\n \"format\" => {\n\n HeaderLine::FormatLine(match words.next().expect(\"format style not found\") {\n", "file_path": "src/reader/header.rs", "rank": 31, "score": 6.127689331922956 }, { "content": " }\n\n }\n\n crate::Format::BinaryBigEndian { .. 
} => {\n\n let s = lines.next().unwrap();\n\n let mut bytes = s.as_bytes().iter().copied();\n\n for element in &mut ply.elements {\n\n match element {\n\n crate::Element::Element(element) => {\n\n read_element_payload_be_bytes(element, &mut bytes)\n\n }\n\n crate::Element::ListElement(element) => {\n\n read_element_payload_be_bytes(element, &mut bytes)\n\n }\n\n }\n\n }\n\n }\n\n crate::Format::BinaryLittleEndian { .. } => {\n\n let s = lines.next().unwrap();\n\n let mut bytes = s.as_bytes().iter().copied();\n\n for element in &mut ply.elements {\n", "file_path": "src/reader.rs", "rank": 32, "score": 6.086803700357747 }, { "content": "/// Struct represent PLY File\n\npub struct PLYFile {\n\n pub format: Format,\n\n pub comments: Vec<Comment>,\n\n pub elements: Vec<Element>,\n\n}\n\n\n\nimpl PLYFile {\n\n pub fn new(format: Format) -> Self {\n\n Self {\n\n format,\n\n comments: Vec::new(),\n\n elements: Vec::new(),\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, PartialEq, Eq)]\n\n/// Format of PLY file\n\npub enum Format {\n", "file_path": "src/lib.rs", "rank": 33, "score": 6.028852888613519 }, { "content": " let ply = PLYFile::from_lines(&mut lines);\n\n assert_eq!(\n\n ply,\n\n PLYFile {\n\n format: Format::Ascii {\n\n version: \"1.0\".to_string()\n\n },\n\n comments: vec![Comment(vec![\"test\".to_string(), \"data\".to_string()])],\n\n elements: vec![\n\n Element::Element(GenericElement {\n\n name: \"vertex\".to_string(),\n\n count: 3,\n\n props: Property {\n\n props: vec![\n\n PLYValueTypeName::Float,\n\n PLYValueTypeName::Float,\n\n PLYValueTypeName::Float,\n\n ],\n\n names: vec![\"x\".to_string(), \"y\".to_string(), \"z\".to_string()],\n\n },\n", "file_path": "src/reader.rs", "rank": 34, "score": 5.6980315698656465 }, { "content": " prop_type: PLYValueTypeName::Float,\n\n name: \"z\".to_string(),\n\n },\n\n ];\n\n let mut iter = lines.into_iter();\n\n let ply_file = from_header_lines(&mut iter);\n\n assert_eq!(\n\n ply_file,\n\n PLYFile {\n\n format: 
Format::Ascii {\n\n version: \"1.0\".to_string()\n\n },\n\n comments: vec![Comment(vec![\"test\".to_string(), \"data\".to_string()])],\n\n elements: vec![Element::Element(GenericElement {\n\n name: \"vertex\".to_string(),\n\n count: 8,\n\n props: Property {\n\n names: vec![\"x\".to_string(), \"y\".to_string(), \"z\".to_string()],\n\n props: vec![\n\n PLYValueTypeName::Float,\n", "file_path": "src/reader/header.rs", "rank": 35, "score": 5.523346276130458 }, { "content": " Ascii { version: String },\n\n BinaryBigEndian { version: String },\n\n BinaryLittleEndian { version: String },\n\n}\n\nimpl Display for Format {\n\n fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {\n\n match self {\n\n Format::Ascii { version } => write!(f, \"format ascii {}\", version),\n\n Format::BinaryBigEndian { version } => {\n\n write!(f, \"format binary_big_endian {}\", version)\n\n }\n\n Format::BinaryLittleEndian { version } => {\n\n write!(f, \"format binary_little_endian {}\", version)\n\n }\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, PartialEq, Eq, Default)]\n\n/// Struct represent Comment\n", "file_path": "src/lib.rs", "rank": 36, "score": 5.5020177055054695 }, { "content": " Element::Element(_) => Err(PLYError::MissmatchDataType),\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, PartialEq)]\n\n/// Struct represent Generic PLY Element\n\npub struct GenericElement<P> {\n\n pub name: String,\n\n count: usize,\n\n props: P,\n\n payloads: Vec<Payload>,\n\n}\n\nimpl<P> GenericElement<P> {\n\n pub fn new<S: Into<String>>(name: S, property: P) -> GenericElement<P> {\n\n Self {\n\n name: name.into(),\n\n count: 0,\n\n props: property,\n\n payloads: Vec::new(),\n", "file_path": "src/lib.rs", "rank": 37, "score": 5.430018140421135 }, { "content": "\n\nimpl<T: Write> WritePayload<T> for Property {\n\n type Payload = Payload;\n\n\n\n fn write_payload_ascii(\n\n &self,\n\n payload: &Payload,\n\n writer: &mut BufWriter<T>,\n\n ) -> std::io::Result<()> {\n\n let line = payload\n\n 
.0\n\n .iter()\n\n .map(|v| format!(\"{}\", v))\n\n .collect::<Vec<_>>()\n\n .join(\" \");\n\n writeln!(writer, \"{}\", line)\n\n }\n\n\n\n fn write_payload_be(\n\n &self,\n", "file_path": "src/writer/payload.rs", "rank": 38, "score": 5.281597834952441 }, { "content": " let header_lines = read_header_lines(&mut input);\n\n assert_eq!(\n\n header_lines,\n\n vec![\n\n HeaderLine::FileIdentifierLine,\n\n HeaderLine::FormatLine(Format::Ascii {\n\n version: \"1.0\".to_string()\n\n }),\n\n HeaderLine::CommentLine(Comment(vec![\"test\".to_string(), \"data\".to_string()])),\n\n HeaderLine::ElementLine {\n\n name: \"vertex\".to_string(),\n\n count: 8\n\n },\n\n HeaderLine::PropertyLine {\n\n prop_type: PLYValueTypeName::Float,\n\n name: \"x\".to_string()\n\n },\n\n HeaderLine::PropertyLine {\n\n prop_type: PLYValueTypeName::Float,\n\n name: \"y\".to_string()\n", "file_path": "src/reader/header.rs", "rank": 39, "score": 5.225667213425976 }, { "content": " .map(|e| e.unwrap());\n\n Ok(PLYFile::from_lines(&mut lines))\n\n }\n\n pub fn from_lines<I: Iterator<Item = String>>(lines: &mut I) -> PLYFile {\n\n let header_lines = read_header_lines(lines);\n\n let mut ply = {\n\n let mut iter = header_lines.into_iter();\n\n from_header_lines(&mut iter)\n\n };\n\n match ply.format {\n\n crate::Format::Ascii { .. 
} => {\n\n for element in &mut ply.elements {\n\n match element {\n\n crate::Element::Element(element) => {\n\n read_elemet_payload_ascii(element, lines)\n\n }\n\n crate::Element::ListElement(element) => {\n\n read_elemet_payload_ascii(element, lines)\n\n }\n\n }\n", "file_path": "src/reader.rs", "rank": 40, "score": 4.956092683031789 }, { "content": " }\n\n }\n\n pub fn property(&self) -> &P {\n\n &self.props\n\n }\n\n pub fn property_mut(&mut self) -> &mut P {\n\n &mut self.props\n\n }\n\n pub fn payload(&self) -> &[Payload] {\n\n &self.payloads\n\n }\n\n pub fn payload_mut(&mut self) -> &mut [Payload] {\n\n &mut self.payloads\n\n }\n\n}\n\nimpl GenericElement<Property> {\n\n pub fn push_payload(&mut self, payload: Payload) -> PLYResult<()> {\n\n if self.property().len() != payload.len() {\n\n return Err(PLYError::PropertyLengthErr);\n\n }\n", "file_path": "src/lib.rs", "rank": 41, "score": 4.90383816488894 }, { "content": " PLYValue::Float(1f32),\n\n ]))\n\n .unwrap();\n\n element\n\n .push_payload(Payload::from(vec![\n\n PLYValue::Float(1f32),\n\n PLYValue::Float(1f32),\n\n PLYValue::Float(0f32),\n\n ]))\n\n .unwrap();\n\n element\n\n }));\n\n ply.elements.push(Element::ListElement({\n\n let mut element = GenericElement::new(\n\n \"list\",\n\n PropertyList::new(\"vertex_id\", PLYValueTypeName::Uchar, PLYValueTypeName::Char),\n\n );\n\n element\n\n .push_payload(Payload::from(vec![PLYValue::Char(3)]))\n\n .unwrap();\n", "file_path": "examples/build_ply.rs", "rank": 42, "score": 4.843522440125225 }, { "content": "use std::{\n\n fs::File,\n\n io::{self, BufRead, BufReader},\n\n path::Path,\n\n};\n\n\n\nuse crate::PLYFile;\n\n\n\npub(crate) mod header;\n\nuse header::{from_header_lines, read_header_lines};\n\n\n\npub(crate) mod payload;\n\nuse payload::{\n\n read_element_payload_be_bytes, read_element_payload_le_bytes, read_elemet_payload_ascii,\n\n};\n\n\n\nimpl PLYFile {\n\n pub fn from_file<P: AsRef<Path>>(path: P) -> io::Result<PLYFile> {\n\n let mut lines = 
BufReader::new(File::open(path).unwrap())\n\n .lines()\n", "file_path": "src/reader.rs", "rank": 43, "score": 4.825139939190123 }, { "content": " payload: &Payload,\n\n writer: &mut BufWriter<T>,\n\n ) -> std::io::Result<()> {\n\n for v in payload.0.iter() {\n\n match v {\n\n PLYValue::Char(v) => writer.write(&v.to_le_bytes())?,\n\n PLYValue::Uchar(v) => writer.write(&v.to_le_bytes())?,\n\n PLYValue::Short(v) => writer.write(&v.to_le_bytes())?,\n\n PLYValue::Ushort(v) => writer.write(&v.to_le_bytes())?,\n\n PLYValue::Int(v) => writer.write(&v.to_le_bytes())?,\n\n PLYValue::Uint(v) => writer.write(&v.to_le_bytes())?,\n\n PLYValue::Float(v) => writer.write(&v.to_le_bytes())?,\n\n PLYValue::Double(v) => writer.write(&v.to_le_bytes())?,\n\n };\n\n }\n\n Ok(())\n\n }\n\n}\n\n\n\nimpl<T: Write> WritePayload<T> for PropertyList {\n", "file_path": "src/writer/payload.rs", "rank": 44, "score": 4.805766825682562 }, { "content": " let result = element.push_payload(Payload(vec![\n\n PLYValue::Double(1f64),\n\n PLYValue::Double(2f64),\n\n PLYValue::Double(3f64),\n\n ]));\n\n assert_eq!(result, Err(PLYError::MissmatchDataType));\n\n assert!(element.count == 1);\n\n\n\n let result = element.push_payload(Payload(vec![\n\n PLYValue::Float(1f32),\n\n PLYValue::Float(2f32),\n\n PLYValue::Float(3f32),\n\n PLYValue::Float(4f32),\n\n ]));\n\n assert_eq!(result, Err(PLYError::PropertyLengthErr));\n\n assert!(element.count == 1);\n\n}\n\n\n\nimpl GenericElement<PropertyList> {\n\n pub fn push_payload(&mut self, payload: Payload) -> PLYResult<()> {\n", "file_path": "src/lib.rs", "rank": 45, "score": 4.692761770450672 }, { "content": " }\n\n });\n\n return (element, Some((next_name, next_count)));\n\n }\n\n HeaderLine::CommentLine(c) => comments.push(c),\n\n HeaderLine::EmptyLine => { /* do nothing */ }\n\n HeaderLine::FileIdentifierLine => {\n\n panic!(r#\"line \"ply\" is not allowed here\"#)\n\n }\n\n HeaderLine::FormatLine(_) => {\n\n panic!(r#\"keyword format is not allowed here\"#)\n\n 
}\n\n HeaderLine::EndHeader => {\n\n panic!(r#\"keyword end_header is not allowed here\"#)\n\n }\n\n HeaderLine::UnknownLine => {}\n\n }\n\n }\n\n\n\n let element = Element::Element({\n", "file_path": "src/reader/header.rs", "rank": 46, "score": 4.634006334045081 }, { "content": " element\n\n .push_payload(Payload::from(vec![PLYValue::Char(3), PLYValue::Char(3)]))\n\n .unwrap();\n\n element\n\n .push_payload(Payload::from(vec![\n\n PLYValue::Char(3),\n\n PLYValue::Char(3),\n\n PLYValue::Char(3),\n\n ]))\n\n .unwrap();\n\n element\n\n }));\n\n\n\n // write ply\n\n let mut writer = BufWriter::new(Vec::new());\n\n ply.write(&mut writer).unwrap();\n\n let string = String::from_utf8(writer.into_inner().unwrap()).unwrap();\n\n\n\n // we created same ply file as PLY\n\n assert_eq!(string.as_str(), PLY);\n\n\n\n // print ply\n\n print!(\"{:#?}\", ply);\n\n}\n", "file_path": "examples/build_ply.rs", "rank": 47, "score": 4.627091215692535 }, { "content": " PLYValue::Float(2f32),\n\n PLYValue::Float(3f32),\n\n PLYValue::Float(4f32),\n\n ]));\n\n assert!(result.is_ok());\n\n assert!(element.count == 2);\n\n\n\n let result = element.push_payload(Payload(vec![\n\n PLYValue::Double(1f64),\n\n PLYValue::Double(2f64),\n\n PLYValue::Double(3f64),\n\n ]));\n\n assert_eq!(result, Err(PLYError::MissmatchDataType));\n\n assert!(element.count == 2);\n\n}\n\n\n\n#[derive(Debug, Clone, PartialEq, Eq, Default)]\n\n/// property \"prop\" \"name\"\n\npub struct Property {\n\n pub(crate) props: Vec<PLYValueTypeName>,\n", "file_path": "src/lib.rs", "rank": 48, "score": 4.380188512849486 }, { "content": " Payload(vec![\n\n PLYValue::Uchar(1),\n\n PLYValue::Uchar(0),\n\n PLYValue::Uchar(1),\n\n ]),\n\n Payload(vec![\n\n PLYValue::Uchar(1),\n\n PLYValue::Uchar(1),\n\n PLYValue::Uchar(1),\n\n ]),\n\n Payload(vec![\n\n PLYValue::Uchar(1),\n\n PLYValue::Uchar(1),\n\n PLYValue::Uchar(0),\n\n ]),\n\n ],\n\n };\n\n write_element_payload(\n\n &element,\n\n &mut writer,\n", "file_path": 
"src/writer/payload.rs", "rank": 49, "score": 4.269173521377738 }, { "content": " pub(crate) names: Vec<String>,\n\n}\n\nimpl Property {\n\n pub fn new() -> Property {\n\n Self::default()\n\n }\n\n pub fn push_prop<S: Into<String>>(&mut self, name: S, property: PLYValueTypeName) {\n\n self.props.push(property);\n\n self.names.push(name.into());\n\n }\n\n pub fn is_empty(&self) -> bool {\n\n debug_assert_eq!(self.props.is_empty(), self.names.is_empty());\n\n self.props.is_empty()\n\n }\n\n pub fn len(&self) -> usize {\n\n debug_assert_eq!(self.props.len(), self.names.len());\n\n self.props.len()\n\n }\n\n /// Iterator over element property (name, prop)\n\n pub fn iter(&self) -> impl Iterator<Item = (&str, PLYValueTypeName)> {\n", "file_path": "src/lib.rs", "rank": 50, "score": 4.198909505729594 }, { "content": "//! # ply\n\n//!\n\n//! [PLY (Polygon File Format)](http://paulbourke.net/dataformats/ply/) file parser for Rust\n\n//!\n\nuse std::{\n\n convert::TryInto,\n\n fmt::{Debug, Display},\n\n ops::{Deref, DerefMut},\n\n};\n\n\n\npub mod error;\n\nuse error::{PLYError, PLYResult};\n\n\n\npub(crate) mod ply_value;\n\npub use ply_value::{PLYValue, PLYValueTypeName};\n\n\n\npub(crate) mod reader;\n\npub(crate) mod writer;\n\n\n\n#[derive(Debug, Clone, PartialEq)]\n", "file_path": "src/lib.rs", "rank": 51, "score": 4.082099938094538 }, { "content": " match element {\n\n crate::Element::Element(element) => {\n\n read_element_payload_le_bytes(element, &mut bytes)\n\n }\n\n crate::Element::ListElement(element) => {\n\n read_element_payload_le_bytes(element, &mut bytes)\n\n }\n\n }\n\n }\n\n }\n\n }\n\n ply\n\n }\n\n}\n\n\n\n#[test]\n", "file_path": "src/reader.rs", "rank": 52, "score": 4.025968119331714 }, { "content": " match self.count.try_from(payload.len()).unwrap() {\n\n PLYValue::Char(v) => writer.write(&v.to_be_bytes())?,\n\n PLYValue::Uchar(v) => writer.write(&v.to_be_bytes())?,\n\n PLYValue::Short(v) => writer.write(&v.to_be_bytes())?,\n\n PLYValue::Ushort(v) => 
writer.write(&v.to_be_bytes())?,\n\n PLYValue::Int(v) => writer.write(&v.to_be_bytes())?,\n\n PLYValue::Uint(v) => writer.write(&v.to_be_bytes())?,\n\n PLYValue::Float(v) => writer.write(&v.to_be_bytes())?,\n\n PLYValue::Double(v) => writer.write(&v.to_be_bytes())?,\n\n };\n\n for v in payload.iter() {\n\n match v {\n\n PLYValue::Char(v) => writer.write(&v.to_be_bytes())?,\n\n PLYValue::Uchar(v) => writer.write(&v.to_be_bytes())?,\n\n PLYValue::Short(v) => writer.write(&v.to_be_bytes())?,\n\n PLYValue::Ushort(v) => writer.write(&v.to_be_bytes())?,\n\n PLYValue::Int(v) => writer.write(&v.to_be_bytes())?,\n\n PLYValue::Uint(v) => writer.write(&v.to_be_bytes())?,\n\n PLYValue::Float(v) => writer.write(&v.to_be_bytes())?,\n\n PLYValue::Double(v) => writer.write(&v.to_be_bytes())?,\n", "file_path": "src/writer/payload.rs", "rank": 53, "score": 3.814248233181304 }, { "content": "use std::{convert::TryInto, fmt::Display, str::FromStr};\n\n\n\nuse crate::error::{PLYError, PLYResult};\n\n\n\n#[derive(Clone, Copy, Debug, PartialEq, Eq)]\n\n/// Scalar data types a property may have (without value)\n\npub enum PLYValueTypeName {\n\n Char,\n\n Uchar,\n\n Short,\n\n Ushort,\n\n Int,\n\n Uint,\n\n Float,\n\n Double,\n\n}\n\nimpl PLYValueTypeName {\n\n /// Returns bytes length of type\n\n pub fn bytes_len(&self) -> usize {\n\n match self {\n", "file_path": "src/ply_value.rs", "rank": 54, "score": 3.783085123484 }, { "content": " return (\n\n Element::ListElement(GenericElement {\n\n name,\n\n count,\n\n props: prop_list,\n\n payloads: Vec::with_capacity(count),\n\n }),\n\n None,\n\n );\n\n }\n\n HeaderLine::ElementLine {\n\n name: next_name,\n\n count: next_count,\n\n } => {\n\n let element = Element::Element({\n\n GenericElement {\n\n name,\n\n count,\n\n props: prop,\n\n payloads: Vec::with_capacity(count),\n", "file_path": "src/reader/header.rs", "rank": 55, "score": 3.748465700027103 }, { "content": " for _ in 0..element.count {\n\n let payload = 
element.props.read_as_be(bytes);\n\n element.payloads.push(payload);\n\n }\n\n}\n\n\n\npub(crate) fn read_element_payload_le_bytes<P, I>(element: &mut GenericElement<P>, bytes: &mut I)\n\nwhere\n\n P: ReadPayload<Payload = Payload>,\n\n I: Iterator<Item = u8>,\n\n{\n\n for _ in 0..element.count {\n\n let payload = element.props.read_as_le(bytes);\n\n element.payloads.push(payload);\n\n }\n\n}\n\n\n\n#[test]\n", "file_path": "src/reader/payload.rs", "rank": 56, "score": 3.71125853016384 }, { "content": " payload: &Payload,\n\n writer: &mut BufWriter<T>,\n\n ) -> std::io::Result<()> {\n\n for v in payload.0.iter() {\n\n match v {\n\n PLYValue::Char(v) => writer.write(&v.to_be_bytes())?,\n\n PLYValue::Uchar(v) => writer.write(&v.to_be_bytes())?,\n\n PLYValue::Short(v) => writer.write(&v.to_be_bytes())?,\n\n PLYValue::Ushort(v) => writer.write(&v.to_be_bytes())?,\n\n PLYValue::Int(v) => writer.write(&v.to_be_bytes())?,\n\n PLYValue::Uint(v) => writer.write(&v.to_be_bytes())?,\n\n PLYValue::Float(v) => writer.write(&v.to_be_bytes())?,\n\n PLYValue::Double(v) => writer.write(&v.to_be_bytes())?,\n\n };\n\n }\n\n Ok(())\n\n }\n\n\n\n fn write_payload_le(\n\n &self,\n", "file_path": "src/writer/payload.rs", "rank": 57, "score": 3.6970549303618667 }, { "content": "impl Display for PLYValue {\n\n fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {\n\n match self {\n\n PLYValue::Char(v) => write!(f, \"{}\", v),\n\n PLYValue::Uchar(v) => write!(f, \"{}\", v),\n\n PLYValue::Short(v) => write!(f, \"{}\", v),\n\n PLYValue::Ushort(v) => write!(f, \"{}\", v),\n\n PLYValue::Int(v) => write!(f, \"{}\", v),\n\n PLYValue::Uint(v) => write!(f, \"{}\", v),\n\n PLYValue::Float(v) => write!(f, \"{}\", v),\n\n PLYValue::Double(v) => write!(f, \"{}\", v),\n\n }\n\n }\n\n}\n\n\n\nimpl TryInto<usize> for PLYValue {\n\n type Error = PLYError;\n\n\n\n fn try_into(self) -> Result<usize, Self::Error> {\n\n match self {\n", "file_path": "src/ply_value.rs", "rank": 58, "score": 
3.6823393380001743 }, { "content": " for v in payload.iter() {\n\n match v {\n\n PLYValue::Char(v) => writer.write(&v.to_le_bytes())?,\n\n PLYValue::Uchar(v) => writer.write(&v.to_le_bytes())?,\n\n PLYValue::Short(v) => writer.write(&v.to_le_bytes())?,\n\n PLYValue::Ushort(v) => writer.write(&v.to_le_bytes())?,\n\n PLYValue::Int(v) => writer.write(&v.to_le_bytes())?,\n\n PLYValue::Uint(v) => writer.write(&v.to_le_bytes())?,\n\n PLYValue::Float(v) => writer.write(&v.to_le_bytes())?,\n\n PLYValue::Double(v) => writer.write(&v.to_le_bytes())?,\n\n };\n\n }\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/writer/payload.rs", "rank": 59, "score": 3.6579458183197073 }, { "content": " };\n\n\n\n // read comment and element\n\n let mut comments = Vec::new();\n\n let mut elements = Vec::new();\n\n while let Some(mut next) = read_to_element_line(lines, &mut comments) {\n\n loop {\n\n let (element, next_option) = read_element_props(lines, &mut comments, next);\n\n elements.push(element);\n\n if let Some(next_some) = next_option {\n\n next = next_some;\n\n } else {\n\n break;\n\n }\n\n }\n\n }\n\n\n\n PLYFile {\n\n format,\n\n comments,\n\n elements,\n\n }\n\n}\n\n\n\n#[test]\n", "file_path": "src/reader/header.rs", "rank": 60, "score": 3.6570114352130663 }, { "content": " };\n\n }\n\n Ok(())\n\n }\n\n\n\n fn write_payload_le(\n\n &self,\n\n payload: &Self::Payload,\n\n writer: &mut BufWriter<T>,\n\n ) -> std::io::Result<()> {\n\n match self.count.try_from(payload.len()).unwrap() {\n\n PLYValue::Char(v) => writer.write(&v.to_le_bytes())?,\n\n PLYValue::Uchar(v) => writer.write(&v.to_le_bytes())?,\n\n PLYValue::Short(v) => writer.write(&v.to_le_bytes())?,\n\n PLYValue::Ushort(v) => writer.write(&v.to_le_bytes())?,\n\n PLYValue::Int(v) => writer.write(&v.to_le_bytes())?,\n\n PLYValue::Uint(v) => writer.write(&v.to_le_bytes())?,\n\n PLYValue::Float(v) => writer.write(&v.to_le_bytes())?,\n\n PLYValue::Double(v) => writer.write(&v.to_le_bytes())?,\n\n };\n", "file_path": 
"src/writer/payload.rs", "rank": 61, "score": 3.5798765442006095 }, { "content": "\n\npub(crate) trait WritePayload<T: Write> {\n\n type Payload;\n\n\n\n fn write_payload_ascii(\n\n &self,\n\n payload: &Self::Payload,\n\n writer: &mut BufWriter<T>,\n\n ) -> std::io::Result<()>;\n\n fn write_payload_be(\n\n &self,\n\n payload: &Self::Payload,\n\n writer: &mut BufWriter<T>,\n\n ) -> std::io::Result<()>;\n\n fn write_payload_le(\n\n &self,\n\n payload: &Self::Payload,\n\n writer: &mut BufWriter<T>,\n\n ) -> std::io::Result<()>;\n\n}\n", "file_path": "src/writer/payload.rs", "rank": 62, "score": 3.392257706588562 }, { "content": " self.names\n\n .iter()\n\n .map(|x| x.as_str())\n\n .zip(self.props.iter().copied())\n\n }\n\n /// Iterator over element property (name, prop)\n\n pub fn iter_mut(&mut self) -> impl Iterator<Item = (&mut str, &mut PLYValueTypeName)> {\n\n self.names\n\n .iter_mut()\n\n .map(|x| x.as_mut_str())\n\n .zip(self.props.iter_mut())\n\n }\n\n}\n\nimpl<S: Into<String>> From<Vec<(S, PLYValueTypeName)>> for Property {\n\n fn from(v: Vec<(S, PLYValueTypeName)>) -> Self {\n\n let (names, props) = v.into_iter().map(|(s, p)| (s.into(), p)).unzip();\n\n Self { names, props }\n\n }\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 63, "score": 3.3889037669004263 }, { "content": "pub struct Comment(Vec<String>);\n\nimpl Comment {\n\n pub fn new<S: Into<String>>(comment: S) -> Comment {\n\n Comment(\n\n comment\n\n .into()\n\n .split_whitespace()\n\n .map(|v| v.to_string())\n\n .collect(),\n\n )\n\n }\n\n}\n\nimpl Display for Comment {\n\n fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {\n\n write!(f, \"comment {}\", self.0.join(\" \"))\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, PartialEq)]\n\n/// Enum represent PLY Element\n", "file_path": "src/lib.rs", "rank": 64, "score": 3.32800781874282 }, { "content": " GenericElement {\n\n name,\n\n count,\n\n props: prop,\n\n payloads: Vec::with_capacity(count),\n\n }\n\n });\n\n\n\n (element, 
None)\n\n}\n\n\n", "file_path": "src/reader/header.rs", "rank": 65, "score": 3.3188749429672804 }, { "content": " //6\n\n 1u8.to_be(),\n\n 1u8.to_be(),\n\n 1u8.to_be(),\n\n //7\n\n 1u8.to_be(),\n\n 1u8.to_be(),\n\n 0u8.to_be(),\n\n ];\n\n read_element_payload_be_bytes(&mut element, &mut bytes.iter().copied());\n\n assert_eq!(\n\n element.payloads,\n\n vec![\n\n Payload(vec![\n\n PLYValue::Uchar(0),\n\n PLYValue::Uchar(0),\n\n PLYValue::Uchar(0)\n\n ],),\n\n Payload(vec![\n\n PLYValue::Uchar(0),\n", "file_path": "src/reader/payload.rs", "rank": 66, "score": 2.954343590082111 }, { "content": " Payload(vec![\n\n PLYValue::Float(1f32),\n\n PLYValue::Float(0f32),\n\n PLYValue::Float(1f32),\n\n ]),\n\n Payload(vec![\n\n PLYValue::Float(1f32),\n\n PLYValue::Float(1f32),\n\n PLYValue::Float(1f32),\n\n ]),\n\n Payload(vec![\n\n PLYValue::Float(1f32),\n\n PLYValue::Float(1f32),\n\n PLYValue::Float(0f32),\n\n ]),\n\n ],\n\n });\n\n let element_list = Element::ListElement(GenericElement {\n\n name: \"list\".to_string(),\n\n count: 3,\n", "file_path": "src/writer.rs", "rank": 67, "score": 2.941884033411719 }, { "content": "#[derive(Debug, Clone, PartialEq, Eq)]\n\n/// property list \"length-type\" \"prop-type\" \"name\"\n\npub struct PropertyList {\n\n pub(crate) count: PLYValueTypeName,\n\n pub(crate) prop: PLYValueTypeName,\n\n pub(crate) name: String,\n\n}\n\nimpl PropertyList {\n\n pub fn new<S: Into<String>>(\n\n name: S,\n\n count: PLYValueTypeName,\n\n prop: PLYValueTypeName,\n\n ) -> PropertyList {\n\n Self {\n\n count,\n\n prop,\n\n name: name.into(),\n\n }\n\n }\n\n}\n", "file_path": "src/lib.rs", "rank": 68, "score": 2.9222693816867116 }, { "content": " //6\n\n 1u8.to_le(),\n\n 1u8.to_le(),\n\n 1u8.to_le(),\n\n //7\n\n 1u8.to_le(),\n\n 1u8.to_le(),\n\n 0u8.to_le(),\n\n ];\n\n read_element_payload_le_bytes(&mut element, &mut bytes.iter().copied());\n\n assert_eq!(\n\n element.payloads,\n\n vec![\n\n Payload(vec![\n\n PLYValue::Uchar(0),\n\n PLYValue::Uchar(0),\n\n 
PLYValue::Uchar(0)\n\n ],),\n\n Payload(vec![\n\n PLYValue::Uchar(0),\n", "file_path": "src/reader/payload.rs", "rank": 69, "score": 2.8068177642611065 }, { "content": "//! Command to run benches:\n\n//! `cargo +nightly bench`\n\n//! bench is nightly feature.\n\n//! So use `+nightly` to tempolary(not change default) change toolchain for nightly\n\n\n\n#![feature(test)]\n\n\n\nuse ply::PLYFile;\n\n\n\nextern crate test;\n\nuse test::Bencher;\n\n\n\nconst PATH: &str = \"./benches/ply_files/your_file_name.ply\";\n\n\n\n/// Measure times to read file which can found with PATH \n\n/// In my enviroment, 564,867,610 ns (0.5sec) is measured to read ply file (size: about 19MB, 850000 vertex)\n\n#[bench]\n", "file_path": "benches/benche.rs", "rank": 70, "score": 2.7927374050587748 }, { "content": " props: PropertyList {\n\n count: PLYValueTypeName::Uchar,\n\n prop: PLYValueTypeName::Char,\n\n name: \"vertex_id\".to_string(),\n\n },\n\n payloads: vec![\n\n Payload(vec![PLYValue::Char(3)]),\n\n Payload(vec![PLYValue::Char(3), PLYValue::Char(3)]),\n\n Payload(vec![\n\n PLYValue::Char(3),\n\n PLYValue::Char(3),\n\n PLYValue::Char(3),\n\n ]),\n\n ],\n\n });\n\n PLYFile {\n\n format: Format::Ascii {\n\n version: \"1.0\".to_string(),\n\n },\n\n comments: vec![Comment(vec![\"test\".to_string(), \"data\".to_string()])],\n", "file_path": "src/writer.rs", "rank": 71, "score": 2.77843269131953 }, { "content": " &Format::Ascii {\n\n version: \"1.0\".to_string(),\n\n },\n\n )\n\n .unwrap();\n\n assert_eq!(\n\n r#\"0 0 0\n\n0 0 1\n\n0 1 1\n\n0 1 0\n\n1 0 0\n\n1 0 1\n\n1 1 1\n\n1 1 0\n\n\"#\n\n .as_bytes()\n\n .to_vec(),\n\n writer.into_inner().unwrap(),\n\n );\n\n}\n", "file_path": "src/writer/payload.rs", "rank": 72, "score": 2.7670408242460507 }, { "content": " ]))\n\n .unwrap();\n\n element\n\n .push_payload(Payload::from(vec![\n\n PLYValue::Float(1f32),\n\n PLYValue::Float(0f32),\n\n PLYValue::Float(0f32),\n\n ]))\n\n .unwrap();\n\n element\n\n .push_payload(Payload::from(vec![\n\n 
PLYValue::Float(1f32),\n\n PLYValue::Float(0f32),\n\n PLYValue::Float(1f32),\n\n ]))\n\n .unwrap();\n\n element\n\n .push_payload(Payload::from(vec![\n\n PLYValue::Float(1f32),\n\n PLYValue::Float(1f32),\n", "file_path": "examples/build_ply.rs", "rank": 73, "score": 2.6916123792973052 }, { "content": " payloads: vec![\n\n Payload(vec![\n\n PLYValue::Float(0f32),\n\n PLYValue::Float(0f32),\n\n PLYValue::Float(0f32)\n\n ],),\n\n Payload(vec![\n\n PLYValue::Float(0f32),\n\n PLYValue::Float(0f32),\n\n PLYValue::Float(1f32)\n\n ],),\n\n Payload(vec![\n\n PLYValue::Float(0f32),\n\n PLYValue::Float(1f32),\n\n PLYValue::Float(1f32)\n\n ],)\n\n ]\n\n }),\n\n Element::ListElement(GenericElement {\n\n name: \"face\".to_string(),\n", "file_path": "src/reader.rs", "rank": 74, "score": 2.6552838132150303 }, { "content": " .unwrap();\n\n element\n\n .push_payload(Payload::from(vec![\n\n PLYValue::Float(0f32),\n\n PLYValue::Float(0f32),\n\n PLYValue::Float(1f32),\n\n ]))\n\n .unwrap();\n\n element\n\n .push_payload(Payload::from(vec![\n\n PLYValue::Float(0f32),\n\n PLYValue::Float(1f32),\n\n PLYValue::Float(1f32),\n\n ]))\n\n .unwrap();\n\n element\n\n .push_payload(Payload::from(vec![\n\n PLYValue::Float(0f32),\n\n PLYValue::Float(1f32),\n\n PLYValue::Float(0f32),\n", "file_path": "examples/build_ply.rs", "rank": 75, "score": 2.6199228375168726 }, { "content": " type Payload = Payload;\n\n\n\n fn write_payload_ascii(\n\n &self,\n\n payload: &Payload,\n\n writer: &mut BufWriter<T>,\n\n ) -> std::io::Result<()> {\n\n let line = payload\n\n .iter()\n\n .map(|v| format!(\"{}\", v))\n\n .collect::<Vec<_>>()\n\n .join(\" \");\n\n writeln!(writer, \"{} {}\", payload.len(), line)\n\n }\n\n\n\n fn write_payload_be(\n\n &self,\n\n payload: &Payload,\n\n writer: &mut BufWriter<T>,\n\n ) -> std::io::Result<()> {\n", "file_path": "src/writer/payload.rs", "rank": 76, "score": 2.579456099041905 }, { "content": "# ply_rs\n\n\n\nPure Rust PLY (Polygon File Format) file reader, writer (also create 
file in code, see exmaples).\n\n\n\nAscii/Binary (both big-endian and little-endian) format supported.\n\n\n\n## Installation\n\n\n\nAdd dependency to your Cargo.toml\n\n\n\n```toml\n\n[dependencies]\n\nply = { git=\"https://github.com/diegodox/ply_rs.git\", tag = \"v0.1.2\" }\n\n```\n\n\n", "file_path": "README.md", "rank": 77, "score": 2.509130091594194 }, { "content": " if !(payload\n\n .iter()\n\n .all(|v| v.value_type() == self.property().prop))\n\n {\n\n return Err(PLYError::MissmatchDataType);\n\n }\n\n\n\n self.count += 1;\n\n self.payloads.push(payload);\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/lib.rs", "rank": 78, "score": 2.440039973130429 }, { "content": " \"ascii\" => Format::Ascii {\n\n version: words.next().expect(\"format version not found\").to_string(),\n\n },\n\n \"binary_little_endian\" => Format::BinaryLittleEndian {\n\n version: words.next().expect(\"format version not found\").to_string(),\n\n },\n\n \"binary_big_endian\" => Format::BinaryBigEndian {\n\n version: words.next().expect(\"format version not found\").to_string(),\n\n },\n\n _ => panic!(\"unknown format style\"),\n\n })\n\n }\n\n \"ply\" => HeaderLine::FileIdentifierLine,\n\n \"comment\" => HeaderLine::CommentLine(Comment(words.map(|s| s.to_string()).collect())),\n\n \"end_header\" => HeaderLine::EndHeader,\n\n x => {\n\n eprintln!(\"unknown line identifier: {}\", x);\n\n HeaderLine::UnknownLine\n\n }\n\n },\n\n }\n\n}\n\n\n", "file_path": "src/reader/header.rs", "rank": 79, "score": 2.4335220685017784 }, { "content": "1 0 1\n\n1 1 1\n\n1 1 0\n\n\"#;\n\n read_elemet_payload_ascii(&mut element, &mut lines.lines().map(|e| e.to_string()));\n\n assert_eq!(\n\n element.payloads,\n\n vec![\n\n Payload(vec![\n\n PLYValue::Uchar(0),\n\n PLYValue::Uchar(0),\n\n PLYValue::Uchar(0)\n\n ],),\n\n Payload(vec![\n\n PLYValue::Uchar(0),\n\n PLYValue::Uchar(0),\n\n PLYValue::Uchar(1)\n\n ],),\n\n Payload(vec![\n\n PLYValue::Uchar(0),\n", "file_path": "src/reader/payload.rs", "rank": 80, "score": 
2.383858714112057 }, { "content": " if !payload\n\n .iter()\n\n .zip(self.property().props.iter())\n\n .all(|(v, t)| v.value_type() == *t)\n\n {\n\n return Err(PLYError::MissmatchDataType);\n\n }\n\n\n\n self.count += 1;\n\n self.payloads.push(payload);\n\n Ok(())\n\n }\n\n}\n\n#[test]\n", "file_path": "src/lib.rs", "rank": 81, "score": 2.3135905765425053 }, { "content": " /// End Header\n\n EndHeader,\n\n /// Line start from unknown identifier\n\n UnknownLine,\n\n}\n\n\n\nimpl HeaderLine {\n\n pub fn is_end_header(&self) -> bool {\n\n matches!(self, HeaderLine::EndHeader)\n\n }\n\n}\n\n\n\n/// Parse PLY Header Line to [HeaderLine]\n\npub(crate) fn parse_header_line<S: AsRef<str>>(line: S) -> HeaderLine {\n\n let mut words = line.as_ref().split_whitespace();\n\n match words.next() {\n\n None => HeaderLine::EmptyLine,\n\n Some(first_token) => match first_token {\n\n \"property\" => match words.next().expect(r#\"property name or \"list\" not found\"#) {\n\n \"list\" => {\n", "file_path": "src/reader/header.rs", "rank": 82, "score": 1.9690882682609918 }, { "content": "//! 
Error type definitions\n\n\n\npub type PLYResult<T> = Result<T, PLYError>;\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq)]\n\n/// PLY Error type\n\npub enum PLYError {\n\n MissmatchDataType,\n\n UnknownPLYTypeIdentifier,\n\n ParseFromStrErr,\n\n TryIntoUsizeEr,\n\n TypeConversionFail,\n\n PropertyLengthErr,\n\n}\n", "file_path": "src/error.rs", "rank": 83, "score": 1.6977257554715108 }, { "content": " }\n\n }\n\n}\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\n/// Scalar data types a property may have (with value)\n\npub enum PLYValue {\n\n Char(i8),\n\n Uchar(u8),\n\n Short(i16),\n\n Ushort(u16),\n\n Int(i32),\n\n Uint(u32),\n\n Float(f32),\n\n Double(f64),\n\n}\n\n\n\nimpl PLYValue {\n\n /// Returns [PLYValueTypeName] collespond to `self`\n\n pub fn value_type(&self) -> PLYValueTypeName {\n\n match self {\n", "file_path": "src/ply_value.rs", "rank": 84, "score": 1.5777306045431594 }, { "content": " count: 3,\n\n props: PropertyList {\n\n prop: crate::PLYValueTypeName::Uint,\n\n name: \"vertex_list\".to_string(),\n\n count: crate::PLYValueTypeName::Uchar\n\n },\n\n payloads: vec![\n\n Payload(vec![PLYValue::Uint(1)]),\n\n Payload(vec![PLYValue::Uint(1), PLYValue::Uint(2)]),\n\n Payload(vec![\n\n PLYValue::Uint(1),\n\n PLYValue::Uint(2),\n\n PLYValue::Uint(3)\n\n ])\n\n ]\n\n })\n\n ]\n\n }\n\n )\n\n}\n", "file_path": "src/reader.rs", "rank": 85, "score": 1.5593613440556369 }, { "content": " fn read_as_ascii<S: AsRef<str>>(&self, line: S) -> Self::Payload;\n\n fn read_as_be<I: Iterator<Item = u8>>(&self, bytes: &mut I) -> Self::Payload;\n\n fn read_as_le<I: Iterator<Item = u8>>(&self, bytes: &mut I) -> Self::Payload;\n\n}\n\n\n\nimpl ReadPayload for Property {\n\n type Payload = Payload;\n\n\n\n fn read_as_ascii<S: AsRef<str>>(&self, line: S) -> Payload {\n\n let words = line.as_ref().split_ascii_whitespace();\n\n Payload(\n\n self.props\n\n .iter()\n\n .zip(words)\n\n .map(|(t, s)| t.parse(s).unwrap())\n\n .collect(),\n\n )\n\n }\n\n\n\n fn read_as_be<I: 
Iterator<Item = u8>>(&self, bytes: &mut I) -> Payload {\n", "file_path": "src/reader/payload.rs", "rank": 86, "score": 1.3408097329668291 }, { "content": " },\n\n HeaderLine::PropertyLine {\n\n prop_type: PLYValueTypeName::Float,\n\n name: \"z\".to_string()\n\n },\n\n // HeaderLine::EndHeader\n\n // EndHeader is not member of header lines\n\n ]\n\n );\n\n assert_eq!(input.next(), Some(\"0 0 0\".to_string()));\n\n assert_eq!(input.next(), Some(\"0 0 1\".to_string()));\n\n assert_eq!(input.next(), Some(\"0 1 1\".to_string()));\n\n assert_eq!(input.next(), Some(\"0 1 0\".to_string()));\n\n assert_eq!(input.next(), Some(\"1 0 0\".to_string()));\n\n assert_eq!(input.next(), Some(\"1 0 1\".to_string()));\n\n assert_eq!(input.next(), Some(\"1 1 1\".to_string()));\n\n assert_eq!(input.next(), Some(\"1 1 0\".to_string()));\n\n assert_eq!(input.next(), None);\n\n}\n\n\n", "file_path": "src/reader/header.rs", "rank": 87, "score": 1.2768945200385384 }, { "content": " Payload(self.props.iter().map(|t| t.from_be_bytes(bytes)).collect())\n\n }\n\n\n\n fn read_as_le<I: Iterator<Item = u8>>(&self, bytes: &mut I) -> Payload {\n\n Payload(self.props.iter().map(|t| t.from_le_bytes(bytes)).collect())\n\n }\n\n}\n\n\n\nimpl ReadPayload for PropertyList {\n\n type Payload = Payload;\n\n\n\n fn read_as_ascii<S: AsRef<str>>(&self, line: S) -> Payload {\n\n let mut words = line.as_ref().split_ascii_whitespace();\n\n let count_usize = words.next().unwrap().parse().unwrap();\n\n let data = words\n\n .map(|s| self.prop.parse(s).unwrap())\n\n .collect::<Vec<_>>();\n\n assert_eq!(data.len(), count_usize);\n\n\n\n Payload(data)\n", "file_path": "src/reader/payload.rs", "rank": 88, "score": 1.2648357994825106 } ]
Rust
backend/api/src/http/endpoints/user/public_user.rs
jewish-interactive/ji-cloud
b6164bf1d15277115bcab1d8c9f91619e706231d
use actix_web::{ web::{Data, Json, Path, Query}, HttpResponse, }; use futures::try_join; use shared::{ api::{endpoints::user, ApiEndpoint}, domain::{ asset::DraftOrLive, course::CourseBrowseResponse, jig::JigBrowseResponse, user::public_user::{ BrowsePublicUserFollowersResponse as BrowseFollowersResponse, BrowsePublicUserFollowingResponse as BrowseFollowingsResponse, BrowsePublicUserResourcesResponse as BrowseResourcesResponse, BrowsePublicUserResponse, PublicUser, SearchPublicUserResponse, }, }, }; use sqlx::PgPool; use uuid::Uuid; use crate::{ db, error::{self, ServiceKind}, extractor::TokenUser, http::endpoints::course::{DEFAULT_PAGE_LIMIT, MAX_PAGE_LIMIT}, service::ServiceData, }; pub async fn get( db: Data<PgPool>, _auth: Option<TokenUser>, path: Path<Uuid>, ) -> Result<Json<<user::GetPublicUser as ApiEndpoint>::Res>, error::NotFound> { let user_id = path.into_inner(); let user: PublicUser = db::user::public_user::get(&db, user_id).await?; Ok(Json(user)) } pub async fn search( db: Data<PgPool>, claims: Option<TokenUser>, algolia: ServiceData<crate::algolia::Client>, query: Option<Query<<user::Search as ApiEndpoint>::Req>>, ) -> Result<Json<<user::Search as ApiEndpoint>::Res>, error::Service> { let query = query.map_or_else(Default::default, Query::into_inner); let page_limit = page_limit(query.page_limit) .await .map_err(|e| error::Service::InternalServerError(e))?; let user_id = db::user::public_user::auth_claims(&db, claims, query.user_id).await?; let (ids, pages, total_hits) = algolia .search_public_user( &query.q, query.username, query.name, user_id, query.language, query.organization, query.persona, page_limit, query.page, ) .await? 
.ok_or_else(|| error::Service::DisabledService(ServiceKind::Algolia))?; let users: Vec<_> = db::user::public_user::get_by_ids(db.as_ref(), &ids).await?; Ok(Json(SearchPublicUserResponse { users, pages, total_user_count: total_hits, })) } pub async fn browse( db: Data<PgPool>, _auth: Option<TokenUser>, query: Option<Query<<user::BrowsePublicUser as ApiEndpoint>::Req>>, ) -> Result<Json<<user::BrowsePublicUser as ApiEndpoint>::Res>, error::NotFound> { let query = query.map_or_else(Default::default, Query::into_inner); let page_limit = page_limit(query.page_limit) .await .map_err(|e| error::NotFound::InternalServerError(e))?; let browse_future = db::user::public_user::browse_users(&db, query.page.unwrap_or(0), page_limit as u64); let total_count_future = db::user::public_user::total_user_count(db.as_ref()); let (users, total_user_count) = try_join!(browse_future, total_count_future,)?; let pages = (total_user_count / (page_limit as u64) + (total_user_count % (page_limit as u64) != 0) as u64) as u32; Ok(Json(BrowsePublicUserResponse { users, pages, total_user_count, })) } pub async fn browse_user_jigs( db: Data<PgPool>, _auth: Option<TokenUser>, path: Path<Uuid>, query: Option<Query<<user::BrowseUserJigs as ApiEndpoint>::Req>>, ) -> Result<Json<<user::BrowseUserJigs as ApiEndpoint>::Res>, error::NotFound> { let (query, user_id) = ( query.map_or_else(Default::default, Query::into_inner), path.into_inner(), ); let page_limit = page_limit(query.page_limit) .await .map_err(|e| error::NotFound::InternalServerError(e))?; let privacy_level = vec![]; let resource_types = vec![]; let browse_future = db::jig::browse( &db, Some(user_id), None, Some(DraftOrLive::Live), privacy_level.to_owned(), Some(false), query.page.unwrap_or(0) as i32, page_limit, resource_types.to_owned(), ); let total_count_future = db::jig::filtered_count( db.as_ref(), privacy_level.to_owned(), Some(false), Some(user_id), None, Some(DraftOrLive::Live), resource_types.to_owned(), ); let (jigs, (total_count, 
count)) = try_join!(browse_future, total_count_future,)?; let pages = (count / (page_limit as u64) + (count % (page_limit as u64) != 0) as u64) as u32; Ok(Json(JigBrowseResponse { jigs, pages, total_jig_count: total_count, })) } pub async fn browse_user_resources( db: Data<PgPool>, _auth: Option<TokenUser>, path: Path<Uuid>, query: Option<Query<<user::BrowseResources as ApiEndpoint>::Req>>, ) -> Result<Json<<user::BrowseResources as ApiEndpoint>::Res>, error::NotFound> { let (query, user_id) = ( query.map_or_else(Default::default, Query::into_inner), path.into_inner(), ); let page_limit = page_limit(query.page_limit) .await .map_err(|e| error::NotFound::InternalServerError(e))?; let browse_future = db::user::public_user::browse_user_resources( &db, user_id, query.page.unwrap_or(0), page_limit as u64, ); let total_count_future = db::user::public_user::total_resource_count(&db, user_id); let (resources, total_resource_count) = try_join!(browse_future, total_count_future,)?; let pages = (total_resource_count / (page_limit as u64) + (total_resource_count % (page_limit as u64) != 0) as u64) as u32; Ok(Json(BrowseResourcesResponse { resources, pages, total_resource_count, })) } pub async fn browse_user_courses( db: Data<PgPool>, _auth: Option<TokenUser>, path: Path<Uuid>, query: Option<Query<<user::BrowseCourses as ApiEndpoint>::Req>>, ) -> Result<Json<<user::BrowseCourses as ApiEndpoint>::Res>, error::NotFound> { let (query, user_id) = ( query.map_or_else(Default::default, Query::into_inner), path.into_inner(), ); let privacy_level = vec![]; let resource_types = vec![]; let page_limit = page_limit(query.page_limit) .await .map_err(|e| error::NotFound::InternalServerError(e))?; let browse_future = db::course::browse( &db, Some(user_id), Some(DraftOrLive::Live), privacy_level.to_owned(), query.page.unwrap_or(0) as i32, page_limit, resource_types.to_owned(), ); let total_count_future = db::course::filtered_count( db.as_ref(), privacy_level.to_owned(), Some(user_id), 
Some(DraftOrLive::Live), resource_types.to_owned(), ); let (courses, (total_count, count)) = try_join!(browse_future, total_count_future,)?; let pages = (count / (page_limit as u64) + (count % (page_limit as u64) != 0) as u64) as u32; Ok(Json(CourseBrowseResponse { courses, pages, total_course_count: total_count, })) } pub async fn follow( db: Data<PgPool>, claims: TokenUser, path: Path<Uuid>, ) -> Result<HttpResponse, error::NotFound> { let (user_id, follower_id) = (path.into_inner(), claims.0.user_id); if user_id == follower_id { return Err(error::NotFound::InternalServerError(anyhow::anyhow!( "User cannot follow self" ))); } db::user::public_user::follow(&db, user_id, follower_id).await?; Ok(HttpResponse::NoContent().finish()) } pub async fn unfollow( db: Data<PgPool>, claims: TokenUser, path: Path<Uuid>, ) -> Result<HttpResponse, error::NotFound> { let (user_id, follower_id) = (path.into_inner(), claims.0.user_id); db::user::public_user::unfollow(&db, user_id, follower_id).await?; Ok(HttpResponse::NoContent().finish()) } pub async fn browse_user_followers( db: Data<PgPool>, _auth: TokenUser, path: Path<Uuid>, query: Option<Query<<user::BrowseFollowers as ApiEndpoint>::Req>>, ) -> Result<Json<<user::BrowseFollowers as ApiEndpoint>::Res>, error::NotFound> { let (query, user_id) = ( query.map_or_else(Default::default, Query::into_inner), path.into_inner(), ); let page_limit = page_limit(query.page_limit) .await .map_err(|e| error::NotFound::InternalServerError(e))?; let browse_future = db::user::public_user::browse_followers( &db, user_id, query.page.unwrap_or(0), page_limit as u64, ); let total_count_future = db::user::public_user::total_follower_count(db.as_ref(), user_id); let (followers, total_follower_count) = try_join!(browse_future, total_count_future,)?; let pages = (total_follower_count / (page_limit as u64) + (total_follower_count % (page_limit as u64) != 0) as u64) as u32; Ok(Json(BrowseFollowersResponse { followers, pages, total_follower_count, })) } 
pub async fn browse_user_followings( db: Data<PgPool>, _auth: TokenUser, path: Path<Uuid>, query: Option<Query<<user::BrowseFollowing as ApiEndpoint>::Req>>, ) -> Result<Json<<user::BrowseFollowing as ApiEndpoint>::Res>, error::NotFound> { let (query, user_id) = ( query.map_or_else(Default::default, Query::into_inner), path.into_inner(), ); let page_limit = page_limit(query.page_limit) .await .map_err(|e| error::NotFound::InternalServerError(e))?; let browse_future = db::user::public_user::browse_following( &db, user_id, query.page.unwrap_or(0), page_limit as u64, ); let total_count_future = db::user::public_user::total_following_count(db.as_ref(), user_id); let (followings, total_following_count) = try_join!(browse_future, total_count_future,)?; let pages = (total_following_count / (page_limit as u64) + (total_following_count % (page_limit as u64) != 0) as u64) as u32; Ok(Json(BrowseFollowingsResponse { followings, pages, total_following_count, })) } async fn page_limit(page_limit: Option<u32>) -> anyhow::Result<u32> { if let Some(limit) = page_limit { match limit > 0 && limit <= MAX_PAGE_LIMIT { true => Ok(limit), false => Err(anyhow::anyhow!("Page limit should be within 1-100")), } } else { Ok(DEFAULT_PAGE_LIMIT) } }
use actix_web::{ web::{Data, Json, Path, Query}, HttpResponse, }; use futures::try_join; use shared::{ api::{endpoints::user, ApiEndpoint}, domain::{ asset::DraftOrLive, course::CourseBrowseResponse, jig::JigBrowseResponse, user::public_user::{ BrowsePublicUserFollowersResponse as BrowseFollowersResponse, BrowsePublicUserFollowingResponse as BrowseFollowingsResponse, BrowsePublicUserResourcesResponse as BrowseResourcesResponse, BrowsePublicUserResponse, PublicUser, SearchPublicUserResponse, }, }, }; use sqlx::PgPool; use uuid::Uuid; use crate::{ db, error::{self, ServiceKind}, extractor::TokenUser, http::endpoints::course::{DEFAULT_PAGE_LIMIT, MAX_PAGE_LIMIT}, service::ServiceData, }; pub async fn get( db: Data<PgPool>, _auth: Option<TokenUser>, path: Path<Uuid>, ) -> Result<Json<<user::GetPublicUser as ApiEndpoint>::Res>, error::NotFound> { let user_id = path.into_inner(); let user: PublicUser = db::user::public_user::get(&db, user_id).await?; Ok(Json(user)) } pub async fn search( db: Data<PgPool>, claims: Option<TokenUser>, algolia: ServiceData<crate::algolia::Client>, query: Option<Query<<user::Search as ApiEndpoint>::Req>>, ) -> Result<Json<<user::Search as ApiEndpoint>::Res>, error::Service> { let query = query.map_or_else(Default::default, Query::into_inner); let page_limit = page_limit(query.page_limit) .await .map_err(|e| error::Service::InternalServerError(e))?; let user_id = db::user::public_user::auth_claims(&db, claims, query.user_id).await?; let (ids, pages, total_hits) = algolia .search_public_user( &query.q, query.username, query.name, user_id, query.language, query.organization, query.persona, page_limit, query.page, ) .await? 
.ok_or_else(|| error::Service::DisabledService(ServiceKind::Algolia))?; let users: Vec<_> = db::user::public_user::get_by_ids(db.as_ref(), &ids).await?; Ok(Json(SearchPublicUserResponse { users, pages, total_user_count: total_hits, })) } pub async fn browse( db: Data<PgPool>, _auth: Option<TokenUser>, query: Option<Query<<user::BrowsePublicUser as ApiEndpoint>::Req>>, ) -> Result<Json<<user::BrowsePublicUser as ApiEndpoint>::Res>, error::NotFound> { let query = query.map_or_else(Default::default, Query::into_inner); let page_limit = page_limit(query.page_limit) .await .map_err(|e| error::NotFound::InternalServerError(e))?; let browse_future = db::user::public_user::browse_users(&db, query.page.unwrap_or(0), page_limit as u64); let total_count_future = db::user::public_user::total_user_count(db.as_ref()); let (users, total_user_count) = try_join!(browse_future, total_count_future,)?; let pages = (total_user_count / (page_limit as u64) + (total_user_count % (page_limit as u64) != 0) as u64) as u32; Ok(Json(BrowsePublicUserResponse { users, pages, total_user_count, })) } pub async fn browse_user_jigs( db: Data<PgPool>, _auth: Option<TokenUser>, path: Path<Uuid>, query: Option<Query<<user::BrowseUserJigs as ApiEndpoint>::Req>>, ) -> Result<Json<<user::BrowseUserJigs as ApiEndpoint>::Res>, error::NotFound> { let (query, user_id) = ( query.map_or_else(Default::default, Query::into_inner), path.into_inner(), ); let page_limit = page_limit(query.page_limit) .await .map_err(|e| error::NotFound::InternalServerError(e))?; let privacy_level = vec![]; let resource_types = vec![]; let browse_future = db::jig::browse( &db, Some(user_id), None, Some(DraftOrLive::Live), privacy_level.to_owned(), Some(false), query.page.unwrap_or(0) as i32, page_limit, resource_types.to_owned(), ); let total_count_future = db::jig::filtered_count( db.as_ref(), privacy_level.to_owned(), Some(false), Some(user_id), None, Some(DraftOrLive::Live), resource_types.to_owned(), ); let (jigs, (total_count, 
count)) = try_join!(browse_future, total_count_future,)?; let pages = (count / (page_limit as u64) + (count % (page_limit as u64) != 0) as u64) as u32; Ok(Json(JigBrowseResponse { jigs, pages, total_jig_count: total_count, })) } pub async fn browse_user_resources( db: Data<PgPool>, _auth: Option<TokenUser>, path: Path<Uuid>, query: Option<Query<<user::BrowseResources as ApiEndpoint>::Req>>, ) -> Result<Json<<user::BrowseResources as ApiEndpoint>::Res>, error::NotFound> { let (query, user_id) = ( query.map_or_else(Default::default, Query::into_inner), path.into_inner(), ); let page_limit = page_limit(query.page_limit) .await .map_err(|e| error::NotFound::InternalServerError(e))?; let browse_future = db::user::public_user::browse_user_resources( &db, user_id, query.page.unwrap_or(0), page_limit as u64, ); let total_count_future = db::user::public_user::total_resource_count(&db, user_id); let (resources, total_resource_count) = try_join!(browse_future, total_count_future,)?; let pages = (total_resource_count / (page_limit as u64) + (total_resource_count % (page_limit as u64) != 0) as u64) as u32; Ok(Json(BrowseResourcesResponse { resources, pages, total_resource_count, })) } pub async fn browse_user_courses( db: Data<PgPool>, _auth: Option<TokenUser>, path: Path<Uuid>, query: Option<Query<<user::BrowseCourses as ApiEndpoint>::Req>>, ) -> Result<Json<<user::BrowseCourses as ApiEndpoint>::Res>, error::NotFound> { let (query, user_id) = ( query.map_or_else(Default::default, Query::into_inner), path.into_inner(), ); let privacy_level = vec![]; let resource_types = vec![]; let page_limit = page_limit(query.page_limit) .await .map_err(|e| error::NotFound::InternalServerError(e))?; let browse_future = db::course::browse( &db, Some(user_id), Some(DraftOrLive::Live), privacy_level.to_owned(), query.page.unwrap_or(0) as i32, page_limit, resource_types.to_owned(), ); let total_count_future = db::course::filtered_count( db.as_ref(), privacy_level.to_owned(), Some(user_id), 
Some(DraftOrLive::Live), resource_types.to_owned(), ); let (courses, (total_count, count)) = try_join!(browse_future, total_count_future,)?; let pages = (count / (page_limit as u64) + (count % (page_limit as u64) != 0) as u64) as u32; Ok(Json(CourseBrowseResponse { courses, pages, total_course_count: total_count, })) } pub async fn follow( db: Data<PgPool>, claims: TokenUser, path: Path<Uuid>, ) -> Result<HttpResponse, error::NotFound> { let (user_id, follower_id) = (path.into_inner(), claims.0.user_id); if user_id == follower_id { return Err(error::NotFound::InternalServerError(anyhow::anyhow!( "User cannot follow self" ))); } db::user::public_user::follow(&db, user_id, follower_id).await?; Ok(HttpResponse::NoContent().finish()) } pub async fn unfollow( db: Data<PgPool>, claims: TokenUser, path: Path<Uuid>, ) -> Result<HttpResponse, error::NotFound> { let (user_id, follower_id) = (path.into_inner(), claims.0.user_id); db::user::public_user::unfollow(&db, user_id, follower_id).await?; Ok(HttpResponse::NoContent().finish()) } pub async fn browse_user_followers( db: Data<PgPool>, _auth: TokenUser, path: Path<Uuid>, query: Option<Query<<user::BrowseFollowers as ApiEndpoint>::Req>>, ) -> Result<Json<<user::BrowseFollowers as ApiEndpoint>::Res>, error::NotFound> { let (query, user_id) = ( query.map_or_else(Default::default, Query::into_inner), path.into_inner(), ); let page_limit = page_limit(query.page_limit) .await .map_err(|e| error::NotFound::InternalServerError(e))?; let browse_future = db::user::public_user::browse_followers( &db, user_id, query.page.unwrap_or(0), page_limit as u64, ); let total_count_future = db::user::public_user::total_follower_count(db.as_ref(), user_id); let (followers, total_follower_count) = try_join!(browse_future, total_count_future,)?; let pages = (total_follower_count / (page_limit as u64) + (total_follower_count % (page_limit as u64) != 0) as u64) as u32; Ok(Json(BrowseFollowersResponse { followers, pages, total_follower_count, })) } 
pub async fn browse_user_followings( db: Data<PgPool>, _auth: TokenUser, path: Path<Uuid>, query: Option<Query<<user::BrowseFollowing as ApiEndpoint>::Req>>, ) -> Result<Json<<user::BrowseFollowing as ApiEndpoint>::Res>, error::NotFound> { let (query, user_id) = ( query.map_or_else(Default::default, Query::into_inner), path.into_inner(), ); let page_limit = page_limit(query.page_limit) .await .map_err(|e| error::NotFound::InternalServerError(e))?; let browse_future = db::user::public_user::browse_following( &db, user_id, query.page.unwrap_or(0), page_limit as u64, ); let total_count_future = db::user::public_user::total_following_count(db.as_ref(), user_id); let (followings, total_following_count) = try_join!(browse_future, total_count_future,)?; let pages = (total_following_count / (page_limit as u64) + (total_following_count % (page_limit as u64) != 0) as u64) as u32; Ok(Json(BrowseFollowingsResponse { followings, pages, total_following_count, })) } async fn page_limit(page_limit: Option<u32>) -> anyhow::Result<u32> { if let Some(limit) = page_limit { match limit > 0
}
&& limit <= MAX_PAGE_LIMIT { true => Ok(limit), false => Err(anyhow::anyhow!("Page limit should be within 1-100")), } } else { Ok(DEFAULT_PAGE_LIMIT) }
function_block-random_span
[ { "content": "pub fn search(state: Rc<State>, page: Option<u32>) {\n\n state.loader.load(clone!(state => async move {\n\n match state.search_mode.get_cloned() {\n\n SearchMode::Sticker(_) => search_async(Rc::clone(&state), page.unwrap_or_default()).await,\n\n SearchMode::Web(_) => search_async_web(Rc::clone(&state)).await,\n\n };\n\n }));\n\n}\n\n\n", "file_path": "frontend/apps/crates/components/src/image/search/actions.rs", "rank": 0, "score": 450194.84753713745 }, { "content": "pub fn init(jig_id: JigId, _module_id: ModuleId) {\n\n if jig_id == JigId(Uuid::from_u128(0)) {\n\n //SETTINGS.set(DebugSettings::debug(Some(InitData { with_pairs: true }))).unwrap_ji();\n\n SETTINGS.set(DebugSettings::debug(None)).unwrap_ji();\n\n } else {\n\n SETTINGS.set(DebugSettings::default()).unwrap_ji();\n\n }\n\n}\n\n\n", "file_path": "frontend/apps/crates/entry/module/matching/edit/src/debug.rs", "rank": 1, "score": 392780.4316852904 }, { "content": "pub fn init(jig_id: JigId, _module_id: ModuleId) {\n\n if jig_id == JigId(Uuid::from_u128(0)) {\n\n SETTINGS\n\n .set(DebugSettings::debug(Some(InitData { with_pairs: true })))\n\n .unwrap_ji();\n\n //SETTINGS.set(DebugSettings::debug(None)).unwrap_ji();\n\n } else {\n\n SETTINGS.set(DebugSettings::default()).unwrap_ji();\n\n }\n\n}\n\n\n", "file_path": "frontend/apps/crates/entry/module/matching/play/src/debug.rs", "rank": 2, "score": 392780.4316852905 }, { "content": "pub fn init(jig_id: JigId, _module_id: ModuleId) {\n\n if jig_id == JigId(Uuid::from_u128(0)) {\n\n SETTINGS\n\n .set(DebugSettings::debug(Some(InitData {\n\n stickers: vec![\n\n InitSticker::Text, //InitSticker::Sprite\n\n ],\n\n })))\n\n .unwrap_ji();\n\n\n\n //SETTINGS.set(DebugSettings::debug(None)).unwrap_ji();\n\n } else {\n\n SETTINGS.set(DebugSettings::default()).unwrap_ji();\n\n }\n\n}\n\n\n", "file_path": "frontend/apps/crates/entry/module/resource-cover/edit/src/debug.rs", "rank": 3, "score": 388898.344182331 }, { "content": "pub fn init(jig_id: JigId, 
_module_id: ModuleId) {\n\n if jig_id == JigId(Uuid::from_u128(0)) {\n\n SETTINGS\n\n .set(DebugSettings::debug(Some(InitData {\n\n stickers: vec![\n\n InitSticker::Text, //InitSticker::Sprite\n\n ],\n\n })))\n\n .unwrap_ji();\n\n //SETTINGS.set(DebugSettings::debug(None)).unwrap_ji();\n\n } else {\n\n SETTINGS.set(DebugSettings::default()).unwrap_ji();\n\n }\n\n}\n\n\n", "file_path": "frontend/apps/crates/entry/module/resource-cover/play/src/debug.rs", "rank": 4, "score": 388898.344182331 }, { "content": "pub fn create_state(jig_id: JigId, module_id: ModuleId) -> Rc<AppState> {\n\n crate::debug::init(jig_id, module_id);\n\n\n\n let mut opts = StateOpts::new(jig_id, module_id);\n\n opts.force_raw = crate::debug::settings().data.clone();\n\n opts.skip_load_jig = crate::debug::settings().skip_load_jig;\n\n opts.skip_play = crate::debug::settings().skip_play;\n\n\n\n AppState::new(opts, Base::new)\n\n}\n", "file_path": "frontend/apps/crates/entry/module/matching/play/src/state.rs", "rank": 5, "score": 370115.32524174126 }, { "content": "pub fn create_state(jig_id: JigId, module_id: ModuleId) -> Rc<AppState> {\n\n crate::debug::init(jig_id, module_id);\n\n let debug_settings = crate::debug::settings();\n\n\n\n let mut opts = StateOpts::new(jig_id, module_id);\n\n opts.force_raw = debug_settings.data.clone();\n\n opts.is_main_scrollable = true;\n\n opts.skip_save_for_debug = debug_settings.skip_save;\n\n opts.skip_load_jig = debug_settings.skip_load_jig;\n\n\n\n AppState::new(opts, init_from_raw)\n\n}\n\n\n\npub async fn init_from_raw(\n\n init_args: BaseInitFromRawArgs<RawData, Mode, Step>,\n\n) -> BaseInit<Step, Base, Main, Sidebar, Header, Footer, Overlay> {\n\n let force_step = {\n\n if init_args.source == InitSource::ForceRaw {\n\n crate::debug::settings().step\n\n } else {\n", "file_path": "frontend/apps/crates/entry/module/matching/edit/src/state.rs", "rank": 6, "score": 370115.32524174126 }, { "content": "pub fn search(state: Rc<State>, query: ImageSearchQuery) 
{\n\n state.loader.load(clone!(state => async move {\n\n //update the address bar\n\n let route = Route::Admin(AdminRoute::ImageSearch(Some(query.clone())));\n\n route.push_state();\n\n\n\n //search\n\n match api_with_auth::<ImageSearchResponse, EmptyError, _>(endpoints::image::Search::PATH, endpoints::image::Search::METHOD, Some(query)).await {\n\n Ok(res) => {\n\n state.response.set(Some(res))\n\n },\n\n Err(_) => {\n\n todo!();\n\n },\n\n }\n\n }));\n\n}\n", "file_path": "frontend/apps/crates/entry/admin/src/images/search/actions.rs", "rank": 7, "score": 367132.04989915015 }, { "content": "pub fn create_state(jig_id: JigId, module_id: ModuleId) -> Rc<AppState> {\n\n crate::debug::init(jig_id, module_id);\n\n\n\n let mut opts = StateOpts::new(jig_id, module_id);\n\n opts.force_raw = crate::debug::settings().data.clone();\n\n opts.skip_load_jig = crate::debug::settings().skip_load_jig;\n\n\n\n AppState::new(opts, Base::new)\n\n}\n", "file_path": "frontend/apps/crates/entry/module/resource-cover/play/src/state.rs", "rank": 8, "score": 366596.5205238622 }, { "content": "pub fn create_state(jig_id: JigId, module_id: ModuleId) -> Rc<AppState> {\n\n crate::debug::init(jig_id, module_id);\n\n\n\n let mut opts = StateOpts::new(jig_id, module_id);\n\n opts.force_raw = crate::debug::settings().data.clone();\n\n opts.is_main_scrollable = false;\n\n opts.skip_save_for_debug = crate::debug::settings().skip_save;\n\n opts.skip_load_jig = crate::debug::settings().skip_load_jig;\n\n\n\n AppState::new(opts, init_from_raw)\n\n}\n", "file_path": "frontend/apps/crates/entry/module/resource-cover/edit/src/state.rs", "rank": 9, "score": 366596.5205238622 }, { "content": "pub fn get_jig() -> JigResponse {\n\n let module_id = ModuleId(Uuid::from_u128(0));\n\n JigResponse {\n\n id: JigId(Uuid::from_u128(0)),\n\n admin_data: JigAdminData {\n\n rating: None,\n\n blocked: false,\n\n curated: true,\n\n },\n\n creator_id: None,\n\n author_id: None,\n\n author_name: None,\n\n published_at: 
None,\n\n jig_data: JigData {\n\n draft_or_live: DraftOrLive::Draft,\n\n display_name: \"hello world\".to_string(),\n\n //TODO - delete me: https://github.com/ji-devs/ji-cloud/issues/835\n\n modules: vec![\n\n LiteModule {\n\n id: module_id,\n", "file_path": "frontend/apps/crates/entry/jig/edit/src/edit/sidebar/debug.rs", "rank": 10, "score": 356050.33104605577 }, { "content": "fn new_entry_with_id(id: u32, bundle_id: Uuid) -> DisplayableEntry {\n\n DisplayableEntry {\n\n id,\n\n english: String::new(),\n\n hebrew: String::new(),\n\n section: None,\n\n item_kind_id: None,\n\n status: EntryStatus::Discuss,\n\n zeplin_reference: Mutable::new(None),\n\n comments: String::new(),\n\n in_app: false,\n\n in_element: false,\n\n in_mock: false,\n\n bundle_id,\n\n }\n\n}\n", "file_path": "frontend/apps/crates/entry/admin/src/locale/db_interface.rs", "rank": 11, "score": 355707.87314284046 }, { "content": "pub fn start_timer(state: Rc<JigPlayer>, time: u32) {\n\n let timer = Timer::new(time);\n\n\n\n spawn_local(timer.time.signal().for_each(clone!(state => move|time| {\n\n if time == 0 {\n\n sent_iframe_message(Rc::clone(&state), JigToModulePlayerMessage::TimerDone);\n\n }\n\n async {}\n\n })));\n\n\n\n state.timer.set(Some(timer));\n\n}\n\n\n", "file_path": "frontend/apps/crates/entry/jig/play/src/jig/actions.rs", "rank": 12, "score": 351011.64820057317 }, { "content": "pub fn get_user() -> Option<&'static UserProfile> {\n\n USER.get()\n\n}\n", "file_path": "frontend/apps/crates/utils/src/init/user.rs", "rank": 13, "score": 347722.9855718904 }, { "content": "#[allow(dead_code)] // this should be removed eventually\n\npub fn mouse_down(state: Rc<State>, x: i32, y: i32) {\n\n state\n\n .sidebar\n\n .drag\n\n .set(Some(Rc::new(DragState::new(state.clone(), x, y))));\n\n}\n\n\n", "file_path": "frontend/apps/crates/entry/jig/edit/src/edit/sidebar/spot/actions.rs", "rank": 14, "score": 347412.64901381766 }, { "content": "pub fn pdf_lib_url(library_kind: MediaLibrary, id: PdfId) -> 
String {\n\n let path = media_key(library_kind, id.0, FileKind::DocumentPdf);\n\n\n\n uploads_url(&path)\n\n}\n\n\n", "file_path": "frontend/apps/crates/utils/src/path.rs", "rank": 15, "score": 344971.0801541937 }, { "content": "pub fn audio_lib_url(library_kind: MediaLibrary, id: AudioId) -> String {\n\n let path = media_key(library_kind, id.0, FileKind::AudioMp3);\n\n\n\n uploads_url(&path)\n\n}\n\n\n", "file_path": "frontend/apps/crates/utils/src/path.rs", "rank": 16, "score": 344971.0801541936 }, { "content": "pub fn render_stickers_raw_vec(stickers: &[RawSticker], theme_id: ThemeId) -> Vec<Dom> {\n\n stickers\n\n .iter()\n\n .map(|sticker| render_sticker_raw(sticker, theme_id, None))\n\n .collect::<Vec<Dom>>()\n\n}\n\n\n", "file_path": "frontend/apps/crates/components/src/stickers/dom.rs", "rank": 17, "score": 344528.9104356231 }, { "content": "pub fn use_module_as(state: Rc<State>, target_kind: ModuleKind, source_module_id: ModuleId) {\n\n state.loader.load(clone!(state => async move {\n\n let target_module_id: Result<(ModuleId, bool), EmptyError> = async {\n\n let asset_type = match state.asset {\n\n Asset::Jig(_) => AssetType::Jig,\n\n Asset::Course(_) => AssetType::Course,\n\n };\n\n let path = endpoints::module::GetDraft::PATH\n\n .replace(\"{asset_type}\",asset_type.as_str())\n\n .replace(\"{module_id}\", &source_module_id.0.to_string());\n\n\n\n let source_module = api_with_auth::<ModuleResponse, EmptyError, ()>(\n\n &path,\n\n endpoints::module::GetDraft::METHOD,\n\n None\n\n ).await?.module;\n\n\n\n let target_body = source_module.body.convert_to_body(target_kind).unwrap_ji();\n\n\n\n let req = ModuleCreateRequest {\n", "file_path": "frontend/apps/crates/entry/jig/edit/src/edit/sidebar/jig/actions.rs", "rank": 18, "score": 344093.8188113075 }, { "content": "pub fn render_image_search() -> Dom {\n\n let opts = ImageSearchOptions {\n\n kind: ImageSearchKind::Background,\n\n ..ImageSearchOptions::default()\n\n };\n\n let callbacks = 
ImageSearchCallbacks::new(Some(|image| {\n\n log::info!(\"{:?}\", image);\n\n }));\n\n let state = image_search::state::State::new(opts, callbacks);\n\n\n\n html!(\"div\", {\n\n .style(\"padding\", \"30px\")\n\n .child(image_search::dom::render(Rc::new(state), None))\n\n })\n\n}\n\n\n\n// pub fn render_audio_input() -> Dom {\n\n// let opts:AudioInputOptions = AudioInputOptions {\n\n// //ummm... this is a lie... I guess... but w/e\n\n// //in the usual case of supplying a Some the real type is inferred\n", "file_path": "frontend/apps/crates/entry/dev/scratch/001/src/page.rs", "rank": 19, "score": 344023.34810746246 }, { "content": "pub fn mouse_up(sidebar: Rc<SidebarState>, _x: i32, _y: i32) {\n\n if let Some(_drag) = sidebar.drag.replace(None) {\n\n sidebar.drag_target_index.set_neq(None);\n\n }\n\n}\n", "file_path": "frontend/apps/crates/entry/jig/edit/src/edit/sidebar/dragging/actions.rs", "rank": 20, "score": 343920.4575364261 }, { "content": "pub fn init(jig_id: JigId, _module_id: ModuleId) {\n\n if jig_id == JigId(Uuid::from_u128(0)) {\n\n SETTINGS\n\n .set(DebugSettings::debug(Some(InitData {\n\n stickers: vec![\n\n InitSticker::Text, //InitSticker::Sprite\n\n ],\n\n })))\n\n .unwrap_ji();\n\n\n\n //SETTINGS.set(DebugSettings::debug(None)).unwrap_ji();\n\n } else {\n\n SETTINGS.set(DebugSettings::default()).unwrap_ji();\n\n }\n\n}\n\n\n", "file_path": "frontend/apps/crates/entry/module/cover/edit/src/debug.rs", "rank": 21, "score": 343347.9403730216 }, { "content": "pub fn init(jig_id: JigId, _module_id: ModuleId) {\n\n if jig_id == JigId(Uuid::from_u128(0)) {\n\n SETTINGS\n\n .set(DebugSettings::debug(Some(InitData {\n\n stickers: vec![\n\n // InitSticker::Text, InitSticker::Sprite\n\n ],\n\n })))\n\n .unwrap_ji();\n\n //SETTINGS.set(DebugSettings::debug(None)).unwrap_ji();\n\n } else {\n\n SETTINGS.set(DebugSettings::default()).unwrap_ji();\n\n }\n\n}\n\n\n", "file_path": "frontend/apps/crates/entry/module/video/edit/src/debug.rs", "rank": 22, "score": 
343347.9403730216 }, { "content": "pub fn init(jig_id: JigId, _module_id: ModuleId) {\n\n if jig_id == JigId(Uuid::from_u128(0)) {\n\n SETTINGS\n\n .set(DebugSettings::debug(Some(InitData {\n\n stickers: vec![\n\n InitSticker::Text,\n\n InitSticker::Video, //InitSticker::Sprite\n\n ],\n\n })))\n\n .unwrap_ji();\n\n //SETTINGS.set(DebugSettings::debug(None)).unwrap_ji();\n\n } else {\n\n SETTINGS.set(DebugSettings::default()).unwrap_ji();\n\n }\n\n}\n\n\n", "file_path": "frontend/apps/crates/entry/module/video/play/src/debug.rs", "rank": 23, "score": 343347.94037302164 }, { "content": "pub fn init(jig_id: JigId, _module_id: ModuleId) {\n\n if jig_id == JigId(Uuid::from_u128(0)) {\n\n SETTINGS\n\n .set(DebugSettings::debug(Some(InitData {\n\n stickers: vec![\n\n InitSticker::Text, //InitSticker::Sprite\n\n ],\n\n })))\n\n .unwrap_ji();\n\n\n\n //SETTINGS.set(DebugSettings::debug(None)).unwrap_ji();\n\n } else {\n\n SETTINGS.set(DebugSettings::default()).unwrap_ji();\n\n }\n\n}\n\n\n", "file_path": "frontend/apps/crates/entry/module/poster/edit/src/debug.rs", "rank": 24, "score": 343347.9403730216 }, { "content": "pub fn init(jig_id: JigId, _module_id: ModuleId) {\n\n if jig_id == JigId(Uuid::from_u128(0)) {\n\n SETTINGS\n\n .set(DebugSettings::debug(Some(InitData {\n\n stickers: vec![\n\n InitSticker::Text, //InitSticker::Sprite\n\n ],\n\n })))\n\n .unwrap_ji();\n\n //SETTINGS.set(DebugSettings::debug(None)).unwrap_ji();\n\n } else {\n\n SETTINGS.set(DebugSettings::default()).unwrap_ji();\n\n }\n\n}\n\n\n", "file_path": "frontend/apps/crates/entry/module/cover/play/src/debug.rs", "rank": 25, "score": 343347.9403730216 }, { "content": "pub fn init(jig_id: JigId, _module_id: ModuleId) {\n\n if jig_id == JigId(Uuid::from_u128(0)) {\n\n SETTINGS\n\n .set(DebugSettings::debug(Some(InitData { with_pairs: true })))\n\n .unwrap_ji();\n\n //SETTINGS.set(DebugSettings::debug(None)).unwrap_ji();\n\n } else {\n\n SETTINGS.set(DebugSettings::default()).unwrap_ji();\n\n 
}\n\n}\n\n\n", "file_path": "frontend/apps/crates/entry/module/memory/play/src/debug.rs", "rank": 26, "score": 343347.9403730216 }, { "content": "pub fn init(jig_id: JigId, _module_id: ModuleId) {\n\n if jig_id == JigId(Uuid::from_u128(0)) {\n\n //SETTINGS.set(DebugSettings::debug(Some(InitData { with_pairs: true }))).unwrap_ji();\n\n SETTINGS.set(DebugSettings::debug(None)).unwrap_ji();\n\n } else {\n\n SETTINGS.set(DebugSettings::default()).unwrap_ji();\n\n }\n\n}\n\n\n", "file_path": "frontend/apps/crates/entry/module/flashcards/edit/src/debug.rs", "rank": 27, "score": 343347.9403730216 }, { "content": "pub fn init(jig_id: JigId, _module_id: ModuleId) {\n\n if jig_id == JigId(Uuid::from_u128(0)) {\n\n SETTINGS\n\n .set(DebugSettings::debug(Some(InitData { with_pairs: false })))\n\n .unwrap_ji();\n\n //SETTINGS.set(DebugSettings::debug(None)).unwrap_ji();\n\n } else {\n\n SETTINGS.set(DebugSettings::default()).unwrap_ji();\n\n }\n\n}\n\n\n", "file_path": "frontend/apps/crates/entry/module/memory/edit/src/debug.rs", "rank": 28, "score": 343347.9403730216 }, { "content": "pub fn init(jig_id: JigId, _module_id: ModuleId) {\n\n if jig_id == JigId(Uuid::from_u128(0)) {\n\n SETTINGS\n\n .set(DebugSettings::debug(Some(InitData {\n\n stickers: vec![\n\n InitSticker::Text, //InitSticker::Sprite\n\n ],\n\n })))\n\n .unwrap_ji();\n\n //SETTINGS.set(DebugSettings::debug(None)).unwrap_ji();\n\n } else {\n\n SETTINGS.set(DebugSettings::default()).unwrap_ji();\n\n }\n\n}\n\n\n", "file_path": "frontend/apps/crates/entry/module/poster/play/src/debug.rs", "rank": 29, "score": 343347.9403730216 }, { "content": "pub fn init(jig_id: JigId, _module_id: ModuleId) {\n\n if jig_id == JigId(Uuid::from_u128(0)) {\n\n SETTINGS\n\n .set(DebugSettings::debug(Some(InitData { with_pairs: true })))\n\n .unwrap_ji();\n\n //SETTINGS.set(DebugSettings::debug(None)).unwrap_ji();\n\n } else {\n\n SETTINGS.set(DebugSettings::default()).unwrap_ji();\n\n }\n\n}\n\n\n", "file_path": 
"frontend/apps/crates/entry/module/flashcards/play/src/debug.rs", "rank": 30, "score": 343347.9403730216 }, { "content": "//Mouse movements are triggered from sidebar regardless of\n\n//whether drag state exists yet or not\n\npub fn mouse_move(sidebar: Rc<SidebarState>, x: i32, y: i32) {\n\n //update via ref not lock_mut\n\n //otherwise it will replace the drag and cause a re-render\n\n //with every update\n\n //internally, drag uses Mutable and Atomic so this works in Rc\n\n if let Some(drag) = &*sidebar.drag.lock_ref() {\n\n drag.inner.update(x, y);\n\n }\n\n}\n\n\n", "file_path": "frontend/apps/crates/entry/jig/edit/src/edit/sidebar/dragging/actions.rs", "rank": 31, "score": 340522.6664291173 }, { "content": "pub fn init(jig_id: JigId, _module_id: ModuleId) {\n\n if jig_id == JigId(Uuid::from_u128(0)) {\n\n SETTINGS\n\n .set(DebugSettings::debug(Some(InitData { with_pairs: true })))\n\n .unwrap_ji();\n\n //SETTINGS.set(DebugSettings::debug(None)).unwrap_ji();\n\n } else {\n\n SETTINGS.set(DebugSettings::default()).unwrap_ji();\n\n }\n\n}\n\n\n", "file_path": "frontend/apps/crates/entry/module/card-quiz/edit/src/debug.rs", "rank": 32, "score": 340236.33180532407 }, { "content": "pub fn init(jig_id: JigId, _module_id: ModuleId) {\n\n if jig_id == JigId(Uuid::from_u128(0)) {\n\n SETTINGS\n\n .set(DebugSettings::debug(Some(InitData {\n\n stickers: vec![\n\n (InitSticker::Text, ItemKind::Static, (0.3, 0.3)),\n\n (\n\n InitSticker::Text,\n\n ItemKind::Interactive(Interactive {\n\n audio: None,\n\n target_transform: None,\n\n }),\n\n (-0.3, -0.3),\n\n ),\n\n (\n\n InitSticker::Sprite,\n\n ItemKind::Interactive(Interactive {\n\n audio: None,\n\n target_transform: None,\n\n }),\n", "file_path": "frontend/apps/crates/entry/module/drag-drop/edit/src/debug.rs", "rank": 33, "score": 340236.331805324 }, { "content": "pub fn init(jig_id: JigId, _module_id: ModuleId) {\n\n if jig_id == JigId(Uuid::from_u128(0)) {\n\n SETTINGS\n\n .set(DebugSettings::debug(Some(InitData { 
with_pairs: true })))\n\n .unwrap_ji();\n\n //SETTINGS.set(DebugSettings::debug(None)).unwrap_ji();\n\n } else {\n\n SETTINGS.set(DebugSettings::default()).unwrap_ji();\n\n }\n\n}\n\n\n", "file_path": "frontend/apps/crates/entry/module/card-quiz/play/src/debug.rs", "rank": 34, "score": 340236.33180532407 }, { "content": "pub fn init(jig_id: JigId, _module_id: ModuleId) {\n\n if jig_id == JigId(Uuid::from_u128(0)) {\n\n SETTINGS\n\n .set(DebugSettings::debug(Some(InitData {\n\n stickers: vec![\n\n (InitSticker::Text, ItemKind::Static, (0.3, 0.3)),\n\n /*\n\n (\n\n InitSticker::Text,\n\n ItemKind::Interactive(\n\n Interactive {\n\n audio: None,\n\n target_transform: {\n\n let mut t = Transform::identity();\n\n Some(t)\n\n }\n\n }\n\n ),\n\n (-0.3, -0.3)\n\n ),\n", "file_path": "frontend/apps/crates/entry/module/drag-drop/play/src/debug.rs", "rank": 35, "score": 340236.331805324 }, { "content": "pub fn init(jig_id: JigId, _module_id: ModuleId) {\n\n if jig_id == JigId(Uuid::from_u128(0)) {\n\n SETTINGS\n\n .set(DebugSettings::debug(Some(InitData {\n\n stickers: vec![\n\n InitSticker::Text, // InitSticker::Sprite\n\n ],\n\n traces: vec![\n\n InitTrace::Ellipse(0.3, 0.4, 0.2, 0.1),\n\n InitTrace::Ellipse(0.1, 0.1, 0.1, 0.1),\n\n ],\n\n })))\n\n .unwrap_ji();\n\n //SETTINGS.set(DebugSettings::debug(None)).unwrap_ji();\n\n } else {\n\n SETTINGS.set(DebugSettings::default()).unwrap_ji();\n\n }\n\n}\n\n\n", "file_path": "frontend/apps/crates/entry/module/tapping-board/play/src/debug.rs", "rank": 36, "score": 340236.33180532407 }, { "content": "pub fn init(jig_id: JigId, _module_id: ModuleId) {\n\n if jig_id == JigId(Uuid::from_u128(0)) {\n\n SETTINGS\n\n .set(DebugSettings::debug(Some(InitData {\n\n stickers: vec![\n\n InitSticker::Text, //InitSticker::Sprite\n\n ],\n\n traces: vec![InitTrace::Ellipse(0.3, 0.4, 0.2, 0.1)],\n\n })))\n\n .unwrap_ji();\n\n //SETTINGS.set(DebugSettings::debug(None)).unwrap_ji();\n\n } else {\n\n 
SETTINGS.set(DebugSettings::default()).unwrap_ji();\n\n }\n\n}\n\n\n", "file_path": "frontend/apps/crates/entry/module/tapping-board/edit/src/debug.rs", "rank": 37, "score": 340236.331805324 }, { "content": "pub fn render() -> Page {\n\n ModulePage::<PageRenderer, PageLoader, RawData, State>::render(\n\n PageRenderer{},\n\n PageLoader{}\n\n )\n\n}\n\n\n\npub type RawData = ();\n\n\n\npub struct State {\n\n pub kind: Mutable<ModulePageKind>\n\n}\n\nimpl State {\n\n fn new(data:RawData) -> Self {\n\n Self { \n\n kind: Mutable::new(INITIAL_MODE) \n\n }\n\n }\n\n}\n\n\n", "file_path": "frontend/apps/crates/entry/dev/showcase/001/src/pages/module_grid/dom.rs", "rank": 38, "score": 340027.0795592807 }, { "content": "fn render_logged_in(state: Rc<State>, user: &UserProfile) -> Vec<Dom> {\n\n vec![html!(\"page-header-profile\", {\n\n .property(\"slot\", \"user\")\n\n .property(\"name\", &user.given_name)\n\n .property(\"email\", &user.email)\n\n .children(&mut [\n\n html!(\"button-rect\", {\n\n .property(\"slot\", \"logout\")\n\n .property(\"kind\", \"outline\")\n\n .property(\"size\", \"small\")\n\n .property(\"color\", \"blue\")\n\n .text(STR_LOGOUT)\n\n .event(clone!(state => move |_: events::Click| {\n\n actions::logout(Rc::clone(&state));\n\n }))\n\n }),\n\n html!(\"profile-image\", {\n\n .property(\"slot\", \"profile-image\")\n\n .property(\"imageId\", {\n\n match &user.profile_image {\n", "file_path": "frontend/apps/crates/components/src/page_header/dom.rs", "rank": 39, "score": 339527.0408749679 }, { "content": "pub fn get<'a>(db: &'a PgPool, ids: &'a [Uuid]) -> BoxStream<'a, sqlx::Result<ImageMetadata>> {\n\n sqlx::query_as(\n\nr#\"\n", "file_path": "backend/api/src/db/image.rs", "rank": 40, "score": 338237.2587327539 }, { "content": "pub fn navigate_to_module(state: Rc<JigPlayer>, module_id: &ModuleId) {\n\n if let Some(jig) = &*state.jig.lock_ref() {\n\n let index = jig\n\n .jig_data\n\n .modules\n\n .iter()\n\n .position(|module| &module.id == 
module_id);\n\n\n\n if let Some(index) = index {\n\n navigate_to_index(Rc::clone(&state), index);\n\n }\n\n }\n\n}\n\n\n", "file_path": "frontend/apps/crates/entry/jig/play/src/jig/actions.rs", "rank": 41, "score": 331492.6407379694 }, { "content": "pub fn media_url(path: &str) -> String {\n\n format!(\n\n \"{}/{}\",\n\n SETTINGS.get().unwrap_ji().remote_target.media_url(),\n\n path\n\n )\n\n}\n\n\n", "file_path": "frontend/apps/crates/utils/src/path.rs", "rank": 42, "score": 327230.96186760976 }, { "content": "pub fn legacy_url(path: &str) -> String {\n\n format!(\n\n \"{}/{}\",\n\n SETTINGS.get().unwrap_ji().remote_target.legacy_url(),\n\n path\n\n )\n\n}\n", "file_path": "frontend/apps/crates/utils/src/path.rs", "rank": 43, "score": 327230.96186760976 }, { "content": "pub fn uploads_url(path: &str) -> String {\n\n format!(\n\n \"{}/{}\",\n\n SETTINGS.get().unwrap_ji().remote_target.uploads_url(),\n\n path\n\n )\n\n}\n", "file_path": "frontend/apps/crates/utils/src/path.rs", "rank": 44, "score": 327230.96186760976 }, { "content": "#[instrument(skip_all)]\n\npub fn build_tree(categories: Vec<RawCategory>) -> Vec<Category> {\n\n let mut nodes: Vec<Rc<RefCell<CategoryNode>>> = Vec::new();\n\n let mut lookup: HashMap<Uuid, Rc<RefCell<CategoryNode>>> = HashMap::new();\n\n\n\n // Now we know this category exists in the lookup table\n\n for raw in categories.iter() {\n\n lookup.insert(\n\n raw.id.clone(),\n\n Rc::new(RefCell::new(CategoryNode {\n\n id: raw.id.clone(),\n\n name: raw.name.clone(),\n\n created_at: raw.created_at,\n\n updated_at: raw.updated_at,\n\n user_scopes: raw.user_scopes.clone(),\n\n children: Vec::new(),\n\n })),\n\n );\n\n }\n\n\n\n for raw in categories.iter() {\n", "file_path": "backend/api/src/domain.rs", "rank": 45, "score": 326784.973339407 }, { "content": "pub fn get_debug_pairs(mode: Mode) -> Vec<(String, String)> {\n\n EDITOR_CONFIG\n\n .get()\n\n .map(|config| match mode {\n\n Mode::Duplicate => config\n\n .init\n\n .single_list_words\n\n 
.iter()\n\n .skip(1)\n\n .map(|word| (word.to_string(), word.to_string()))\n\n .collect(),\n\n Mode::WordsAndImages => config\n\n .init\n\n .single_list_words\n\n .iter()\n\n .map(|word| (word.to_string(), \"\".to_string()))\n\n .collect(),\n\n // Images/Images doesn't use lists at all\n\n Mode::Images => vec![(\"\".to_string(), \"\".to_string())],\n\n _ => config.init.dual_list_words.clone(),\n\n })\n\n .unwrap_ji()\n\n}\n", "file_path": "frontend/apps/crates/components/src/module/_groups/cards/edit/config.rs", "rank": 46, "score": 326618.874868804 }, { "content": "pub fn get_debug_pairs(mode: Mode) -> Vec<(String, String)> {\n\n EDITOR_CONFIG\n\n .get()\n\n .map(|config| match mode {\n\n Mode::Duplicate => config\n\n .init\n\n .single_list_words\n\n .iter()\n\n .skip(1)\n\n .map(|word| (word.to_string(), word.to_string()))\n\n .collect(),\n\n Mode::WordsAndImages => config\n\n .init\n\n .single_list_words\n\n .iter()\n\n .map(|word| (word.to_string(), \"\".to_string()))\n\n .collect(),\n\n // Images/Images doesn't use lists at all\n\n Mode::Images => vec![(\"\".to_string(), \"\".to_string())],\n\n _ => config.init.dual_list_words.clone(),\n\n })\n\n .unwrap_ji()\n\n}\n", "file_path": "frontend/apps/crates/components/src/module/_groups/cards/play/config.rs", "rank": 47, "score": 326618.874868804 }, { "content": "pub fn top(&self) -> f64 {\n\n self.y\n\n}\n", "file_path": "frontend/apps/crates/utils/src/math/bounds.rs", "rank": 48, "score": 325739.88175915694 }, { "content": "pub fn left(&self) -> f64 {\n\n self.x\n\n}\n", "file_path": "frontend/apps/crates/utils/src/math/bounds.rs", "rank": 49, "score": 325739.88175915694 }, { "content": "pub fn bottom(&self) -> f64 {\n\n if self.invert_y {\n\n self.y + self.height\n\n } else {\n\n self.y - self.height\n\n }\n\n}\n", "file_path": "frontend/apps/crates/utils/src/math/bounds.rs", "rank": 50, "score": 325739.88175915694 }, { "content": "pub fn right(&self) -> f64 {\n\n self.x + self.width\n\n}\n\n*/\n", "file_path": 
"frontend/apps/crates/utils/src/math/bounds.rs", "rank": 51, "score": 325739.88175915694 }, { "content": "select id as user_id, 8 as scope from \"user\";\n", "file_path": "backend/api/migrations/20210409001901_jig-self-editable-default.sql", "rank": 52, "score": 324529.22755400016 }, { "content": "pub fn render(state: Rc<State>) -> Dom {\n\n let share_jig = share_jig::ShareJig::new(state.player_state.jig_id);\n\n\n\n let anchor = html!(\"jig-play-sidebar-action\", {\n\n .property(\"kind\", \"share\")\n\n .property_signal(\"active\", share_jig.active_popup.signal_cloned().map(|active| active.is_some()))\n\n });\n\n\n\n share_jig.render(anchor, Some(\"actions\"))\n\n}\n", "file_path": "frontend/apps/crates/entry/jig/play/src/jig/sidebar/dom/share.rs", "rank": 53, "score": 323591.2606776151 }, { "content": "pub fn create_state(jig_id: JigId, module_id: ModuleId) -> Rc<AppState> {\n\n crate::debug::init(jig_id, module_id);\n\n\n\n let mut opts = StateOpts::new(jig_id, module_id);\n\n opts.force_raw = crate::debug::settings().data.clone();\n\n opts.skip_load_jig = crate::debug::settings().skip_load_jig;\n\n\n\n AppState::new(opts, Base::new)\n\n}\n", "file_path": "frontend/apps/crates/entry/module/video/play/src/state.rs", "rank": 54, "score": 323411.8617964863 }, { "content": "pub fn create_state(jig_id: JigId, module_id: ModuleId) -> Rc<AppState> {\n\n crate::debug::init(jig_id, module_id);\n\n let debug_settings = crate::debug::settings();\n\n\n\n let mut opts = StateOpts::new(jig_id, module_id);\n\n opts.force_raw = debug_settings.data.clone();\n\n opts.is_main_scrollable = true;\n\n opts.skip_save_for_debug = debug_settings.skip_save;\n\n opts.skip_load_jig = debug_settings.skip_load_jig;\n\n\n\n AppState::new(opts, init_from_raw)\n\n}\n\n\n\npub async fn init_from_raw(\n\n init_args: BaseInitFromRawArgs<RawData, Mode, Step>,\n\n) -> BaseInit<Step, Base, Main, Sidebar, Header, Footer, Overlay> {\n\n let force_step = {\n\n if init_args.source == InitSource::ForceRaw 
{\n\n crate::debug::settings().step\n\n } else {\n", "file_path": "frontend/apps/crates/entry/module/flashcards/edit/src/state.rs", "rank": 55, "score": 323411.8617964863 }, { "content": "pub fn create_state(jig_id: JigId, module_id: ModuleId) -> Rc<AppState> {\n\n crate::debug::init(jig_id, module_id);\n\n\n\n let mut opts = StateOpts::new(jig_id, module_id);\n\n opts.force_raw = crate::debug::settings().data.clone();\n\n opts.skip_load_jig = crate::debug::settings().skip_load_jig;\n\n\n\n AppState::new(opts, Base::new)\n\n}\n", "file_path": "frontend/apps/crates/entry/module/flashcards/play/src/state.rs", "rank": 56, "score": 323411.8617964863 }, { "content": "pub fn create_state(jig_id: JigId, module_id: ModuleId) -> Rc<AppState> {\n\n crate::debug::init(jig_id, module_id);\n\n\n\n let mut opts = StateOpts::new(jig_id, module_id);\n\n opts.force_raw = crate::debug::settings().data.clone();\n\n opts.is_main_scrollable = false;\n\n opts.skip_save_for_debug = crate::debug::settings().skip_save;\n\n opts.skip_load_jig = crate::debug::settings().skip_load_jig;\n\n\n\n AppState::new(opts, init_from_raw)\n\n}\n", "file_path": "frontend/apps/crates/entry/module/video/edit/src/state.rs", "rank": 57, "score": 323411.8617964863 }, { "content": "pub fn create_state(jig_id: JigId, module_id: ModuleId) -> Rc<AppState> {\n\n crate::debug::init(jig_id, module_id);\n\n\n\n let mut opts = StateOpts::new(jig_id, module_id);\n\n opts.force_raw = crate::debug::settings().data.clone();\n\n opts.skip_load_jig = crate::debug::settings().skip_load_jig;\n\n\n\n AppState::new(opts, Base::new)\n\n}\n", "file_path": "frontend/apps/crates/entry/module/cover/play/src/state.rs", "rank": 58, "score": 323411.8617964863 }, { "content": "pub fn create_state(jig_id: JigId, module_id: ModuleId) -> Rc<AppState> {\n\n crate::debug::init(jig_id, module_id);\n\n\n\n let mut opts = StateOpts::new(jig_id, module_id);\n\n opts.force_raw = crate::debug::settings().data.clone();\n\n opts.skip_load_jig = 
crate::debug::settings().skip_load_jig;\n\n\n\n AppState::new(opts, Base::new)\n\n}\n", "file_path": "frontend/apps/crates/entry/module/poster/play/src/state.rs", "rank": 59, "score": 323411.8617964863 }, { "content": "pub fn create_state(jig_id: JigId, module_id: ModuleId) -> Rc<AppState> {\n\n crate::debug::init(jig_id, module_id);\n\n\n\n let mut opts = StateOpts::new(jig_id, module_id);\n\n opts.force_raw = crate::debug::settings().data.clone();\n\n opts.skip_load_jig = crate::debug::settings().skip_load_jig;\n\n\n\n AppState::new(opts, Base::new)\n\n}\n", "file_path": "frontend/apps/crates/entry/module/memory/play/src/state.rs", "rank": 60, "score": 323411.8617964863 }, { "content": "pub fn create_state(jig_id: JigId, module_id: ModuleId) -> Rc<AppState> {\n\n crate::debug::init(jig_id, module_id);\n\n\n\n let mut opts = StateOpts::new(jig_id, module_id);\n\n opts.force_raw = crate::debug::settings().data.clone();\n\n opts.is_main_scrollable = false;\n\n opts.skip_save_for_debug = crate::debug::settings().skip_save;\n\n opts.skip_load_jig = crate::debug::settings().skip_load_jig;\n\n\n\n AppState::new(opts, init_from_raw)\n\n}\n", "file_path": "frontend/apps/crates/entry/module/cover/edit/src/state.rs", "rank": 61, "score": 323411.8617964863 }, { "content": "pub fn create_state(jig_id: JigId, module_id: ModuleId) -> Rc<AppState> {\n\n crate::debug::init(jig_id, module_id);\n\n\n\n let mut opts = StateOpts::new(jig_id, module_id);\n\n opts.force_raw = crate::debug::settings().data.clone();\n\n opts.is_main_scrollable = false;\n\n opts.skip_save_for_debug = crate::debug::settings().skip_save;\n\n opts.skip_load_jig = crate::debug::settings().skip_load_jig;\n\n\n\n AppState::new(opts, init_from_raw)\n\n}\n", "file_path": "frontend/apps/crates/entry/module/poster/edit/src/state.rs", "rank": 62, "score": 323411.8617964863 }, { "content": "pub fn create_state(jig_id: JigId, module_id: ModuleId) -> Rc<AppState> {\n\n crate::debug::init(jig_id, module_id);\n\n let 
debug_settings = crate::debug::settings();\n\n\n\n let mut opts = StateOpts::new(jig_id, module_id);\n\n opts.force_raw = debug_settings.data.clone();\n\n opts.is_main_scrollable = true;\n\n opts.skip_save_for_debug = debug_settings.skip_save;\n\n opts.skip_load_jig = debug_settings.skip_load_jig;\n\n\n\n AppState::new(opts, init_from_raw)\n\n}\n\n\n\npub async fn init_from_raw(\n\n init_args: BaseInitFromRawArgs<RawData, Mode, Step>,\n\n) -> BaseInit<Step, Base, Main, Sidebar, Header, Footer, Overlay> {\n\n let force_step = {\n\n if init_args.source == InitSource::ForceRaw {\n\n crate::debug::settings().step\n\n } else {\n", "file_path": "frontend/apps/crates/entry/module/memory/edit/src/state.rs", "rank": 63, "score": 323411.8617964863 }, { "content": "pub fn submit(state: Rc<State>) {\n\n let age_ranges: Vec<AgeRangeId> = state\n\n .age_ranges\n\n .borrow()\n\n .iter()\n\n .map(|id| AgeRangeId(Uuid::parse_str(id).unwrap_ji()))\n\n .collect();\n\n\n\n let affiliations: Vec<AffiliationId> = state\n\n .affiliations\n\n .borrow()\n\n .iter()\n\n .map(|id| AffiliationId(Uuid::parse_str(id).unwrap_ji()))\n\n .collect();\n\n\n\n let subjects: Vec<SubjectId> = state\n\n .subjects\n\n .borrow()\n\n .iter()\n\n .map(|id| SubjectId(Uuid::parse_str(id).unwrap_ji()))\n", "file_path": "frontend/apps/crates/entry/user/src/register/pages/step_3/actions.rs", "rank": 64, "score": 322509.0140907303 }, { "content": "pub fn signin_google(state: Rc<LoginPage>) {\n\n state.loader.load(async {\n\n crate::oauth::actions::redirect(GetOAuthUrlServiceKind::Google, OAuthUrlKind::Login).await;\n\n });\n\n}\n\n\n", "file_path": "frontend/apps/crates/entry/user/src/login/actions.rs", "rank": 65, "score": 322509.0140907303 }, { "content": "pub fn submit(state: Rc<State>) {\n\n state.evaluate_terms_error();\n\n state.evaluate_language_error();\n\n state.evaluate_persona_error();\n\n\n\n let terms_error = state.terms_error.lock_ref().is_some();\n\n let language_error = 
state.language_error.get();\n\n let _persona_error = state.persona_error.get();\n\n\n\n let persona_error = state.persona.lock_ref().is_empty();\n\n state.persona_error.set_neq(persona_error);\n\n\n\n let location_error = match &*state.location_json.borrow() {\n\n None => true,\n\n Some(x) => x.is_empty(),\n\n };\n\n state.location_error.set_neq(location_error);\n\n\n\n if !terms_error && !language_error && !persona_error && !location_error {\n\n next_step(state);\n", "file_path": "frontend/apps/crates/entry/user/src/register/pages/step_2/actions.rs", "rank": 66, "score": 322509.0140907303 }, { "content": "pub fn signin_email(state: Rc<LoginPage>) {\n\n state.tried_to_submit.set(true);\n\n\n\n if !state.email.email_acceptable() {\n\n return;\n\n }\n\n\n\n state.loader.load(clone!(state => async move {\n\n let email = state.email.get_value();\n\n let password = state.password.borrow().clone();\n\n\n\n let (resp, _):(Result<CreateSessionResponse, EmptyError>, u16) = api_with_basic_token_status(session::Create::PATH, &email, &password, session::Create::METHOD, None::<()>).await;\n\n\n\n match resp {\n\n Ok(resp) => {\n\n match resp {\n\n CreateSessionResponse::Login(resp) => {\n\n do_success(&resp.csrf);\n\n },\n\n CreateSessionResponse::Register{response, oauth_profile} => {\n", "file_path": "frontend/apps/crates/entry/user/src/login/actions.rs", "rank": 67, "score": 322509.0140907303 }, { "content": "pub fn submit(state: Rc<State>) {\n\n let mut ready = true;\n\n\n\n if !*state.over_18.borrow() {\n\n state.over_18_status.set(Some(Over18Error::Unchecked));\n\n ready = false;\n\n }\n\n\n\n if state.firstname.borrow().is_empty() {\n\n state.firstname_status.set(Some(NameError::Empty));\n\n ready = false;\n\n }\n\n\n\n if state.lastname.borrow().is_empty() {\n\n state.lastname_status.set(Some(NameError::Empty));\n\n ready = false;\n\n }\n\n\n\n if state.username.borrow().is_empty() {\n\n state.username_status.set(Some(NameError::Empty));\n", "file_path": 
"frontend/apps/crates/entry/user/src/register/pages/step_1/actions.rs", "rank": 68, "score": 322509.0140907303 }, { "content": "pub fn go_register(_state: Rc<LoginPage>) {\n\n let route: String = Route::User(UserRoute::Register(Default::default())).into();\n\n dominator::routing::go_to_url(&route);\n\n}\n\n\n\n//// PRIVATE HELPERS /////\n\n\n", "file_path": "frontend/apps/crates/entry/user/src/login/actions.rs", "rank": 69, "score": 322509.0140907303 }, { "content": "pub fn duplicate_module(state: Rc<State>, module_id: &ModuleId) {\n\n state.loader.load(clone!(state, module_id => async move {\n\n let jig_id = state.asset.unwrap_jig().id;\n\n let module = super::module_cloner::clone_module(&module_id, &jig_id).await.unwrap_ji();\n\n populate_added_module(state, module);\n\n }));\n\n}\n\n\n\n// pub fn _player_settings_change_signal(state: Rc<State>) -> impl Signal<Item = JigPlayerSettings> {\n\n// let sig = map_ref! {\n\n// let direction = state.settings.direction.signal_cloned(),\n\n// let display_score = state.settings.display_score.signal(),\n\n// let track_assessments = state.settings.track_assessments.signal(),\n\n// let drag_assist = state.settings.drag_assist.signal()\n\n// => ( *direction, *display_score, *track_assessments, *drag_assist)\n\n// };\n\n\n\n// sig.map(\n\n// |(direction, display_score, track_assessments, drag_assist)| JigPlayerSettings {\n\n// direction,\n\n// display_score,\n\n// track_assessments,\n\n// drag_assist,\n\n// },\n\n// )\n\n// }\n\n\n", "file_path": "frontend/apps/crates/entry/jig/edit/src/edit/sidebar/jig/actions.rs", "rank": 70, "score": 321030.86427135085 }, { "content": "pub fn render(jig_id: JigId, jig_edit_state: Rc<JigEditState>) -> Dom {\n\n let state = Rc::new(State::new(jig_id, jig_edit_state));\n\n\n\n html!(\"post-publish\", {\n\n .property(\"slot\", \"main\")\n\n .apply(clone!(state => move |dom| {\n\n match state.jig_edit_state.jig_focus {\n\n JigFocus::Resources => {\n\n dom.children(\n\n 
render_resources_focused_actions(&state)\n\n )\n\n },\n\n _ => {\n\n dom.children(\n\n render_modules_focused_actions(&state)\n\n )\n\n },\n\n }\n\n }))\n\n })\n\n}\n\n\n", "file_path": "frontend/apps/crates/entry/jig/edit/src/edit/post_publish/dom.rs", "rank": 71, "score": 320713.23868229555 }, { "content": "pub fn create_state(jig_id: JigId, module_id: ModuleId) -> Rc<AppState> {\n\n crate::debug::init(jig_id, module_id);\n\n\n\n let mut opts = StateOpts::new(jig_id, module_id);\n\n opts.force_raw = crate::debug::settings().data.clone();\n\n opts.skip_load_jig = crate::debug::settings().skip_load_jig;\n\n\n\n AppState::new(opts, Base::new)\n\n}\n", "file_path": "frontend/apps/crates/entry/module/tapping-board/play/src/state.rs", "rank": 72, "score": 320584.42643243534 }, { "content": "pub fn create_state(jig_id: JigId, module_id: ModuleId) -> Rc<AppState> {\n\n crate::debug::init(jig_id, module_id);\n\n\n\n let mut opts = StateOpts::new(jig_id, module_id);\n\n opts.force_raw = crate::debug::settings().data.clone();\n\n opts.is_main_scrollable = false;\n\n opts.skip_save_for_debug = crate::debug::settings().skip_save;\n\n opts.skip_load_jig = crate::debug::settings().skip_load_jig;\n\n\n\n AppState::new(opts, init_from_raw)\n\n}\n", "file_path": "frontend/apps/crates/entry/module/tapping-board/edit/src/state.rs", "rank": 73, "score": 320584.42643243534 }, { "content": "pub fn create_state(jig_id: JigId, module_id: ModuleId) -> Rc<AppState> {\n\n crate::debug::init(jig_id, module_id);\n\n\n\n let mut opts = StateOpts::new(jig_id, module_id);\n\n opts.force_raw = crate::debug::settings().data.clone();\n\n opts.is_main_scrollable = false;\n\n opts.skip_save_for_debug = crate::debug::settings().skip_save;\n\n opts.skip_load_jig = crate::debug::settings().skip_load_jig;\n\n\n\n AppState::new(opts, init_from_raw)\n\n}\n", "file_path": "frontend/apps/crates/entry/module/drag-drop/edit/src/state.rs", "rank": 74, "score": 320584.42643243534 }, { "content": "pub fn 
create_state(jig_id: JigId, module_id: ModuleId) -> Rc<AppState> {\n\n crate::debug::init(jig_id, module_id);\n\n\n\n let mut opts = StateOpts::new(jig_id, module_id);\n\n opts.force_raw = crate::debug::settings().data.clone();\n\n opts.skip_load_jig = crate::debug::settings().skip_load_jig;\n\n opts.skip_play = crate::debug::settings().skip_play;\n\n\n\n AppState::new(opts, Base::new)\n\n}\n", "file_path": "frontend/apps/crates/entry/module/drag-drop/play/src/state.rs", "rank": 75, "score": 320584.42643243534 }, { "content": "pub fn create_state(jig_id: JigId, module_id: ModuleId) -> Rc<AppState> {\n\n crate::debug::init(jig_id, module_id);\n\n\n\n let mut opts = StateOpts::new(jig_id, module_id);\n\n opts.force_raw = crate::debug::settings().data.clone();\n\n opts.skip_load_jig = crate::debug::settings().skip_load_jig;\n\n\n\n AppState::new(opts, Base::new)\n\n}\n", "file_path": "frontend/apps/crates/entry/module/card-quiz/play/src/state.rs", "rank": 76, "score": 320584.42643243534 }, { "content": "pub fn create_state(jig_id: JigId, module_id: ModuleId) -> Rc<AppState> {\n\n crate::debug::init(jig_id, module_id);\n\n let debug_settings = crate::debug::settings();\n\n\n\n let mut opts = StateOpts::new(jig_id, module_id);\n\n opts.force_raw = debug_settings.data.clone();\n\n opts.is_main_scrollable = true;\n\n opts.skip_save_for_debug = debug_settings.skip_save;\n\n opts.skip_load_jig = debug_settings.skip_load_jig;\n\n\n\n AppState::new(opts, init_from_raw)\n\n}\n\n\n\npub async fn init_from_raw(\n\n init_args: BaseInitFromRawArgs<RawData, Mode, Step>,\n\n) -> BaseInit<Step, Base, Main, Sidebar, Header, Footer, Overlay> {\n\n let force_step = {\n\n if init_args.source == InitSource::ForceRaw {\n\n crate::debug::settings().step\n\n } else {\n", "file_path": "frontend/apps/crates/entry/module/card-quiz/edit/src/state.rs", "rank": 77, "score": 320584.42643243534 }, { "content": "pub fn start_drag(state: Rc<CardBottom>, elem: HtmlElement, x: i32, y: i32) {\n\n 
state.phase.set(BottomPhase::Remove);\n\n if let Some(current) = state.game.get_current() {\n\n current\n\n .drag\n\n .set(Some(Rc::new(CardDrag::new((*state).clone(), elem, x, y))));\n\n\n\n if let Some(audio) = &state.card.audio {\n\n AUDIO_MIXER.with(|mixer| mixer.play_oneshot(audio.as_source()));\n\n }\n\n }\n\n}\n", "file_path": "frontend/apps/crates/entry/module/matching/play/src/base/game/card/actions.rs", "rank": 78, "score": 319928.27564592316 }, { "content": "pub fn list(\n\n db: &PgPool,\n\n user_id: Uuid,\n\n kind: Option<ImageKind>,\n\n) -> BoxStream<'_, sqlx::Result<UserImage>> {\n\n sqlx::query_as!(\n\n UserImage,\n\n // language=SQL\n\n r#\"\n", "file_path": "backend/api/src/db/image/user.rs", "rank": 79, "score": 319785.6729321007 }, { "content": "pub fn render(state: Rc<State>) -> impl Signal<Item = Vec<Dom>> {\n\n state\n\n .report_status\n\n .signal_cloned()\n\n .map(clone!(state => move|report_status| {\n\n match report_status {\n\n ReportStatus::Default => render_default(Rc::clone(&state)),\n\n ReportStatus::Active => render_active(Rc::clone(&state)),\n\n ReportStatus::Sent => render_sent(Rc::clone(&state)),\n\n }\n\n }))\n\n}\n\n\n", "file_path": "frontend/apps/crates/entry/jig/play/src/jig/sidebar/dom/report.rs", "rank": 80, "score": 319062.2688699203 }, { "content": "/// Returns whether the liked status should be loaded for a JIG\n\n///\n\n/// Returns true only if there is a logged-in user who is **not** the author of the JIG, and the\n\n/// JIG is published.\n\npub fn can_load_liked_status(jig: &JigResponse) -> bool {\n\n match utils::init::user::get_user() {\n\n Some(user) if jig.jig_data.draft_or_live.is_live() => match jig.author_id {\n\n Some(author_id) => author_id != user.id,\n\n None => true,\n\n },\n\n _ => false, // No logged-in user\n\n }\n\n}\n", "file_path": "frontend/apps/crates/entry/jig/play/src/jig/state.rs", "rank": 81, "score": 318790.8134530246 }, { "content": "pub fn image_lib_url(library_kind: MediaLibrary, img_kind: 
PngImageFile, id: ImageId) -> String {\n\n let path = media_key(library_kind, id.0, FileKind::ImagePng(img_kind));\n\n\n\n uploads_url(&path)\n\n}\n\n\n", "file_path": "frontend/apps/crates/utils/src/path.rs", "rank": 82, "score": 317966.244115354 }, { "content": "pub fn copy_module(state: Rc<State>, module_id: &ModuleId) {\n\n let value = format!(\"{},{}\", &state.asset.unwrap_jig().id.0, &module_id.0);\n\n\n\n let local_storage = get_local_storage().unwrap_ji();\n\n\n\n local_storage.set(COPY_MODULE_KEY, &value).unwrap_ji();\n\n}\n", "file_path": "frontend/apps/crates/entry/jig/edit/src/edit/sidebar/jig/copy_paste_module.rs", "rank": 83, "score": 314838.08150475926 }, { "content": "pub fn get_debug_pairs(mode: Mode, n_cards: usize) -> Vec<(String, String)> {\n\n match mode {\n\n Mode::Duplicate | Mode::Lettering => {\n\n let mut cards = Vec::new();\n\n\n\n for _i in 0..n_cards {\n\n cards.push((\"hello\".into(), \"world\".into()));\n\n }\n\n cards\n\n }\n\n Mode::WordsAndImages => vec![(\"hello\".into(), \"\".into())],\n\n Mode::Images => vec![(\"\".into(), \"\".into())],\n\n _ => vec![(\"hello\".into(), \"world\".into())],\n\n }\n\n}\n", "file_path": "frontend/apps/crates/entry/module/memory/play/src/config.rs", "rank": 84, "score": 312705.4493315627 }, { "content": "pub fn get_player_settings(settings_state: Rc<SettingsState>) -> JigPlayerOptions {\n\n let direction = settings_state.direction.get_cloned();\n\n let display_score = settings_state.display_score.get();\n\n let track_assessments = settings_state.track_assessments.get();\n\n let drag_assist = settings_state.drag_assist.get();\n\n\n\n JigPlayerOptions {\n\n direction,\n\n display_score,\n\n track_assessments,\n\n drag_assist,\n\n is_student: false,\n\n draft_or_live: DraftOrLive::Draft,\n\n }\n\n}\n\n\n", "file_path": "frontend/apps/crates/entry/jig/edit/src/edit/sidebar/jig/actions.rs", "rank": 85, "score": 311324.2392390198 }, { "content": "pub fn search(state: Rc<State>) {\n\n 
state.loader.load(clone!(state => async move {\n\n search_async(state).await;\n\n }));\n\n}\n", "file_path": "frontend/apps/crates/entry/home/src/home/actions.rs", "rank": 86, "score": 309605.2911117294 }, { "content": "pub fn render(\n\n state: Rc<State>,\n\n slot: Option<&str>,\n\n active_page: Option<PageLinks>,\n\n render_beta: bool,\n\n) -> Dom {\n\n actions::fetch_profile(Rc::clone(&state));\n\n\n\n html!(\"page-header\", {\n\n .apply_if(slot.is_some(), |dom| {\n\n dom.property(\"slot\", slot.unwrap_ji())\n\n })\n\n .children(PageLinks::iter().map(|page_link| {\n\n html!(\"page-header-link\", {\n\n .property(\"slot\", \"links\")\n\n .property(\"kind\", page_link.kind_str())\n\n .property(\"active\", {\n\n matches!(\n\n &active_page,\n\n Some(active_page) if active_page == &page_link\n", "file_path": "frontend/apps/crates/components/src/page_header/dom.rs", "rank": 87, "score": 309380.3324322071 }, { "content": "pub fn is_iframe() -> bool {\n\n let window = web_sys::window().unwrap_ji();\n\n let top = window.top().unwrap_ji().unwrap_ji();\n\n window != top\n\n}\n", "file_path": "frontend/apps/crates/entry/jig/play/src/jig/dom.rs", "rank": 88, "score": 306384.35464182514 }, { "content": "pub fn draw_path_commands(\n\n ctx: &CanvasRenderingContext2d,\n\n resize_info: &ResizeInfo,\n\n commands: &[(PathCommand, bool)],\n\n) {\n\n //log::warn!(\"canvas draw for path commands inherently fills!!!\");\n\n\n\n let path_string = path_command_to_string(\n\n commands\n\n .iter()\n\n .map(|(command, absolute)| (denormalize_command(command, resize_info), *absolute)),\n\n );\n\n\n\n let path_2d = web_sys::Path2d::new_with_path_string(&path_string).unwrap_ji();\n\n\n\n ctx.fill_with_path_2d(&path_2d);\n\n}\n\n\n", "file_path": "frontend/apps/crates/components/src/traces/canvas.rs", "rank": 89, "score": 305898.0938428867 }, { "content": "pub fn render_with_action(\n\n state: Rc<State>,\n\n slot: Option<&str>,\n\n get_action: Option<impl Fn() -> Dom + 'static>,\n\n) -> Dom 
{\n\n html!(\"empty-fragment\", {\n\n .apply_if(slot.is_some(), move |dom| {\n\n dom.property(\"slot\", slot.unwrap_ji())\n\n })\n\n .child_signal(state.init_loader.is_loading().map(clone!(state => move |init_loading| {\n\n if init_loading {\n\n Some(html!(\"p\", {\n\n .text(\"Loading...\")\n\n }))\n\n } else {\n\n let action = get_action.as_ref().map(|get_action| get_action());\n\n Some(render_loaded(state.clone(), action))\n\n }\n\n })))\n\n })\n\n}\n\n\n", "file_path": "frontend/apps/crates/components/src/image/search/dom.rs", "rank": 90, "score": 305775.6405475468 }, { "content": "pub fn navigate_to_login() {\n\n let location = web_sys::window().unwrap_ji().location();\n\n let origin = location.origin().unwrap_ji();\n\n\n\n let redirect = format!(\n\n \"{}{}\",\n\n location.pathname().unwrap_ji(),\n\n location.search().unwrap_ji()\n\n );\n\n\n\n let route: String = Route::User(UserRoute::Login(LoginQuery::redirect(redirect))).to_string();\n\n\n\n let url = format!(\"{}{}\", origin, route);\n\n\n\n let _ = location.set_href(&url);\n\n}\n", "file_path": "frontend/apps/crates/components/src/page_header/actions.rs", "rank": 91, "score": 305677.86590624816 }, { "content": "pub fn get(local_insecure: bool) -> actix_cors::Cors {\n\n let mut cors = actix_cors::Cors::default()\n\n .supports_credentials()\n\n .allowed_methods(&[Method::GET, Method::POST, Method::DELETE, Method::OPTIONS])\n\n .expose_headers(&[\n\n header::AUTHORIZATION,\n\n header::CONTENT_TYPE,\n\n header::HeaderName::from_static(\"x-csrf\"),\n\n ]);\n\n\n\n if local_insecure {\n\n cors = cors.allow_any_origin();\n\n } else {\n\n for origin in CORS_ORIGINS {\n\n cors = cors.allowed_origin(origin);\n\n }\n\n }\n\n\n\n cors\n\n}\n", "file_path": "backend/pages/src/server/cors.rs", "rank": 92, "score": 304828.1528160721 }, { "content": "pub fn on_background_audio_click(\n\n state: Rc<State>,\n\n selected: bool,\n\n audio_background: AudioBackground,\n\n) {\n\n if selected {\n\n 
state.background_audio.set(Some(audio_background));\n\n } else {\n\n state.background_audio.set(None);\n\n }\n\n update_jig_settings(Rc::clone(&state));\n\n}\n\n\n", "file_path": "frontend/apps/crates/entry/jig/edit/src/edit/sidebar/jig/settings/actions.rs", "rank": 93, "score": 302508.1715936992 }, { "content": "pub fn ui<T: AsRef<str>>(path: T) -> String {\n\n media_url(&format!(\"ui/{}\", path.as_ref()))\n\n}\n\n\n", "file_path": "frontend/apps/crates/utils/src/path.rs", "rank": 94, "score": 301297.90943325043 }, { "content": "fn rgba_to_i32(color: rgb::RGBA8) -> i32 {\n\n i32::from_be_bytes(color.into())\n\n}\n\n\n", "file_path": "backend/api/src/db/user.rs", "rank": 95, "score": 301184.05433405226 }, { "content": "pub fn navigate_to_publish(state: Rc<State>, jig: &JigResponse) {\n\n state.jig_edit_state.set_route_jig(JigEditRoute::Publish);\n\n state.collapsed.set(true);\n\n\n\n let jig_id = jig.id;\n\n Route::push_state(Route::Asset(AssetRoute::Edit(AssetEditRoute::Jig(\n\n jig_id,\n\n jig.jig_focus,\n\n JigEditRoute::Publish,\n\n ))));\n\n}\n\n\n\npub async fn update_jig(jig_id: &JigId, req: JigUpdateDraftDataRequest) -> Result<(), EmptyError> {\n\n let path = endpoints::jig::UpdateDraftData::PATH.replace(\"{id}\", &jig_id.0.to_string());\n\n api_with_auth_empty::<EmptyError, _>(&path, endpoints::jig::UpdateDraftData::METHOD, Some(req))\n\n .await\n\n}\n\n\n\npub async fn update_display_name(jig_id: JigId, value: String) {\n\n let req = JigUpdateDraftDataRequest {\n\n display_name: Some(value),\n\n ..Default::default()\n\n };\n\n\n\n let _ = update_jig(&jig_id, req).await;\n\n}\n\n\n", "file_path": "frontend/apps/crates/entry/jig/edit/src/edit/sidebar/jig/actions.rs", "rank": 96, "score": 299793.4732162019 }, { "content": "fn render_resources_focused_actions(state: &Rc<State>) -> Vec<Dom> {\n\n vec![\n\n html!(\"post-publish-action\", {\n\n .property(\"slot\", \"actions\")\n\n .property(\"kind\", \"new-resource\")\n\n .event(clone!(state => move |_: 
events::Click| {\n\n actions::create_jig(Rc::clone(&state));\n\n }))\n\n }),\n\n html!(\"post-publish-action\", {\n\n .property(\"kind\", \"view-resources\")\n\n .property(\"slot\", \"actions\")\n\n .event(|_: events::Click| {\n\n Route::Asset(AssetRoute::ResourceGallery).redirect();\n\n })\n\n }),\n\n ]\n\n}\n", "file_path": "frontend/apps/crates/entry/jig/edit/src/edit/post_publish/dom.rs", "rank": 97, "score": 299198.75381223887 }, { "content": "pub fn should_get_iframe_data() -> bool {\n\n crate::routes::is_param_bool(IFRAME_DATA_PARAM)\n\n}\n\n\n\n/// Action is used for passing runtime messages\n\n#[derive(Serialize, Deserialize, Debug)]\n\npub struct IframeAction<T> {\n\n pub data: T,\n\n}\n\n\n\nimpl<T> IframeAction<T> {\n\n pub fn new(data: T) -> Self {\n\n Self { data }\n\n }\n\n}\n\n\n\n#[wasm_bindgen(inline_js = \"export function temp_log(val) { console.log(val); }\")]\n\nextern \"C\" {\n\n fn temp_log(val: &JsValue);\n\n}\n", "file_path": "frontend/apps/crates/utils/src/iframe.rs", "rank": 98, "score": 297255.3421346796 }, { "content": "fn get_module_to_paste() -> Option<ModuleId> {\n\n let value = get_local_storage()\n\n .unwrap_ji()\n\n .get(COPY_MODULE_KEY)\n\n .unwrap_ji();\n\n\n\n match value {\n\n None => None,\n\n Some(value) => {\n\n let value: Vec<&str> = value.split(',').collect();\n\n // let jig_id = JigId(Uuid::from_str(value[0]).unwrap_ji());\n\n let module_id = ModuleId(Uuid::from_str(value[1]).unwrap_ji());\n\n // value\n\n log::info!(\"{:?}{:?}\", value, 90);\n\n\n\n Some(module_id)\n\n }\n\n }\n\n}\n", "file_path": "frontend/apps/crates/entry/jig/edit/src/edit/sidebar/jig/copy_paste_module.rs", "rank": 99, "score": 296388.2201376462 } ]
Rust
src/levels/level_list.rs
yancouto/functional
86e9f0d59e84983f0e0604b74286832af0b38da1
use serde::Deserialize; use super::{BaseLevel, GameLevel, TestCase}; use crate::prelude::*; fn get_true() -> bool { true } #[derive(Debug, Deserialize)] #[serde(deny_unknown_fields)] pub struct JLevel { pub name: String, pub description: String, pub extra_info: Option<String>, pub test_cases: Vec1<(String, String)>, pub solutions: Vec1<String>, #[serde(default)] pub wrong_solutions: Vec<String>, #[serde(default)] pub provides_constant: bool, #[serde(default = "get_true")] pub show_constants: bool, #[serde(default)] pub before_level_constants: Vec<(String, String)>, #[serde(default)] pub extra_info_is_hint: bool, } #[derive(Debug, Deserialize)] #[serde(deny_unknown_fields)] pub struct JSection { pub name: SectionName, pub levels: Vec1<JLevel>, } #[derive(Debug, Deserialize)] #[serde(deny_unknown_fields)] pub struct JLevelConfig { pub sections: Vec1<JSection>, pub tests: Vec1<(String, String)>, } const RAW_LEVEL_CONFIG: &[u8] = include_bytes!(concat!(env!("OUT_DIR"), "/level_config.json")); pub fn raw_load_level_config() -> JLevelConfig { serde_json::from_slice(RAW_LEVEL_CONFIG).expect("Invalid json") } fn load_all() -> Vec1<Section> { let config = raw_load_level_config(); config.sections.mapped(|s| { let section_name = s.name; Section { name: s.name, levels: { if cfg!(feature = "demo") && s.name > SectionName::Boolean { vec![] } else { let mut idx = 0; s.levels .mapped(|l| { if l.extra_info_is_hint { debug_assert!(l.extra_info.is_some()); } let level = GameLevel { base: BaseLevel { name: l.name, description: l.description, extra_info: l.extra_info, test_cases: l .test_cases .mapped(|t| TestCase::from_or_fail(&t.0, &t.1)), extra_info_is_hint: l.extra_info_is_hint, }, idx, section: section_name, solutions: l.solutions, wrong_solutions: l.wrong_solutions, show_constants: l.show_constants, }; idx += 1; level }) .into() } }, } }) } #[derive( Debug, strum::Display, strum::EnumIter, PartialEq, Eq, Hash, Clone, Copy, Deserialize, PartialOrd, Ord, )] #[strum(serialize_all = 
"snake_case")] #[serde(rename_all = "snake_case")] pub enum SectionName { Basic, Boolean, #[strum(serialize = "pair and list")] #[serde(rename = "pair and list")] PairAndList, Recursion, Numerals, #[strum(serialize = "more numerals")] #[serde(rename = "more numerals")] MoreNumerals, } pub struct Section { pub name: SectionName, pub levels: Vec<GameLevel>, } lazy_static! { pub static ref LEVELS: Vec1<Section> = load_all(); } #[cfg(test)] mod test { use std::{collections::HashSet, time::Duration}; use rayon::prelude::*; use strum::IntoEnumIterator; use super::{ super::{base::Level, get_result}, * }; use crate::{ interpreter::{interpreter::test::interpret_ok, ConstantProvider}, save_system::{LevelResult, SaveProfile} }; #[test] fn test_level_load() { assert!(LEVELS.len() > 0); } #[test] fn unique_names() { let names = LEVELS .iter() .flat_map(|s| &s.levels) .map(|l| l.base.name.clone()) .collect::<HashSet<_>>(); assert_eq!( names.len(), LEVELS.iter().flat_map(|s| &s.levels).count(), "Some name is duplicated in the levels definition" ); } #[test] fn test_jsonnet_tests() { raw_load_level_config() .tests .into_iter() .for_each(|(a, b)| { assert_eq!(interpret_ok(&a), interpret_ok(&b), "'{}' != '{}'", &a, &b) }); } fn solution_section(section: SectionName) { let mut all_levels_so_far = Vec::with_capacity(LEVELS.len()); LEVELS .iter() .filter(|s| s.name <= section) .flat_map(|s| s.levels.iter()) .for_each(|l| { all_levels_so_far.push(l.base.name.as_str()); if l.section < section { return; } l.solutions.par_iter().for_each(|s| { let r = Level::GameLevel(l) .test( s.chars(), ConstantProvider::new( l.into(), Some(Arc::new(SaveProfile::fake(all_levels_so_far.clone()))), ), ) .expect(&format!( "On '{}' failed to compile solution {}", l.base.name, s )); r.runs.iter().for_each(|r| { assert!( r.is_correct(), "Code '{}' does not reduce to '{}' on level '{}', instead reduced to {:?}", r.test_expression, r.expected_result, l.base.name, r.result.clone().map(|r| format!("{}", r.term)), 
) }); assert_matches!(get_result(&Ok(r)), LevelResult::Success { .. }); }) }); } fn all_sections(sections: Vec<SectionName>) { assert_eq!( SectionName::iter().collect::<HashSet<_>>(), sections.into_iter().collect::<HashSet<_>>() ); } macro_rules! solution_tests { ($($name:ident),*) => { $( #[test] #[allow(non_snake_case)] fn $name () { solution_section(SectionName::$name); } )* #[test] fn test_cover_all_sections() { all_sections(vec![$(SectionName::$name),*]) } } } solution_tests!( Basic, Boolean, Numerals, PairAndList, Recursion, MoreNumerals ); #[test] fn test_wrong_solutions() { LEVELS.iter().flat_map(|s| &s.levels).for_each(|l| { l.wrong_solutions.iter().for_each(|s| { assert_matches!( get_result(&Level::GameLevel(l).test(s.chars(), ConstantProvider::all())), LevelResult::Failure, "Code was solution {} on level {}", s, l.base.name ) }) }); } fn fake_bterm() -> bl::BTerm { bl::BTerm { width_pixels: W as u32, height_pixels: H as u32, original_height_pixels: H as u32, original_width_pixels: W as u32, fps: 30.0, frame_time_ms: 10.0, active_console: 0, key: None, mouse_pos: (0, 0), left_click: false, shift: false, control: false, alt: false, web_button: None, quitting: false, post_scanlines: false, post_screenburn: false, screen_burn_color: bl::RGB::from_u8(0, 1, 1), } } #[test] fn test_out_of_space() { use crate::{ drawables::BasicTextEditor, gamestates::{ base::{with_current_console, EventTickData, GSData, TickData}, editor::EditorState }, save_system::SaveProfile }; let fake_profile = Arc::new(SaveProfile::fake(vec![])); let mut term = fake_bterm(); bl::BACKEND_INTERNAL .lock() .consoles .push(bl::DisplayConsole { console: box bl::VirtualConsole::new(bl::Point::new(W, H)), shader_index: 0, font_index: 0, }); LEVELS.iter().flat_map(|s| &s.levels).for_each(|l| { let mut gs_data = GSData { cur: box EditorState::<BasicTextEditor>::new(l.into(), fake_profile.clone()), time: Duration::new(0, 0), }; with_current_console(0, |mut c| { let input = bl::INPUT.lock(); let 
data = TickData::new( &gs_data, EventTickData::default(), &mut c, &mut term, &input, None, ); gs_data.cur.tick(data); }) }); } }
use serde::Deserialize; use super::{BaseLevel, GameLevel, TestCase}; use crate::prelude::*; fn get_true() -> bool { true } #[derive(Debug, Deserialize)] #[serde(deny_unknown_fields)] pub struct JLevel { pub name: String, pub description: String, pub extra_info: Option<String>, pub test_cases: Vec1<(String, String)>, pub solutions: Vec1<String>, #[serde(default)] pub wrong_solutions: Vec<String>, #[serde(default)] pub provides_constant: bool, #[serde(default = "get_true")] pub show_constants: bool, #[serde(default)] pub before_level_constants: Vec<(String, String)>, #[serde(default)] pub extra_info_is_hint: bool, } #[derive(Debug, Deserialize)] #[serde(deny_unknown_fields)] pub struct JSection { pub name: SectionName, pub levels: Vec1<JLevel>, } #[derive(Debug, Deserialize)] #[serde(deny_unknown_fields)] pub struct JLevelConfig { pub sections: Vec1<JSection>, pub tests: Vec1<(String, String)>, } const RAW_LEVEL_CONFIG: &[u8] = include_bytes!(concat!(env!("OUT_DIR"), "/level_config.json")); pub fn raw_load_level_config() -> JLevelConfig { serde_json::from_slice(RAW_LEVEL_CONFIG).expect("Invalid json") } fn load_all() -> Vec1<Section> { let config = raw_load_level_config(); config.sections.mapped(|s| { let section_name = s.name; Section { name: s.name, levels: { if cfg!(feature = "demo") && s.name > SectionName::Boolea
ections() { all_sections(vec![$(SectionName::$name),*]) } } } solution_tests!( Basic, Boolean, Numerals, PairAndList, Recursion, MoreNumerals ); #[test] fn test_wrong_solutions() { LEVELS.iter().flat_map(|s| &s.levels).for_each(|l| { l.wrong_solutions.iter().for_each(|s| { assert_matches!( get_result(&Level::GameLevel(l).test(s.chars(), ConstantProvider::all())), LevelResult::Failure, "Code was solution {} on level {}", s, l.base.name ) }) }); } fn fake_bterm() -> bl::BTerm { bl::BTerm { width_pixels: W as u32, height_pixels: H as u32, original_height_pixels: H as u32, original_width_pixels: W as u32, fps: 30.0, frame_time_ms: 10.0, active_console: 0, key: None, mouse_pos: (0, 0), left_click: false, shift: false, control: false, alt: false, web_button: None, quitting: false, post_scanlines: false, post_screenburn: false, screen_burn_color: bl::RGB::from_u8(0, 1, 1), } } #[test] fn test_out_of_space() { use crate::{ drawables::BasicTextEditor, gamestates::{ base::{with_current_console, EventTickData, GSData, TickData}, editor::EditorState }, save_system::SaveProfile }; let fake_profile = Arc::new(SaveProfile::fake(vec![])); let mut term = fake_bterm(); bl::BACKEND_INTERNAL .lock() .consoles .push(bl::DisplayConsole { console: box bl::VirtualConsole::new(bl::Point::new(W, H)), shader_index: 0, font_index: 0, }); LEVELS.iter().flat_map(|s| &s.levels).for_each(|l| { let mut gs_data = GSData { cur: box EditorState::<BasicTextEditor>::new(l.into(), fake_profile.clone()), time: Duration::new(0, 0), }; with_current_console(0, |mut c| { let input = bl::INPUT.lock(); let data = TickData::new( &gs_data, EventTickData::default(), &mut c, &mut term, &input, None, ); gs_data.cur.tick(data); }) }); } }
n { vec![] } else { let mut idx = 0; s.levels .mapped(|l| { if l.extra_info_is_hint { debug_assert!(l.extra_info.is_some()); } let level = GameLevel { base: BaseLevel { name: l.name, description: l.description, extra_info: l.extra_info, test_cases: l .test_cases .mapped(|t| TestCase::from_or_fail(&t.0, &t.1)), extra_info_is_hint: l.extra_info_is_hint, }, idx, section: section_name, solutions: l.solutions, wrong_solutions: l.wrong_solutions, show_constants: l.show_constants, }; idx += 1; level }) .into() } }, } }) } #[derive( Debug, strum::Display, strum::EnumIter, PartialEq, Eq, Hash, Clone, Copy, Deserialize, PartialOrd, Ord, )] #[strum(serialize_all = "snake_case")] #[serde(rename_all = "snake_case")] pub enum SectionName { Basic, Boolean, #[strum(serialize = "pair and list")] #[serde(rename = "pair and list")] PairAndList, Recursion, Numerals, #[strum(serialize = "more numerals")] #[serde(rename = "more numerals")] MoreNumerals, } pub struct Section { pub name: SectionName, pub levels: Vec<GameLevel>, } lazy_static! 
{ pub static ref LEVELS: Vec1<Section> = load_all(); } #[cfg(test)] mod test { use std::{collections::HashSet, time::Duration}; use rayon::prelude::*; use strum::IntoEnumIterator; use super::{ super::{base::Level, get_result}, * }; use crate::{ interpreter::{interpreter::test::interpret_ok, ConstantProvider}, save_system::{LevelResult, SaveProfile} }; #[test] fn test_level_load() { assert!(LEVELS.len() > 0); } #[test] fn unique_names() { let names = LEVELS .iter() .flat_map(|s| &s.levels) .map(|l| l.base.name.clone()) .collect::<HashSet<_>>(); assert_eq!( names.len(), LEVELS.iter().flat_map(|s| &s.levels).count(), "Some name is duplicated in the levels definition" ); } #[test] fn test_jsonnet_tests() { raw_load_level_config() .tests .into_iter() .for_each(|(a, b)| { assert_eq!(interpret_ok(&a), interpret_ok(&b), "'{}' != '{}'", &a, &b) }); } fn solution_section(section: SectionName) { let mut all_levels_so_far = Vec::with_capacity(LEVELS.len()); LEVELS .iter() .filter(|s| s.name <= section) .flat_map(|s| s.levels.iter()) .for_each(|l| { all_levels_so_far.push(l.base.name.as_str()); if l.section < section { return; } l.solutions.par_iter().for_each(|s| { let r = Level::GameLevel(l) .test( s.chars(), ConstantProvider::new( l.into(), Some(Arc::new(SaveProfile::fake(all_levels_so_far.clone()))), ), ) .expect(&format!( "On '{}' failed to compile solution {}", l.base.name, s )); r.runs.iter().for_each(|r| { assert!( r.is_correct(), "Code '{}' does not reduce to '{}' on level '{}', instead reduced to {:?}", r.test_expression, r.expected_result, l.base.name, r.result.clone().map(|r| format!("{}", r.term)), ) }); assert_matches!(get_result(&Ok(r)), LevelResult::Success { .. }); }) }); } fn all_sections(sections: Vec<SectionName>) { assert_eq!( SectionName::iter().collect::<HashSet<_>>(), sections.into_iter().collect::<HashSet<_>>() ); } macro_rules! 
solution_tests { ($($name:ident),*) => { $( #[test] #[allow(non_snake_case)] fn $name () { solution_section(SectionName::$name); } )* #[test] fn test_cover_all_s
random
[ { "content": "fn get_level_config_json() -> String {\n\n match std::process::Command::new(\"jsonnet\")\n\n .args(&[\n\n \"-J\",\n\n \"src/levels/config\",\n\n \"src/levels/config/level_config.jsonnet\",\n\n ])\n\n .output()\n\n {\n\n Ok(o) if !o.stdout.is_empty() => String::from_utf8(o.stdout).unwrap(),\n\n Err(e) if e.kind() == std::io::ErrorKind::NotFound => {\n\n let mut vm = JsonnetVm::new();\n\n let out = vm\n\n .evaluate_file(\"src/levels/config/level_config.jsonnet\")\n\n .expect(\"Failed to parse jsonnet\")\n\n .to_string();\n\n out\n\n },\n\n Ok(o) => panic!(\"{}\", String::from_utf8_lossy(&o.stderr)),\n\n Err(e) => panic!(\"Failed to run {:?}\", e),\n\n }\n\n}\n\n\n", "file_path": "build.rs", "rank": 1, "score": 205124.13118089826 }, { "content": "fn description(lvl: &MaybeLevel) -> String {\n\n match lvl {\n\n Ok(l) => l.base.name.clone(),\n\n Err((id, err)) => format!(\"Can't load level {}. {}\", id, err),\n\n }\n\n}\n\n\n\nimpl GameState for UserCreatedLevelsState {\n\n fn name(&self) -> &'static str { \"User created levels\" }\n\n\n\n fn tick(&mut self, mut data: TickData) -> GameStateEvent {\n\n if data.pressed_key == Some(Key::Escape) {\n\n SFX::Back.play();\n\n GameStateEvent::Pop(1)\n\n } else if let Some(levels) = &mut self.levels {\n\n data.print(Pos::new(2, 2), \"User created levels\");\n\n let mut i = 5;\n\n for lvl in levels.inner() {\n\n data.print(Pos::new(i, 4), &description(lvl));\n\n i += 2;\n", "file_path": "src/gamestates/user_created_levels.rs", "rank": 3, "score": 158584.99140470257 }, { "content": "pub fn get_result(results: &TestRunResults) -> LevelResult {\n\n let r = match &results {\n\n Err(_) => None,\n\n Ok(runs) => Some(runs.runs.iter().map(|run| {\n\n if run.is_correct() {\n\n run.result.as_ref().ok().map(|r| r.stats)\n\n } else {\n\n None\n\n }\n\n })),\n\n };\n\n\n\n r.zip(results.as_ref().ok())\n\n .and_then(|(maybe_stats, runs)| {\n\n let full_len = maybe_stats.len();\n\n let stats: Vec<_> = maybe_stats.filter_map(|x| 
x).collect();\n\n if full_len != stats.len() {\n\n None\n\n } else {\n\n Some(accumulate_stats(stats, count_functions(&runs.code)))\n\n }\n\n })\n\n .map(|stats| LevelResult::Success { stats })\n\n .unwrap_or(LevelResult::Failure)\n\n}\n\n\n", "file_path": "src/levels/base.rs", "rank": 4, "score": 146901.1313275274 }, { "content": "/// Deletes only save profile. Leaves code there.\n\npub fn reset_profile(name: &str) {\n\n fs::remove_file(get_save_profile(name).join(SAVE_FILE)).debug_unwrap();\n\n}\n\n\n\nconst CURRENT_COMMON_VERSION: u32 = 1;\n\n#[derive(Savefile, Debug)]\n\npub struct CommonConfig {\n\n pub default_profile: Option<String>,\n\n #[savefile_default_val = \"7\"]\n\n #[savefile_versions = \"1..\"]\n\n pub volume: u8,\n\n}\n\n\n\nimpl Default for CommonConfig {\n\n fn default() -> Self {\n\n Self {\n\n default_profile: None,\n\n volume: 7,\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/save_system/save_file.rs", "rank": 6, "score": 141628.41546387246 }, { "content": "pub fn validate(\n\n workshop: WorkshopConfig,\n\n config: PathBuf,\n\n) -> Result<ParsedUserLevelConfig, ValidationError> {\n\n if workshop.title.is_empty() {\n\n Err(ValidationError::EmptyTitle)?;\n\n } else if workshop.description.is_empty() {\n\n Err(ValidationError::EmptyDescription)?;\n\n }\n\n let mut vm = JsonnetVm::new();\n\n let str = match vm.evaluate_file(config) {\n\n Ok(str) => str.to_string(),\n\n Err(err) => Err(ValidationError::JsonnetError(err.to_string()))?,\n\n };\n\n let config: UserLevelConfig = serde_json::from_str(&str)?;\n\n config.validate(workshop)\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct ValidationState {\n", "file_path": "src/gamestates/level_creator/validator.rs", "rank": 7, "score": 134823.67602433378 }, { "content": "#[derive(Debug, Default)]\n\nstruct Level {\n\n prev_node: Option<Box<Node>>,\n\n enveloping_functions: Vec<TVariable>,\n\n}\n\n\n\nimpl Level {\n\n /// Add a new term to the right of this level, merging it with prev_node if\n\n /// it exists\n\n 
fn merge(&mut self, node: Box<Node>) {\n\n self.prev_node = if let Some(prev) = self.prev_node.take() {\n\n Some(Box::new(Node::Apply {\n\n left: prev,\n\n right: node,\n\n }))\n\n } else {\n\n Some(node)\n\n };\n\n }\n\n\n\n /// Finish this level, and turn it into a single term. Fails if prev_node is\n", "file_path": "src/interpreter/parser.rs", "rank": 8, "score": 123808.2459951369 }, { "content": "pub fn load_common() -> CommonConfig {\n\n read(get_common_file(), CURRENT_COMMON_VERSION).debug_unwrap_or_default()\n\n}\n\n\n", "file_path": "src/save_system/save_file.rs", "rank": 9, "score": 123305.30236563376 }, { "content": "/// Will create a folder if it doesn't exist\n\npub fn load_profile(name: &str) -> Result<SaveProfile, SavefileError> {\n\n SaveProfile::load(get_save_profile(name), name.to_string())\n\n}\n\n\n", "file_path": "src/save_system/save_file.rs", "rank": 10, "score": 118500.51229315621 }, { "content": "pub fn set_volume(vol: u8) { MANAGER.lock().volume = vol.min(10).max(0); }\n", "file_path": "src/audio.rs", "rank": 11, "score": 115237.13477963113 }, { "content": "pub fn parse_or_fail(str: &str) -> Box<Node> {\n\n parse(tokenize(str.chars()).expect(\"Failed to tokenize\")).expect(\"Failed to parse\")\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub struct TestCaseRun {\n\n pub test_expression: Box<Node>,\n\n pub result: Result<Interpreted, InterpretError>,\n\n pub expected_result: Box<Node>,\n\n}\n\n\n\nimpl TestCaseRun {\n\n pub fn is_correct(&self) -> bool {\n\n self.result\n\n .as_ref()\n\n .map_or(false, |r| r.term == self.expected_result)\n\n }\n\n}\n\n\n\nimpl TestCase {\n", "file_path": "src/levels/base.rs", "rank": 12, "score": 113602.0342495836 }, { "content": "pub fn interpret(\n\n root: Box<Node>,\n\n fully_resolve: bool,\n\n provider: ConstantProvider,\n\n) -> Result<Interpreted, InterpretError> {\n\n let reductions = Arc::new(AtomicU32::new(0));\n\n let mut gen = Interpreter {\n\n fully_resolve,\n\n yield_intermediates: false,\n\n 
provider,\n\n reductions: reductions.clone(),\n\n }\n\n .interpret(0, root);\n\n loop {\n\n match gen.as_mut().resume(()) {\n\n GeneratorState::Yielded(_) => {\n\n debug_unreachable!(\"yield_intermediates is set to false\")\n\n },\n\n GeneratorState::Complete(ret) =>\n\n break ret.map(|term| Interpreted {\n\n term,\n\n stats: Stats {\n\n reductions: reductions.load(Ordering::Relaxed),\n\n },\n\n }),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/interpreter/interpreter.rs", "rank": 13, "score": 110220.94389125971 }, { "content": "fn inside_consider_border(mouse: &Pos, rect: &Rect) -> bool {\n\n mouse.inside(&Rect::new(\n\n rect.pos.i - 2,\n\n rect.pos.j - 1,\n\n rect.size.w + 2,\n\n rect.size.h + 3,\n\n ))\n\n}\n\n\n\nconst SAVE: &str = \"Save\";\n\nconst RELOAD: &str = \"Reload\";\n\nconst VALIDATE: &str = \"Validate\";\n\n\n\nconst INSTRUCTIONS: &str = r#\"\n\nTo create a level, first give it a name and a description, which will be shown in the Steam Workshop config.\n\n\n\nThe level config should be a JSON object which describes the levels. The most important fields of it are:\n\n\n\n- test_cases: A list, each element must be a list with exactly two strings, a test case. 
The first element is a function that is applied to the user's solution, and the second must be the expected result.\n\n\n", "file_path": "src/gamestates/level_creator/editor.rs", "rank": 14, "score": 108217.6544915686 }, { "content": "pub fn interpret_itermediates(\n\n root: Box<Node>,\n\n fully_resolve: bool,\n\n provider: ConstantProvider,\n\n) -> impl Iterator<Item = Box<Node>> {\n\n InterpretIter {\n\n gen: Interpreter {\n\n fully_resolve,\n\n yield_intermediates: true,\n\n provider,\n\n reductions: Arc::new(AtomicU32::new(0)),\n\n }\n\n .interpret(0, root),\n\n finished: false,\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\npub mod test {\n\n use super::{\n", "file_path": "src/interpreter/interpreter.rs", "rank": 15, "score": 107704.5405653095 }, { "content": "pub fn edit_and_save<R, F: FnOnce(&mut CommonConfig) -> R>(edit_fn: F) {\n\n let mut config = load_common();\n\n edit_fn(&mut config);\n\n write(get_common_file(), CURRENT_COMMON_VERSION, &config);\n\n}\n", "file_path": "src/save_system/save_file.rs", "rank": 16, "score": 106374.25773067589 }, { "content": "pub fn black() -> bl::RGBA { bl::RGBA::named(bl::BLACK) }\n", "file_path": "src/drawables/text_box.rs", "rank": 17, "score": 101907.86153254601 }, { "content": "pub fn white() -> bl::RGBA { bl::RGBA::named(bl::WHITE) }\n", "file_path": "src/drawables/text_box.rs", "rank": 18, "score": 101907.86153254601 }, { "content": "pub fn gray() -> bl::RGBA { bl::RGBA::named(bl::GRAY) }\n", "file_path": "src/drawables/text_box.rs", "rank": 19, "score": 101907.86153254601 }, { "content": "pub fn light_red() -> bl::RGBA { bl::RGBA::from_u8(255, 100, 100, 255) }\n\n\n\nimpl TickData<'_> {\n\n pub fn draw_box_color(&mut self, rect: Rect, fg: bl::RGBA, bg: bl::RGBA) {\n\n let Rect { pos, size } = rect;\n\n self.console\n\n .draw_box(pos.j, pos.i, size.w - 1, size.h - 1, fg, bg);\n\n }\n\n\n\n pub fn title_box_color(&mut self, title: &str, rect: Rect, fg: bl::RGBA, bg: bl::RGBA) {\n\n self.draw_box_color(rect, fg, bg);\n\n 
self.print(Pos::new(rect.pos.i, rect.pos.j + 1), title);\n\n }\n\n\n\n pub fn title_box(&mut self, title: &str, rect: Rect) {\n\n self.title_box_color(title, rect, white(), black());\n\n }\n\n\n\n pub fn text_box(&mut self, title: &str, text: &str, rect: Rect, fail_on_out_of_space: bool) {\n\n self.title_box(title, rect);\n", "file_path": "src/drawables/text_box.rs", "rank": 20, "score": 101568.05833171723 }, { "content": "pub fn update_section_achievements(client: Arc<Client>, profile: Arc<SaveProfile>) {\n\n wrap(move || update_section_achievements_impl(client, profile));\n\n}\n", "file_path": "src/utils/steam.rs", "rank": 21, "score": 101375.10786417134 }, { "content": "pub fn tick() { MANAGER.lock().tick(); }\n\n\n", "file_path": "src/audio.rs", "rank": 22, "score": 98294.492282501 }, { "content": "pub fn dark_gray() -> bl::RGBA { bl::RGBA::named(bl::DARK_GRAY) }\n", "file_path": "src/drawables/text_box.rs", "rank": 23, "score": 97629.01426587348 }, { "content": "fn check_constants(node: &Node, provider: ConstantProvider) -> Result<(), LevelTestError> {\n\n for constant in all_constants(&node) {\n\n if provider.get(&constant).is_none() {\n\n return Err(LevelTestError::UnknownConstant(constant.to_string()));\n\n }\n\n }\n\n Ok(())\n\n}\n\n\n\nimpl Level {\n\n pub fn test<S: IntoIterator<Item = char>>(\n\n &self,\n\n code: S,\n\n provider: ConstantProvider,\n\n ) -> TestRunResults {\n\n let ts = Instant::now();\n\n let node = parse(tokenize(code)?)?;\n\n check_constants(&node, provider)?;\n\n // From here, we use all constants as the test cases may have unknown constants and that's fine\n\n let provider = ConstantProvider::all();\n", "file_path": "src/levels/base.rs", "rank": 24, "score": 96200.58864532193 }, { "content": "fn get_save_profile(name: &str) -> PathBuf { get_save_dir().join(name) }\n\n\n", "file_path": "src/save_system/save_file.rs", "rank": 25, "score": 94579.74020021933 }, { "content": "fn raw_load_constants() -> HashMap<String, ConstantNode> {\n\n 
raw_load_level_config()\n\n .sections\n\n .into_iter()\n\n .flat_map(|section| {\n\n let section_name = section.name;\n\n section\n\n .levels\n\n .into_iter()\n\n .enumerate()\n\n .flat_map(move |(i, level)| {\n\n let mut v: Vec<_> = level\n\n .before_level_constants\n\n .into_iter()\n\n .map(|(name, term)| {\n\n (\n\n name,\n\n ConstantNode {\n\n term: parse_constant(&term),\n\n method: DiscoveryMethod::BeforeLevel {\n", "file_path": "src/interpreter/constants.rs", "rank": 26, "score": 92644.5853993081 }, { "content": "fn upload_level(\n\n id_sender: Sender<u64>,\n\n state_sender: Sender<State>,\n\n level: ParsedUserLevelConfig,\n\n client: Arc<SteamClient>,\n\n config: WorkshopConfig,\n\n) {\n\n #[cfg(feature = \"steam\")]\n\n {\n\n std::thread::spawn(move || {\n\n match upload_level_impl(\n\n id_sender,\n\n state_sender.clone(),\n\n level,\n\n client.clone(),\n\n config,\n\n ) {\n\n Err(err) => {\n\n log::error!(\"Failed to upload: {}\", err);\n\n state_sender.send(State::Error(err)).debug_unwrap();\n", "file_path": "src/gamestates/level_creator/uploading.rs", "rank": 27, "score": 92433.18283115127 }, { "content": "struct JsonReceiver {\n\n recv: Receiver<ClientMessage>,\n\n buf: String,\n\n}\n\n\n\nimpl JsonReceiver {\n\n fn new(recv: Receiver<ClientMessage>) -> Self {\n\n Self {\n\n recv,\n\n buf: String::new(),\n\n }\n\n }\n\n}\n\n\n\nimpl Read for JsonReceiver {\n\n fn read(&mut self, mut buf: &mut [u8]) -> Result<usize, std::io::Error> {\n\n debug_unreachable!(\"Shouldn't call read directly!\");\n\n let ret = buf.write(self.buf.as_bytes());\n\n self.buf.clear();\n\n ret\n", "file_path": "src/text_editor/interface.rs", "rank": 28, "score": 91689.56879482645 }, { "content": "fn validate(name: &str) -> Option<ValidationError> {\n\n if name.len() > 18 {\n\n Some(ValidationError::TooLong)\n\n } else if !name.is_ascii() {\n\n Some(ValidationError::NonASCII)\n\n } else if name.chars().any(|x| !x.is_ascii_alphanumeric() && x != '_') {\n\n 
Some(ValidationError::NonAlphaNumeric)\n\n } else {\n\n None\n\n }\n\n}\n\n\n\nimpl GameState for ProfileSelectionState {\n\n fn name(&self) -> &'static str { \"ProfileSelection\" }\n\n\n\n fn tick(&mut self, mut data: TickData) -> GameStateEvent {\n\n self.editor.draw(&mut data);\n\n data.instructions(&[\"Press ENTER to create or load profile\"]);\n\n\n\n let mut i = H / 2 - self.known_profiles.len() as i32 * 3 / 2 - 3;\n", "file_path": "src/gamestates/profile_selection.rs", "rank": 29, "score": 91457.63991009611 }, { "content": "pub fn count_functions(root: &Node) -> u16 {\n\n match root {\n\n Node::Constant(_) => 0,\n\n Node::Variable(_) => 0,\n\n Node::Function { body, .. } => 1 + count_functions(body),\n\n Node::Apply { left, right } => count_functions(left) + count_functions(right),\n\n }\n\n}\n\n\n", "file_path": "src/interpreter/interpreter.rs", "rank": 30, "score": 91355.13003294398 }, { "content": "#[cfg(feature = \"steam\")]\n\nfn upload_level_impl(\n\n id_sender: Sender<u64>,\n\n state_sender: Sender<State>,\n\n level: ParsedUserLevelConfig,\n\n client: Arc<SteamClient>,\n\n config: WorkshopConfig,\n\n) -> Result<u64, UploadError> {\n\n use steamworks::*;\n\n // Create item if it doesn't exist\n\n let published_id = if let Some(id) = config.published_id {\n\n Ok(id)\n\n } else {\n\n state_sender.send(State::CreatingItem).debug_unwrap();\n\n let (send, recv) = bounded(1);\n\n client\n\n .ugc()\n\n .create_item(client.utils().app_id(), FileType::Community, move |r| {\n\n send.send(r).debug_unwrap()\n\n });\n\n if let Ok(r) = recv.recv() {\n", "file_path": "src/gamestates/level_creator/uploading.rs", "rank": 31, "score": 90429.40883035117 }, { "content": "pub fn configure_user_stats(client: Arc<Client>) {\n\n let handle = box client\n\n .clone()\n\n .register_callback(move |s: UserStatsReceived| {\n\n *LOADED.0.lock() = true;\n\n LOADED.1.notify_all();\n\n if let Err(err) = s.result {\n\n log::error!(\"Failed to fetch user stats, ignoring: {}\", err);\n\n 
} else {\n\n log::info!(\"Successfully loaded user stats\");\n\n }\n\n });\n\n // It's fine for this to live forever\n\n Box::leak(handle);\n\n client.user_stats().request_current_stats();\n\n}\n\n\n", "file_path": "src/utils/steam.rs", "rank": 32, "score": 89473.91423420084 }, { "content": "pub fn tokenize<S: IntoIterator<Item = char>>(\n\n str: S,\n\n) -> Result<impl Iterator<Item = Token>, TokenizeError> {\n\n // TODO: We can make a proper iterator that goes through the string as needed\n\n Ok(tokenize_vec(str)?.into_iter())\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n #[test]\n\n fn it_works() {\n\n assert_eq!(\n\n tokenize(\"a: asd()B\".chars())\n\n .unwrap()\n\n .collect::<Vec<Token>>(),\n\n vec![\n\n Token::Variable('a'),\n\n Token::Colon,\n\n Token::Constant(\"asd\".to_string()),\n\n Token::OpenPar,\n\n Token::ClosePar,\n\n Token::Constant(\"B\".to_string()),\n\n ]\n\n );\n\n }\n\n}\n", "file_path": "src/interpreter/tokenizer.rs", "rank": 33, "score": 88973.05811422244 }, { "content": "/// Returns a sender to send messages from client to Xi server, and a receiver\n\n/// to get messages back from Xi server.\n\npub fn start_xi_thread() -> (ClientMessageSender, ServerMessageReceiver) {\n\n let mut state = XiCore::new();\n\n let (server_sender, server_receiver) = channel();\n\n let (client_sender, client_receiver) = channel();\n\n thread::spawn(move || {\n\n let r = RpcLoop::new(JsonSender(server_sender))\n\n .mainloop(|| JsonReceiver::new(client_receiver), &mut state);\n\n log::info!(\"Out of Xi main loop! 
{:?}\", r);\n\n });\n\n let recv = ServerMessageReceiver::new(server_receiver);\n\n let recv2 = recv.clone();\n\n thread::spawn(move || recv2.main_loop());\n\n (\n\n ClientMessageSender {\n\n sender: client_sender,\n\n receiver: recv.clone(),\n\n id_count: Arc::new(AtomicU64::new(0)),\n\n },\n\n recv,\n\n )\n\n}\n", "file_path": "src/text_editor/interface.rs", "rank": 34, "score": 85813.55512530496 }, { "content": "pub fn try_load_default_profile() -> Box<dyn GameState> {\n\n let common = load_common();\n\n match common.default_profile {\n\n Some(user) => SaveLoaderState::try_load(user),\n\n // Maybe also go to ProfileSelection if the save was deleted\n\n None => box ProfileSelectionState::new(),\n\n }\n\n}\n\n\n", "file_path": "src/gamestates/profile_selection.rs", "rank": 35, "score": 84513.15998908547 }, { "content": "// In practice will only be called once, but it's not FnOnce\n\npub trait GameStateBuilder = Fn() -> Box<dyn GameState>;\n\n#[derive(Debug)]\n\npub struct IntroState<F: GameStateBuilder> {\n\n time_since_creation_ms: f32,\n\n next: F,\n\n}\n\n\n\nimpl<F: GameStateBuilder> IntroState<F> {\n\n pub fn new(next: F) -> Self {\n\n IntroState {\n\n time_since_creation_ms: 0.0,\n\n next,\n\n }\n\n }\n\n}\n\n\n\nimpl<F: GameStateBuilder> GameState for IntroState<F> {\n\n fn name(&self) -> &'static str { \"Intro\" }\n\n\n\n fn tick(&mut self, data: TickData) -> GameStateEvent {\n", "file_path": "src/gamestates/intro.rs", "rank": 36, "score": 83685.76283623569 }, { "content": "struct JsonSender(Sender<ServerMessage>);\n\n\n\nimpl Write for JsonSender {\n\n fn write(&mut self, buf: &[u8]) -> std::io::Result<usize> {\n\n let msg = buf.try_into()?;\n\n if matches!(msg, ServerMessage::Unknown) {\n\n log::debug!(\"Unknown message: {}\", String::from_utf8_lossy(buf));\n\n }\n\n if let Err(e) = self.0.send(msg) {\n\n log::error!(\"Failed to send {:?}\", e.0);\n\n }\n\n Ok(buf.len())\n\n }\n\n\n\n fn flush(&mut self) -> std::io::Result<()> {\n\n 
debug_unreachable!(\"Not expecting flush\");\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "src/text_editor/interface.rs", "rank": 37, "score": 80323.20457168893 }, { "content": "pub fn get_single_achievement(client: Arc<Client>, ach: ManualAchievements) {\n\n wrap(move || get_single_achievement_impl(client, ach));\n\n}\n\n\n", "file_path": "src/utils/steam.rs", "rank": 38, "score": 80054.14188424798 }, { "content": "pub fn all_constants<'a>(root: &'a Node) -> impl Iterator<Item = &'a Constant> {\n\n // Can be improved with generators if needed\n\n let mut v = Vec::new();\n\n all_constants_rec(&root, &mut v);\n\n v.into_iter()\n\n}\n", "file_path": "src/interpreter/traversers.rs", "rank": 39, "score": 77064.89223942079 }, { "content": "pub fn get_save_dir() -> PathBuf { PROJECT_DIR.data_dir().join(\"savegames\") }\n", "file_path": "src/save_system/save_file.rs", "rank": 40, "score": 76766.17628547456 }, { "content": "// Will we ever need two consoles?\n\npub fn with_current_console<F, R>(active_console: usize, f: F) -> R\n\nwhere\n\n F: FnOnce(&mut Box<dyn bl::Console>) -> R,\n\n{\n\n f(&mut bl::BACKEND_INTERNAL.lock().consoles[active_console].console)\n\n}\n\n\n\nimpl GameStateManager {\n\n pub fn new(first: Box<dyn GameState>, client: Option<SteamClient>) -> Self {\n\n log::info!(\"Starting on gamestate {}\", first.name());\n\n let this = Self {\n\n all_gs: Vec1::new(GSData {\n\n cur: first,\n\n time: Duration::default(),\n\n }),\n\n steam_client: client.map(Arc::new),\n\n };\n\n #[cfg(feature = \"steam\")]\n\n if let Some(client) = this.steam_client.clone() {\n\n crate::utils::steam::configure_user_stats(client);\n", "file_path": "src/gamestates/base.rs", "rank": 41, "score": 76607.74833664631 }, { "content": "use serde::{Deserialize, Serialize};\n\n\n\nuse crate::prelude::*;\n\n\n\n#[derive(Debug, Serialize, Deserialize)]\n\n/// UserLevelConfig is the format in which the JSON level config file must be specified.\n\n/// See the field descriptions belows for more 
information.\n\n///\n\n/// Example of a valid level:\n\n/// ```text\n\n/// {\n\n/// test_cases: [[\"f: f a\", \"a\"]],\n\n/// solutions: [\"x: x\"]\n\n/// }\n\n/// ```\n\npub struct UserLevelConfig {\n\n /// Name of the level shown in the UI. If not present, defaults to the same name as\n\n /// in the Workshop.\n\n pub name: Option<String>,\n\n /// Description shown in the game UI. If not present, defaults to the same description\n", "file_path": "src/gamestates/level_creator/level_config.rs", "rank": 42, "score": 72039.23390567374 }, { "content": " /// as in the Workshop level.\n\n pub description: Option<String>,\n\n /// If present, is displayed as extra info in the UI, next to the description.\n\n pub extra_info: Option<String>,\n\n /// If present, is displayed as a hint in the UI, next to the description. A button must\n\n /// be clicked in order for the hint to be shown. Can't be specified if `extra_info` is\n\n /// specified.\n\n pub hint: Option<String>,\n\n /// Each element of the vector is a test case. Each test case is represented as two strings.\n\n /// - The first must be a function that takes one argument, the user's solution to the problem.\n\n /// - The second is the reduction when the first function is applied to the user's solution.\n\n ///\n\n /// Example for the level \"boolean or\":\n\n /// `[[\"f: f TRUE FALSE A B\", \"A\"], [\"f: FALSE FALSE A B\", \"B\"]]`\n\n ///\n\n /// You must specify at least one test case.\n\n pub test_cases: Vec1<(String, String)>,\n\n /// Each of the strings must be a correct solution for the problem, and must pass all test\n\n /// cases. 
You must specify at least one valid solution.\n\n ///\n", "file_path": "src/gamestates/level_creator/level_config.rs", "rank": 43, "score": 72026.48822409028 }, { "content": " /// Example for the level \"boolean or\":\n\n /// `[\"a:b: x:y: a x (b x y)\", \"a:b: NOT (AND (NOT a) (NOT b))\"]`\n\n pub solutions: Vec1<String>,\n\n #[serde(default)]\n\n /// You may optionally specify wrong solutions, and validation will make sure they do not\n\n /// pass all tests.\n\n pub wrong_solutions: Vec<String>,\n\n #[serde(default)]\n\n /// This is an optional list of additional constants the player may use. By default the\n\n /// user knows all constants from the game.\n\n ///\n\n /// Constants are two-element arrays. First element is the name, which must be UPPER_CASE.\n\n /// Second element is the constant term.\n\n ///\n\n /// Example: `[[\"DUPLICATE\", \"x: x x\"]]`\n\n pub extra_constants: Vec<(String, String)>,\n\n}\n", "file_path": "src/gamestates/level_creator/level_config.rs", "rank": 44, "score": 72024.25787988406 }, { "content": "pub fn accumulate_stats<V: IntoIterator<Item = Stats>>(v: V, functions: u16) -> AccStats {\n\n struct Acc {\n\n sum_red: u32,\n\n count: u32,\n\n }\n\n let acc = v.into_iter().fold(\n\n Acc {\n\n sum_red: 0,\n\n count: 0,\n\n },\n\n |acc, stats| Acc {\n\n sum_red: acc.sum_red + stats.reductions,\n\n count: acc.count + 1,\n\n },\n\n );\n\n AccStats {\n\n reductions_x100: acc.sum_red * 100 / acc.count,\n\n functions,\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub struct Interpreted {\n\n pub term: Box<Node>,\n\n pub stats: Stats,\n\n}\n\n\n", "file_path": "src/interpreter/interpreter.rs", "rank": 45, "score": 71028.7202327884 }, { "content": "pub fn parse<T: IntoIterator<Item = Token>>(tokens: T) -> Result<Box<Node>, ParseError> {\n\n // Levels keep track of all the current terms being created. 
Opening a new parenthesis\n\n // means creating a new level, and closing one means merging it upward.\n\n let mut levels = Vec1::new(Level::default());\n\n let mut iter = tokens.into_iter().peekable();\n\n let mut bindings = Bindings::new();\n\n while let Some(token) = iter.next() {\n\n match token {\n\n Token::Variable(name) =>\n\n if iter.peek() == Some(&Token::Colon) {\n\n iter.next().unwrap();\n\n if levels.last().prev_node.is_some() {\n\n return Err(ParseError::FunctionInsideBody);\n\n }\n\n bindings.push_var(name);\n\n levels.last_mut().enveloping_functions.push(name);\n\n } else {\n\n levels\n\n .last_mut()\n\n .merge(Box::new(Node::Variable(Variable::new(\n", "file_path": "src/interpreter/parser.rs", "rank": 46, "score": 68556.8033112617 }, { "content": "/// On file not found, return default value.\n\nfn read<T: savefile::WithSchema + savefile::Deserialize + Default>(\n\n path: PathBuf,\n\n version: u32,\n\n) -> Result<T, SavefileError> {\n\n match savefile::load_file(path.to_str().unwrap(), version) {\n\n Ok(value) => Ok(value),\n\n Err(SavefileError::IOError { io_error }) if io_error.kind() == io::ErrorKind::NotFound =>\n\n Ok(Default::default()),\n\n Err(err) => {\n\n log::error!(\"Failed to read save file {:?}: {:?}\", path, err);\n\n Err(err)\n\n },\n\n }\n\n}\n\n\n", "file_path": "src/save_system/save_file.rs", "rank": 47, "score": 66007.10408680953 }, { "content": "fn open_file_page(client: &SteamClient, id: u64) {\n\n #[cfg(feature = \"steam\")]\n\n client\n\n .friends()\n\n .activate_game_overlay_to_web_page(&format!(\"steam://url/CommunityFilePage/{}\", id));\n\n}\n\n\n\npub const LEVEL_FILE: &str = \"level.json\";\n\n\n", "file_path": "src/gamestates/level_creator/uploading.rs", "rank": 48, "score": 65285.42758307899 }, { "content": "fn update_section_achievements_impl(client: Arc<Client>, profile: Arc<SaveProfile>) {\n\n let info = profile.get_levels_info();\n\n let mut any = false;\n\n LEVELS.iter().for_each(|section| {\n\n let completed_all = 
section.levels.iter().all(|l| {\n\n info.get(&l.base.name)\n\n .map(|i| i.result.is_success())\n\n .unwrap_or(false)\n\n });\n\n let ach_name = format!(\"SECTION_{}\", section.name)\n\n .to_uppercase()\n\n .replace(' ', \"_\");\n\n let user_stats = client.user_stats();\n\n let ach = user_stats.achievement(&ach_name);\n\n if completed_all && ach.get().debug_unwrap_or(false) == false {\n\n log::info!(\"Achieved all levels in section {}\", section.name);\n\n ach.set().debug_unwrap();\n\n any = true;\n\n }\n\n });\n", "file_path": "src/utils/steam.rs", "rank": 49, "score": 62952.89826230075 }, { "content": "struct AudioManager {\n\n playing: Vec<Sound>,\n\n // Use std::sync::Mutex here because it's what ears lib requires\n\n data: EnumMap<SFX, Arc<std::sync::Mutex<SoundData>>>,\n\n volume: u8,\n\n}\n\n\n\nimpl AudioManager {\n\n fn new() -> Result<Self, SoundError> {\n\n let config = load_common();\n\n Ok(Self {\n\n playing: Vec::new(),\n\n data: enum_map! {\n\n sfx => Arc::new(std::sync::Mutex::new(SoundData::new(&format!(\"assets/sounds/{}.wav\", sfx))?)),\n\n },\n\n volume: config.volume,\n\n })\n\n }\n\n\n\n fn tick(&mut self) { self.playing.drain_filter(|s| !s.is_playing()); }\n\n}\n\n\n", "file_path": "src/audio.rs", "rank": 50, "score": 62200.33066225324 }, { "content": "struct Axis {\n\n min: u32,\n\n step: u32,\n\n data: HashMap<AccStats, usize>,\n\n}\n\n\n", "file_path": "src/drawables/leaderboards.rs", "rank": 51, "score": 62200.33066225324 }, { "content": "#[derive(Debug, Clone)]\n\nstruct Interpreter {\n\n fully_resolve: bool,\n\n yield_intermediates: bool,\n\n provider: ConstantProvider,\n\n reductions: Arc<AtomicU32>,\n\n}\n\n\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq)]\n\npub struct Stats {\n\n /// Number of reductions it took to get to the correct result\n\n pub reductions: u32,\n\n}\n\n\n\n#[derive(Savefile, Debug, Clone, Copy, PartialEq, Eq, Hash)]\n\npub struct AccStats {\n\n /// This is the average number of reductions multiplied by 100\n\n pub 
reductions_x100: u32,\n\n /// Number of functions in the solution term\n\n pub functions: u16,\n\n}\n\n\n\nimpl AccStats {\n\n /// Always get new\n\n pub fn best(self, new: Self) -> Self { new }\n\n}\n\n\n", "file_path": "src/interpreter/interpreter.rs", "rank": 52, "score": 62200.33066225324 }, { "content": "#[derive(Debug)]\n\nstruct Bindings {\n\n /// For each variable, at which depths it was defined\n\n /// Example, for (x: x: x), you would get x with depth 1 and 2\n\n map: HashMap<TVariable, Vec1<usize>>,\n\n cur_depth: usize,\n\n}\n\n\n\nimpl Bindings {\n\n fn new() -> Self {\n\n Self {\n\n map: HashMap::new(),\n\n cur_depth: 0,\n\n }\n\n }\n\n\n\n /// Create a new variable with given name, register and return it.\n\n fn push_var(&mut self, name: TVariable) {\n\n self.cur_depth += 1;\n\n match self.map.entry(name) {\n\n Entry::Vacant(entry) => {\n", "file_path": "src/interpreter/parser.rs", "rank": 53, "score": 62200.33066225324 }, { "content": "struct MainState {\n\n manager: gamestates::base::GameStateManager,\n\n client: Option<MainThreadSteamClient>,\n\n}\n\n\n\nimpl bl::GameState for MainState {\n\n fn tick(&mut self, ctx: &mut bl::BTerm) {\n\n #[cfg(feature = \"steam\")]\n\n if let Some(c) = &self.client {\n\n c.run_callbacks();\n\n }\n\n self.manager.tick(ctx);\n\n }\n\n}\n\n\n\n#[derive(Debug, StructOpt)]\n\n#[structopt(name = \"functional\")]\n\npub struct Opt {\n\n /// Skip intro screen\n\n #[structopt(long)]\n", "file_path": "src/main.rs", "rank": 54, "score": 62200.33066225324 }, { "content": "#[derive(Debug, Default, Clone)]\n\nstruct Line {\n\n text: String,\n\n}\n\n\n\nimpl From<InsLine> for Line {\n\n fn from(line: InsLine) -> Self { Self { text: line.text } }\n\n}\n\n\n\n#[derive(Derivative)]\n\n#[derivative(Debug)]\n\npub struct XiEditor {\n\n title: String,\n\n rect: Rect,\n\n cursor: Pos,\n\n selections: Vec<(Pos, Pos)>,\n\n text: Vec<Line>,\n\n cursor_blink_rate: Duration,\n\n cursor_enabled: bool,\n\n view_id: xi_core_lib::ViewId,\n\n 
send: ClientMessageSender,\n", "file_path": "src/drawables/xi_editor.rs", "rank": 55, "score": 60817.44071978421 }, { "content": "#[derive(Debug, Clone)]\n\nstruct Entry {\n\n friend: String,\n\n stats: AccStats,\n\n}\n\n\n", "file_path": "src/drawables/friend_leaderboard.rs", "rank": 56, "score": 60817.44071978421 }, { "content": "#[derive(Debug, Clone)]\n\nstruct CompletionData {\n\n // Level this constant data is for\n\n level: Level,\n\n // Save profile with list of completed levels\n\n profile: Option<Arc<SaveProfile>>,\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub struct ConstantProvider {\n\n // None currently means use all constants\n\n completion_data: Option<CompletionData>,\n\n numerals: Numerals,\n\n}\n\n\n\nimpl ConstantProvider {\n\n pub fn new(current_level: Level, profile: Option<Arc<SaveProfile>>) -> Self {\n\n Self {\n\n numerals: match &current_level {\n\n Level::GameLevel(gl) =>\n\n if gl.section >= SectionName::Numerals {\n", "file_path": "src/interpreter/constants.rs", "rank": 57, "score": 60817.44071978421 }, { "content": "struct Data {\n\n // Bound variables by level. 
First the actual name of it, and second the String name, which may\n\n // have a suffix if the expression has two variables with the same name\n\n bound_vars: Vec<String>,\n\n // How many times this is variable is bound\n\n bound_times: HashMap<char, usize>,\n\n // Unbound variables that are shadowed by a variable of the same name, somewhere in the term\n\n shadowed_unbound: HashSet<TVariable>,\n\n}\n\n\n\nimpl Data {\n\n fn new(shadowed: HashSet<TVariable>) -> Self {\n\n Self {\n\n bound_vars: Vec::new(),\n\n bound_times: HashMap::new(),\n\n shadowed_unbound: shadowed,\n\n }\n\n }\n\n\n\n fn get_text(&mut self, var: Variable) -> String {\n", "file_path": "src/interpreter/node_display.rs", "rank": 58, "score": 60817.44071978421 }, { "content": "struct InterpretIter {\n\n gen: InterpretResult,\n\n finished: bool,\n\n}\n\n\n\nimpl Iterator for InterpretIter {\n\n type Item = Box<Node>;\n\n\n\n fn next(&mut self) -> Option<Self::Item> {\n\n if self.finished {\n\n None\n\n } else {\n\n match self.gen.as_mut().resume(()) {\n\n GeneratorState::Yielded(y) => Some(y),\n\n GeneratorState::Complete(_) => {\n\n self.finished = true;\n\n None\n\n },\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/interpreter/interpreter.rs", "rank": 59, "score": 60817.44071978421 }, { "content": "struct ConstantNode {\n\n term: Box<Node>,\n\n method: DiscoveryMethod,\n\n}\n\n\n\nimpl ConstantNode {\n\n fn can_be_used(&self, data: &CompletionData) -> bool {\n\n match data {\n\n CompletionData {\n\n level: Level::GameLevel(gl),\n\n profile: Some(profile),\n\n } => match &self.method {\n\n DiscoveryMethod::BeforeLevel { section, lvl_idx } =>\n\n (*section, *lvl_idx) <= (gl.section, gl.idx),\n\n DiscoveryMethod::LevelCompleted { name, section } =>\n\n *section <= gl.section\n\n && *name != gl.base.name\n\n && profile\n\n .get_levels_info()\n\n .get(name)\n", "file_path": "src/interpreter/constants.rs", "rank": 60, "score": 60817.44071978421 }, { "content": "#[derive(Debug)]\n\nstruct DebugData 
{\n\n steps: Vec<Box<Node>>,\n\n interpreted: Result<Interpreted, InterpretError>,\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct PlaygroundState<Editor: TextEditor> {\n\n editor: Editor,\n\n data: Option<Result<Result<DebugData, ParseError>, TokenizeError>>,\n\n provider: ConstantProvider,\n\n}\n\n\n\nconst EDITOR_W: i32 = 40;\n\nconst MAX_STEPS: usize = H as usize - 5;\n\n\n\nimpl<Editor: TextEditor> PlaygroundState<Editor> {\n\n pub fn new(initial_text: String, provider: ConstantProvider) -> Self {\n\n Self {\n\n editor: Editor::new(\n\n \"Playground\".to_string(),\n", "file_path": "src/gamestates/playground.rs", "rank": 61, "score": 60817.44071978421 }, { "content": "fn main() {\n\n rerun_if_changed_paths(\"src/levels/config/**/*.jsonnet\").unwrap();\n\n rerun_if_changed_paths(\"src/levels/config/**/*.libsonnet\").unwrap();\n\n rerun_if_changed_paths(\"src/levels/config/**/*.json\").unwrap();\n\n let out_dir = std::env::var_os(\"OUT_DIR\").unwrap();\n\n let dest_path = Path::new(&out_dir).join(\"level_config.json\");\n\n std::fs::write(&dest_path, &get_level_config_json()).unwrap();\n\n}\n", "file_path": "build.rs", "rank": 62, "score": 60357.097914538565 }, { "content": "// Returns size of the expression\n\nfn for_each_unbound_req<F: Fn(&mut usize) -> () + Copy>(\n\n root: &mut Node,\n\n cur_depth: usize,\n\n f: F,\n\n) -> u32 {\n\n match root {\n\n Node::Constant(_) => 1,\n\n Node::Variable(v) =>\n\n // unbound variables in the root expression, not necessarily in the whole expression\n\n // for example, on (x:y: x), x is considered unbound in the subterm (y: x).\n\n {\n\n if v.depth >= cur_depth {\n\n f(&mut v.depth);\n\n }\n\n 1\n\n }\n\n Node::Function { variable: _, body } => for_each_unbound_req(body, cur_depth + 1, f) + 1,\n\n Node::Apply { left, right } =>\n\n for_each_unbound_req(left, cur_depth, f) + for_each_unbound_req(right, cur_depth, f),\n\n }\n\n}\n\n\n", "file_path": "src/interpreter/interpreter.rs", "rank": 63, "score": 59081.532005549656 }, { 
"content": "#[derive(Savefile, Debug, Default)]\n\nstruct SaveFile {\n\n level_info: HashMap<String, LevelInfo>,\n\n}\n\n\n\nimpl SaveProfile {\n\n #[cfg(test)]\n\n pub fn fake(completed_levels: Vec<&str>) -> Self {\n\n Self {\n\n name: \"test\".to_string(),\n\n path: PathBuf::new(),\n\n current_save_file: Mutex::new(SaveFile {\n\n level_info: completed_levels\n\n .into_iter()\n\n .map(|l| {\n\n (\n\n l.to_string(),\n\n LevelInfo {\n\n result: LevelResult::Success {\n\n stats: AccStats {\n\n reductions_x100: 100,\n", "file_path": "src/save_system/save_file.rs", "rank": 64, "score": 58358.31914968847 }, { "content": "fn wait_for_stats() {\n\n let mut lock = LOADED.0.lock();\n\n if *lock == false {\n\n LOADED.1.wait(&mut lock);\n\n }\n\n}\n\n\n", "file_path": "src/utils/steam.rs", "rank": 65, "score": 55923.43467323894 }, { "content": "fn maybe_load_icon() {\n\n let result = bl::BACKEND\n\n .lock()\n\n .context_wrapper\n\n .as_ref()\n\n .map(|wrapped_ctx| {\n\n bmp::from_reader(&mut ICON_DATA.clone()).map(|img| {\n\n let mut data =\n\n Vec::with_capacity((img.get_height() * img.get_width() * 4) as usize);\n\n for (x, y) in img.coordinates() {\n\n let bmp::Pixel { r, g, b } = img.get_pixel(x, y);\n\n data.append(&mut vec![r, g, b, 255]);\n\n }\n\n winit::window::Icon::from_rgba(data, img.get_width(), img.get_height())\n\n .map(|icon| wrapped_ctx.wc.window().set_window_icon(Some(icon)))\n\n })\n\n });\n\n match result {\n\n Some(Ok(Ok(()))) => {},\n\n err @ _ => log::warn!(\"Failed to set icon correctly: {:?}\", err),\n\n }\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 66, "score": 55923.43467323894 }, { "content": "pub trait GameState {\n\n fn name(&self) -> &'static str;\n\n fn tick(&mut self, data: TickData) -> GameStateEvent;\n\n fn on_event(&mut self, _event: bl::BEvent, _input: &bl::Input) {}\n\n fn clear_terminal(&self) -> bool { true }\n\n}\n", "file_path": "src/gamestates/base.rs", "rank": 67, "score": 55229.779711810465 }, { "content": " pub trait 
ConvertToNode {\n\n fn n(self) -> Box<Node>;\n\n }\n\n\n\n impl<T: Into<Box<Node>>> ConvertToNode for T {\n\n fn n(self) -> Box<Node> { self.into() }\n\n }\n\n\n\n #[test]\n\n fn simple() {\n\n assert_eq!(parse_ok(\"A\"), Box::new(Node::Constant(\"A\".to_string())));\n\n assert_eq!(parse(vec![Token::Variable('x')]).unwrap(), (0, 'x').n());\n\n assert_ne!(parse(vec![Token::Variable('x')]).unwrap(), (0, 'y').n());\n\n assert_eq!(\n\n parse_ok(\"a bc c\"),\n\n (((0, 'a').n(), \"bc\".n()).n(), (0, 'c').n()).n()\n\n );\n\n assert_eq!(\n\n parse_ok(\"a b c\"),\n\n (((0, 'a').n(), (0, 'b').n()).n(), (0, 'c').n()).n(),\n", "file_path": "src/interpreter/parser.rs", "rank": 68, "score": 55229.779711810465 }, { "content": "#[cfg(feature = \"steam\")]\n\nfn get_leaderboard_data(\n\n sender: channel::Sender<LdData>,\n\n level_id: String,\n\n upload_score: Option<AccStats>,\n\n client: Arc<SteamClient>,\n\n friend_sender: channel::Sender<FriendResult>,\n\n) -> Result<(), LeaderboardLoadError> {\n\n log::info!(\"Finding or creating leaderboard\");\n\n let (send, recv) = channel::bounded(1);\n\n client.user_stats().find_or_create_leaderboard(\n\n &format!(\"level_{}\", level_id),\n\n steamworks::LeaderboardSortMethod::Ascending,\n\n steamworks::LeaderboardDisplayType::Numeric,\n\n move |result| send.send(result).debug_unwrap(),\n\n );\n\n let lb = match recv.recv() {\n\n Ok(Ok(Some(lb))) => lb,\n\n Ok(Ok(None)) => Err(LeaderboardLoadError::FailedToFindLeaderboard)?,\n\n Ok(Err(e)) => Err(e)?,\n\n Err(_) => Err(LeaderboardLoadError::ChannelDisconnected)?,\n", "file_path": "src/drawables/leaderboards.rs", "rank": 69, "score": 54672.41447185618 }, { "content": "fn rec_display(\n\n node: &Node,\n\n data: &mut Data,\n\n f: &mut fmt::Formatter<'_>,\n\n func_prefix: bool,\n\n needs_assoc_par: bool,\n\n) -> fmt::Result {\n\n match node {\n\n Node::Constant(c) => c.fmt(f)?,\n\n Node::Variable(v) => data.get_text(*v).fmt(f)?,\n\n Node::Function { variable, body } => {\n\n if !func_prefix || 
needs_assoc_par {\n\n \"(\".fmt(f)?;\n\n }\n\n f.write_fmt(format_args!(\"{}{}: \", variable, data.get_suffix(*variable)))?;\n\n data.with_bound_var(|data| rec_display(&body, data, f, true, false), *variable)?;\n\n if !func_prefix || needs_assoc_par {\n\n \")\".fmt(f)?;\n\n }\n\n },\n", "file_path": "src/interpreter/node_display.rs", "rank": 70, "score": 54672.41447185618 }, { "content": "pub trait DebugUnwrap {\n\n /// Only on debug mode, panics if the value can't be unwraped.\n\n /// On production builds, only log.\n\n fn debug_unwrap(self);\n\n /// Same as debug_unwrap, but with custom text.\n\n fn debug_expect(self, text: &str);\n\n}\n\n\n", "file_path": "src/utils/debug_asserts.rs", "rank": 71, "score": 54057.93024327086 }, { "content": "pub trait TextEditorInner {\n\n fn on_event(&mut self, event: &bl::BEvent, input: &bl::Input);\n\n fn load_string(&mut self, str: String);\n\n fn load_file(&mut self, path: Option<PathBuf>) -> std::io::Result<()>;\n\n fn to_string(&self) -> String;\n\n fn draw(&mut self, data: &mut TickData);\n\n\n\n fn rect(&self) -> &Rect;\n\n\n\n /// Default should be on\n\n fn set_cursor(&mut self, enable: bool);\n\n}\n\n\n", "file_path": "src/drawables/text_editor.rs", "rank": 72, "score": 52969.613426537675 }, { "content": "fn mark_shadowed_unbound_variables(\n\n node: &Node,\n\n map: &mut HashMap<TVariable, u32>,\n\n shadowed: &mut HashSet<TVariable>,\n\n cur_depth: usize,\n\n) {\n\n match node {\n\n Node::Constant(_) => {},\n\n Node::Variable(v) =>\n\n if v.depth == cur_depth && *map.get(&v.original).unwrap_or(&0) > 0 {\n\n shadowed.insert(v.original);\n\n },\n\n Node::Function { variable, body } => {\n\n *map.entry(*variable).or_insert(0) += 1;\n\n mark_shadowed_unbound_variables(body, map, shadowed, cur_depth + 1);\n\n *map.get_mut(variable).unwrap() -= 1;\n\n },\n\n Node::Apply { left, right } => {\n\n mark_shadowed_unbound_variables(left, map, shadowed, cur_depth);\n\n mark_shadowed_unbound_variables(right, map, shadowed, 
cur_depth);\n", "file_path": "src/interpreter/node_display.rs", "rank": 73, "score": 52437.90254699779 }, { "content": "fn wrap<F: FnOnce() -> () + Send + 'static>(f: F) {\n\n std::thread::spawn(move || {\n\n wait_for_stats();\n\n f();\n\n });\n\n}\n\n\n", "file_path": "src/utils/steam.rs", "rank": 74, "score": 52400.03430728312 }, { "content": "pub trait DebugUnwrapOr<T> {\n\n /// On debug mode, panic if the value can't be unwraped.\n\n /// On production builds, returns the given value.\n\n fn debug_unwrap_or(self, default: T) -> T;\n\n}\n\n\n\nimpl<T> DebugUnwrap for Option<T> {\n\n fn debug_unwrap(self) { self.debug_expect(\"Optional has unexpected None value!\"); }\n\n\n\n fn debug_expect(self, text: &str) {\n\n if cfg!(debug_assertions) {\n\n if self.is_none() {\n\n println!(\"{}\", Backtrace::force_capture());\n\n }\n\n self.expect(text);\n\n } else {\n\n if self.is_none() {\n\n log::error!(\"{}\", text);\n\n }\n\n }\n", "file_path": "src/utils/debug_asserts.rs", "rank": 75, "score": 50944.18596532865 }, { "content": "fn main() -> bl::BError {\n\n let opt = &CMD_LINE_OPTIONS;\n\n let log_file = save_system::PROJECT_DIR.cache_dir().join(\"debug.log\");\n\n println!(\"Writing debug logs to {:?}\", log_file);\n\n CombinedLogger::init(vec![\n\n TermLogger::new(\n\n LevelFilter::Debug,\n\n Config::default(),\n\n TerminalMode::Mixed,\n\n ColorChoice::Auto,\n\n ),\n\n WriteLogger::new(\n\n LevelFilter::Debug,\n\n Config::default(),\n\n std::fs::File::create(log_file.clone()).expect(\"Failed to create debug file\"),\n\n ),\n\n ])\n\n .expect(\"Failed to set up logger.\");\n\n ears::init().unwrap();\n\n\n", "file_path": "src/main.rs", "rank": 76, "score": 50655.565824998455 }, { "content": "type ServerResponse = Result<Json, RemoteError>;\n\nuse crossbeam::channel::{unbounded as channel, Receiver, Sender};\n\nuse serde::de::Error;\n\n#[derive(Debug, Deserialize)]\n\npub struct ConfigChanges {\n\n #[serde(flatten)]\n\n extra: HashMap<String, 
Json>,\n\n}\n\n\n\n#[derive(Debug, Deserialize)]\n\n#[serde(rename_all = \"snake_case\")]\n\n#[serde(tag = \"type\")]\n\n#[serde(deny_unknown_fields)]\n\npub enum UpdateAnnotation {\n\n Selection {\n\n ranges: Vec<(usize, usize, usize, usize)>,\n\n n: usize,\n\n payloads: Option<Vec<Json>>,\n\n },\n\n}\n", "file_path": "src/text_editor/interface.rs", "rank": 77, "score": 49581.48293254591 }, { "content": "pub trait TextEditor: TextEditorInner {\n\n /// rect is just the dimensions of the text, not of the whole rectangle\n\n /// so there's still a border around it (1 left, down, right, 2 up)\n\n fn new(title: String, rect: Rect, initial_text: String) -> Self;\n\n}\n", "file_path": "src/drawables/text_editor.rs", "rank": 78, "score": 48984.80090080519 }, { "content": "pub trait DebugUnwrapOrDefault<T: Default> {\n\n /// On debug mode, panic if the value can't be unwraped.\n\n /// On production builds, return default value.\n\n fn debug_unwrap_or_default(self) -> T;\n\n}\n\n\n", "file_path": "src/utils/debug_asserts.rs", "rank": 79, "score": 47280.520495683166 }, { "content": "fn distribute<F: Fn(AccStats) -> u32>(data: &HashMap<AccStats, u32>, get: F, size: u32) -> Axis {\n\n debug_assert!(size > 1);\n\n if data.is_empty() {\n\n return Axis {\n\n min: 0,\n\n step: 1,\n\n data: HashMap::new(),\n\n };\n\n }\n\n let min = data.keys().cloned().map(&get).min().unwrap();\n\n let max = data.keys().cloned().map(&get).max().unwrap();\n\n let step = ((max - min + 1) + size - 1) / size;\n\n Axis {\n\n min,\n\n step,\n\n data: data\n\n .keys()\n\n .map(|a| (*a, ((get(*a) - min) / step) as usize))\n\n .collect(),\n\n }\n\n}\n\n\n", "file_path": "src/drawables/leaderboards.rs", "rank": 80, "score": 46034.750860856955 }, { "content": "fn parse_constant(term: &str) -> Box<Node> {\n\n parse(tokenize(term.chars()).expect(\"Failed to tokenize constant\"))\n\n .expect(\"Failed to parse constant\")\n\n}\n\n\n", "file_path": "src/interpreter/constants.rs", "rank": 81, "score": 
44582.53619608988 }, { "content": "/// Return a number such that a few numbers are greater than that in the given matrix.\n\n/// But not a lot. Used to display \"common\" values.\n\nfn get_common(data: &Vec<Vec<u32>>) -> u32 {\n\n let mut tot = 0;\n\n let mut freqs = BTreeMap::new();\n\n for v in data {\n\n for val in v {\n\n if *val > 0 {\n\n *freqs.entry(*val).or_insert(0u32) += 1;\n\n tot += 1;\n\n }\n\n }\n\n }\n\n let mut ans = u32::MAX;\n\n // Stop when we're in the 25-50% range\n\n let (min, max, mut cur) = (tot * 25 / 100, tot * 50 / 100, 0);\n\n loop {\n\n match freqs.pop_last() {\n\n Some((val, freq)) =>\n\n if freq + cur > max {\n\n break ans;\n\n } else {\n", "file_path": "src/drawables/leaderboards.rs", "rank": 82, "score": 42523.86545046678 }, { "content": "type MaybeLevel = Result<Arc<UserCreatedLevel>, (u64, LoadLevelError)>;\n\n\n\npub struct UserCreatedLevelsState {\n\n save_profile: Arc<SaveProfile>,\n\n levels: Option<VecWithCursor<MaybeLevel>>,\n\n}\n\n\n", "file_path": "src/gamestates/user_created_levels.rs", "rank": 98, "score": 41005.81641096899 }, { "content": "fn get_single_achievement_impl(client: Arc<Client>, ach_type: ManualAchievements) {\n\n let stats = client.user_stats();\n\n let ach = stats.achievement(&format!(\"{}\", ach_type));\n\n if ach.get().debug_unwrap_or(false) == false {\n\n log::info!(\"Got achievement {:?}!\", ach_type);\n\n ach.set().debug_unwrap();\n\n stats.store_stats().debug_unwrap();\n\n }\n\n}\n", "file_path": "src/utils/steam.rs", "rank": 99, "score": 39328.409105237704 } ]
Rust
src/render/src/encoder.rs
pravic/gfx
f0fde6d3d05412358d08707f66c544fb48c6a531
#![deny(missing_docs)] use std::mem; use draw_state::target::{Depth, Stencil}; use gfx_core::{Device, IndexType, Resources, VertexCount}; use gfx_core::{draw, format, handle, tex}; use gfx_core::factory::{cast_slice, Typed}; use mesh; use pso; #[allow(missing_docs)] #[derive(Clone, Debug, PartialEq)] pub enum UpdateError<T> { OutOfBounds { target: T, source: T, }, UnitCountMismatch { target: usize, slice: usize, }, } pub struct Encoder<R: Resources, C: draw::CommandBuffer<R>> { command_buffer: C, raw_pso_data: pso::RawDataSet<R>, handles: handle::Manager<R>, } impl<R: Resources, C: draw::CommandBuffer<R>> From<C> for Encoder<R, C> { fn from(combuf: C) -> Encoder<R, C> { Encoder { command_buffer: combuf, raw_pso_data: pso::RawDataSet::new(), handles: handle::Manager::new(), } } } impl<R: Resources, C: draw::CommandBuffer<R>> Encoder<R, C> { pub fn flush<D>(&mut self, device: &mut D) where D: Device<Resources=R, CommandBuffer=C> { device.pin_submitted_resources(&self.handles); device.submit(&mut self.command_buffer); self.command_buffer.reset(); self.handles.clear(); } pub fn clone_empty(&self) -> Encoder<R, C> { Encoder { command_buffer: self.command_buffer.clone_empty(), raw_pso_data: pso::RawDataSet::new(), handles: handle::Manager::new(), } } pub fn update_buffer<T: Copy>(&mut self, buf: &handle::Buffer<R, T>, data: &[T], offset_elements: usize) -> Result<(), UpdateError<usize>> { if data.is_empty() { return Ok(()) } let elem_size = mem::size_of::<T>(); let offset_bytes = elem_size * offset_elements; let bound = data.len().wrapping_mul(elem_size) + offset_bytes; if bound <= buf.get_info().size { self.command_buffer.update_buffer( self.handles.ref_buffer(buf.raw()).clone(), cast_slice(data), offset_bytes); Ok(()) } else { Err(UpdateError::OutOfBounds { target: bound, source: buf.get_info().size, }) } } pub fn update_constant_buffer<T: Copy>(&mut self, buf: &handle::Buffer<R, T>, data: &T) { use std::slice; let slice = unsafe { slice::from_raw_parts(data as *const 
T as *const u8, mem::size_of::<T>()) }; self.command_buffer.update_buffer( self.handles.ref_buffer(buf.raw()).clone(), slice, 0); } pub fn update_texture<S, T>(&mut self, tex: &handle::Texture<R, T::Surface>, face: Option<tex::CubeFace>, img: tex::NewImageInfo, data: &[S::DataType]) -> Result<(), UpdateError<[tex::Size; 3]>> where S: format::SurfaceTyped, S::DataType: Copy, T: format::Formatted<Surface = S>, { if data.is_empty() { return Ok(()) } let target_count = img.get_texel_count(); if target_count != data.len() { return Err(UpdateError::UnitCountMismatch { target: target_count, slice: data.len(), }) } let dim = tex.get_info().kind.get_dimensions(); if !img.is_inside(dim) { let (w, h, d, _) = dim; return Err(UpdateError::OutOfBounds { target: [ img.xoffset + img.width, img.yoffset + img.height, img.zoffset + img.depth, ], source: [w, h, d], }) } self.command_buffer.update_texture( self.handles.ref_texture(tex.raw()).clone(), tex.get_info().kind, face, cast_slice(data), img.convert(T::get_format())); Ok(()) } fn draw_indexed<T>(&mut self, buf: &handle::Buffer<R, T>, ty: IndexType, slice: &mesh::Slice<R>, base: VertexCount, instances: draw::InstanceOption) { self.command_buffer.bind_index(self.handles.ref_buffer(buf.raw()).clone(), ty); self.command_buffer.call_draw_indexed(slice.start, slice.end - slice.start, base, instances); } fn draw_slice(&mut self, slice: &mesh::Slice<R>, instances: draw::InstanceOption) { match slice.kind { mesh::SliceKind::Vertex => self.command_buffer.call_draw( slice.start, slice.end - slice.start, instances), mesh::SliceKind::Index8(ref buf, base) => self.draw_indexed(buf, IndexType::U8, slice, base, instances), mesh::SliceKind::Index16(ref buf, base) => self.draw_indexed(buf, IndexType::U16, slice, base, instances), mesh::SliceKind::Index32(ref buf, base) => self.draw_indexed(buf, IndexType::U32, slice, base, instances), } } pub fn clear<T: format::RenderFormat>(&mut self, view: &handle::RenderTargetView<R, T>, value: T::View) where 
T::View: Into<draw::ClearColor> { let target = self.handles.ref_rtv(view.raw()).clone(); self.command_buffer.clear_color(target, value.into()) } pub fn clear_depth<T: format::DepthFormat>(&mut self, view: &handle::DepthStencilView<R, T>, depth: Depth) { let target = self.handles.ref_dsv(view.raw()).clone(); self.command_buffer.clear_depth_stencil(target, Some(depth), None) } pub fn clear_stencil<T: format::StencilFormat>(&mut self, view: &handle::DepthStencilView<R, T>, stencil: Stencil) { let target = self.handles.ref_dsv(view.raw()).clone(); self.command_buffer.clear_depth_stencil(target, None, Some(stencil)) } pub fn draw<D: pso::PipelineData<R>>(&mut self, slice: &mesh::Slice<R>, pipeline: &pso::PipelineState<R, D::Meta>, user_data: &D) { let (pso, _) = self.handles.ref_pso(pipeline.get_handle()); self.command_buffer.bind_pipeline_state(pso.clone()); self.raw_pso_data.clear(); user_data.bake_to(&mut self.raw_pso_data, pipeline.get_meta(), &mut self.handles); self.command_buffer.bind_vertex_buffers(self.raw_pso_data.vertex_buffers.clone()); self.command_buffer.bind_pixel_targets(self.raw_pso_data.pixel_targets.clone()); self.command_buffer.set_ref_values(self.raw_pso_data.ref_values); self.command_buffer.set_scissor(self.raw_pso_data.scissor); self.command_buffer.bind_constant_buffers(&self.raw_pso_data.constant_buffers); for &(location, value) in &self.raw_pso_data.global_constants { self.command_buffer.bind_global_constant(location, value); } self.command_buffer.bind_unordered_views(&self.raw_pso_data.unordered_views); self.command_buffer.bind_resource_views(&self.raw_pso_data.resource_views); self.command_buffer.bind_samplers(&self.raw_pso_data.samplers); self.draw_slice(slice, slice.instances); } }
#![deny(missing_docs)] use std::mem; use draw_state::target::{Depth, Stencil}; use gfx_core::{Device, IndexType, Resources, VertexCount}; use gfx_core::{draw, format, handle, tex}; use gfx_core::factory::{cast_slice, Typed}; use mesh; use pso; #[allow(missing_docs)] #[derive(Clone, Debug, PartialEq)] pub enum UpdateError<T> { OutOfBounds { target: T, source: T, }, UnitCountMismatch { target: usize, slice: usize, }, } pub struct Encoder<R: Resources, C: draw::CommandBuffer<R>> { command_buffer: C, raw_pso_data: pso::RawDataSet<R>, handles: handle::Manager<R>, } impl<R: Resources, C: draw::CommandBuf
Format>(&mut self, view: &handle::RenderTargetView<R, T>, value: T::View) where T::View: Into<draw::ClearColor> { let target = self.handles.ref_rtv(view.raw()).clone(); self.command_buffer.clear_color(target, value.into()) } pub fn clear_depth<T: format::DepthFormat>(&mut self, view: &handle::DepthStencilView<R, T>, depth: Depth) { let target = self.handles.ref_dsv(view.raw()).clone(); self.command_buffer.clear_depth_stencil(target, Some(depth), None) } pub fn clear_stencil<T: format::StencilFormat>(&mut self, view: &handle::DepthStencilView<R, T>, stencil: Stencil) { let target = self.handles.ref_dsv(view.raw()).clone(); self.command_buffer.clear_depth_stencil(target, None, Some(stencil)) } pub fn draw<D: pso::PipelineData<R>>(&mut self, slice: &mesh::Slice<R>, pipeline: &pso::PipelineState<R, D::Meta>, user_data: &D) { let (pso, _) = self.handles.ref_pso(pipeline.get_handle()); self.command_buffer.bind_pipeline_state(pso.clone()); self.raw_pso_data.clear(); user_data.bake_to(&mut self.raw_pso_data, pipeline.get_meta(), &mut self.handles); self.command_buffer.bind_vertex_buffers(self.raw_pso_data.vertex_buffers.clone()); self.command_buffer.bind_pixel_targets(self.raw_pso_data.pixel_targets.clone()); self.command_buffer.set_ref_values(self.raw_pso_data.ref_values); self.command_buffer.set_scissor(self.raw_pso_data.scissor); self.command_buffer.bind_constant_buffers(&self.raw_pso_data.constant_buffers); for &(location, value) in &self.raw_pso_data.global_constants { self.command_buffer.bind_global_constant(location, value); } self.command_buffer.bind_unordered_views(&self.raw_pso_data.unordered_views); self.command_buffer.bind_resource_views(&self.raw_pso_data.resource_views); self.command_buffer.bind_samplers(&self.raw_pso_data.samplers); self.draw_slice(slice, slice.instances); } }
fer<R>> From<C> for Encoder<R, C> { fn from(combuf: C) -> Encoder<R, C> { Encoder { command_buffer: combuf, raw_pso_data: pso::RawDataSet::new(), handles: handle::Manager::new(), } } } impl<R: Resources, C: draw::CommandBuffer<R>> Encoder<R, C> { pub fn flush<D>(&mut self, device: &mut D) where D: Device<Resources=R, CommandBuffer=C> { device.pin_submitted_resources(&self.handles); device.submit(&mut self.command_buffer); self.command_buffer.reset(); self.handles.clear(); } pub fn clone_empty(&self) -> Encoder<R, C> { Encoder { command_buffer: self.command_buffer.clone_empty(), raw_pso_data: pso::RawDataSet::new(), handles: handle::Manager::new(), } } pub fn update_buffer<T: Copy>(&mut self, buf: &handle::Buffer<R, T>, data: &[T], offset_elements: usize) -> Result<(), UpdateError<usize>> { if data.is_empty() { return Ok(()) } let elem_size = mem::size_of::<T>(); let offset_bytes = elem_size * offset_elements; let bound = data.len().wrapping_mul(elem_size) + offset_bytes; if bound <= buf.get_info().size { self.command_buffer.update_buffer( self.handles.ref_buffer(buf.raw()).clone(), cast_slice(data), offset_bytes); Ok(()) } else { Err(UpdateError::OutOfBounds { target: bound, source: buf.get_info().size, }) } } pub fn update_constant_buffer<T: Copy>(&mut self, buf: &handle::Buffer<R, T>, data: &T) { use std::slice; let slice = unsafe { slice::from_raw_parts(data as *const T as *const u8, mem::size_of::<T>()) }; self.command_buffer.update_buffer( self.handles.ref_buffer(buf.raw()).clone(), slice, 0); } pub fn update_texture<S, T>(&mut self, tex: &handle::Texture<R, T::Surface>, face: Option<tex::CubeFace>, img: tex::NewImageInfo, data: &[S::DataType]) -> Result<(), UpdateError<[tex::Size; 3]>> where S: format::SurfaceTyped, S::DataType: Copy, T: format::Formatted<Surface = S>, { if data.is_empty() { return Ok(()) } let target_count = img.get_texel_count(); if target_count != data.len() { return Err(UpdateError::UnitCountMismatch { target: target_count, slice: 
data.len(), }) } let dim = tex.get_info().kind.get_dimensions(); if !img.is_inside(dim) { let (w, h, d, _) = dim; return Err(UpdateError::OutOfBounds { target: [ img.xoffset + img.width, img.yoffset + img.height, img.zoffset + img.depth, ], source: [w, h, d], }) } self.command_buffer.update_texture( self.handles.ref_texture(tex.raw()).clone(), tex.get_info().kind, face, cast_slice(data), img.convert(T::get_format())); Ok(()) } fn draw_indexed<T>(&mut self, buf: &handle::Buffer<R, T>, ty: IndexType, slice: &mesh::Slice<R>, base: VertexCount, instances: draw::InstanceOption) { self.command_buffer.bind_index(self.handles.ref_buffer(buf.raw()).clone(), ty); self.command_buffer.call_draw_indexed(slice.start, slice.end - slice.start, base, instances); } fn draw_slice(&mut self, slice: &mesh::Slice<R>, instances: draw::InstanceOption) { match slice.kind { mesh::SliceKind::Vertex => self.command_buffer.call_draw( slice.start, slice.end - slice.start, instances), mesh::SliceKind::Index8(ref buf, base) => self.draw_indexed(buf, IndexType::U8, slice, base, instances), mesh::SliceKind::Index16(ref buf, base) => self.draw_indexed(buf, IndexType::U16, slice, base, instances), mesh::SliceKind::Index32(ref buf, base) => self.draw_indexed(buf, IndexType::U32, slice, base, instances), } } pub fn clear<T: format::Render
random
[ { "content": "/// Create the proxy target views (RTV and DSV) for the attachments of the\n\n/// main framebuffer. These have GL names equal to 0.\n\n/// Not supposed to be used by the users directly.\n\npub fn create_main_targets_raw(dim: d::tex::Dimensions, color_format: d::format::SurfaceType, depth_format: d::format::SurfaceType)\n\n -> (handle::RawRenderTargetView<Resources>, handle::RawDepthStencilView<Resources>) {\n\n use gfx_core::handle::Producer;\n\n let mut temp = handle::Manager::new();\n\n let color_tex = temp.make_texture(\n\n NewTexture::Surface(0),\n\n d::tex::Descriptor {\n\n levels: 1,\n\n kind: d::tex::Kind::D2(dim.0, dim.1, dim.3),\n\n format: color_format,\n\n bind: d::factory::RENDER_TARGET,\n\n usage: d::factory::Usage::GpuOnly,\n\n },\n\n );\n\n let depth_tex = temp.make_texture(\n\n NewTexture::Surface(0),\n\n d::tex::Descriptor {\n\n levels: 1,\n\n kind: d::tex::Kind::D2(dim.0, dim.1, dim.3),\n\n format: depth_format,\n", "file_path": "src/backend/gl/src/lib.rs", "rank": 0, "score": 216882.17876601388 }, { "content": "#[derive(Clone, Debug, Hash, PartialEq)]\n\nenum ViewSource<R: Resources> {\n\n Buffer(Arc<R::Buffer>),\n\n Texture(Arc<R::Texture>),\n\n}\n\n\n\n/// Raw Shader Resource View Handle\n\n#[derive(Clone, Debug, Hash, PartialEq)]\n\npub struct RawShaderResourceView<R: Resources>(Arc<R::ShaderResourceView>, ViewSource<R>);\n\n\n\n/// Type-safe Shader Resource View Handle\n\n#[derive(Clone, Debug, Hash, PartialEq)]\n\npub struct ShaderResourceView<R: Resources, T>(RawShaderResourceView<R>, PhantomData<T>);\n\n\n\nimpl<R: Resources, T> Typed for ShaderResourceView<R, T> {\n\n type Raw = RawShaderResourceView<R>;\n\n fn new(handle: RawShaderResourceView<R>) -> ShaderResourceView<R, T> {\n\n ShaderResourceView(handle, PhantomData)\n\n }\n\n fn raw(&self) -> &RawShaderResourceView<R> {\n\n &self.0\n", "file_path": "src/core/src/handle.rs", "rank": 1, "score": 171929.23394756616 }, { "content": "#[allow(missing_docs)]\n\npub trait 
Resources: Clone + Hash + Debug + Eq + PartialEq {\n\n type Buffer: Clone + Hash + Debug + Eq + PartialEq + Send + Sync + Copy;\n\n type Shader: Clone + Hash + Debug + Eq + PartialEq + Send + Sync;\n\n type Program: Clone + Hash + Debug + Eq + PartialEq + Send + Sync;\n\n type PipelineStateObject: Clone + Hash + Debug + Eq + PartialEq + Send + Sync;\n\n type Texture: Clone + Hash + Debug + Eq + PartialEq + Send + Sync;\n\n type ShaderResourceView: Clone + Hash + Debug + Eq + PartialEq + Send + Sync + Copy;\n\n type UnorderedAccessView: Clone + Hash + Debug + Eq + PartialEq + Send + Sync + Copy;\n\n type RenderTargetView: Clone + Hash + Debug + Eq + PartialEq + Send + Sync + Copy;\n\n type DepthStencilView: Clone + Hash + Debug + Eq + PartialEq + Send + Sync;\n\n type Sampler: Clone + Hash + Debug + Eq + PartialEq + Send + Sync + Copy;\n\n type Fence: Clone + Hash + Debug + Eq + PartialEq + Send + Sync;\n\n}\n\n\n", "file_path": "src/core/src/lib.rs", "rank": 2, "score": 167753.93775492167 }, { "content": "pub fn map_format(format: Format, is_target: bool) -> Option<DXGI_FORMAT> {\n\n use gfx_core::format::SurfaceType::*;\n\n use gfx_core::format::ChannelType::*;\n\n Some(match format.0 {\n\n R4_G4 | R4_G4_B4_A4 | R5_G5_B5_A1 | R5_G6_B5 => return None,\n\n R8 => match format.1 {\n\n Int => DXGI_FORMAT_R8_SINT,\n\n Uint => DXGI_FORMAT_R8_UINT,\n\n Inorm => DXGI_FORMAT_R8_SNORM,\n\n Unorm => DXGI_FORMAT_R8_UNORM,\n\n _ => return None,\n\n },\n\n R8_G8 => match format.1 {\n\n Int => DXGI_FORMAT_R8G8_SINT,\n\n Uint => DXGI_FORMAT_R8G8_UINT,\n\n Inorm => DXGI_FORMAT_R8G8_SNORM,\n\n Unorm => DXGI_FORMAT_R8G8_UNORM,\n\n _ => return None,\n\n },\n\n R8_G8_B8_A8 => match format.1 {\n", "file_path": "src/backend/dx11/src/data.rs", "rank": 3, "score": 161748.14545499123 }, { "content": "/// An ability of a surface type to be used for stencil targets.\n\npub trait StencilSurface: SurfaceTyped {}\n\n\n", "file_path": "src/core/src/format.rs", "rank": 4, "score": 
161602.18501837575 }, { "content": "struct ViewPair<R: gfx::Resources, T: gfx::format::Formatted> {\n\n resource: gfx::handle::ShaderResourceView<R, T::View>,\n\n target: gfx::handle::RenderTargetView<R, T>,\n\n}\n\n\n", "file_path": "examples/deferred/main.rs", "rank": 5, "score": 153992.85021354348 }, { "content": "/// Ability to be used for depth+stencil targets.\n\npub trait DepthStencilFormat: DepthFormat + StencilFormat {}\n", "file_path": "src/core/src/format.rs", "rank": 6, "score": 150073.7912977552 }, { "content": "pub fn create(driver_type: winapi::D3D_DRIVER_TYPE, desc: &winapi::DXGI_SWAP_CHAIN_DESC, format: gfx_core::format::Format)\n\n -> Result<(Device, Factory, *mut winapi::IDXGISwapChain, h::RawRenderTargetView<Resources>), winapi::HRESULT> {\n\n use gfx_core::handle::Producer;\n\n\n\n let mut swap_chain = ptr::null_mut();\n\n let create_flags = winapi::D3D11_CREATE_DEVICE_FLAG(0); //D3D11_CREATE_DEVICE_DEBUG;\n\n let mut device = ptr::null_mut();\n\n let share = Share {\n\n capabilities: gfx_core::Capabilities {\n\n max_vertex_count: 0,\n\n max_index_count: 0,\n\n max_texture_size: 0,\n\n instance_base_supported: false,\n\n instance_call_supported: false,\n\n instance_rate_supported: false,\n\n vertex_base_supported: false,\n\n srgb_color_supported: false,\n\n constant_buffer_supported: true,\n\n unordered_access_view_supported: false,\n\n separate_blending_slots_supported: false,\n", "file_path": "src/backend/dx11/src/lib.rs", "rank": 7, "score": 149953.17737362257 }, { "content": "/// Ability to be used for vertex buffers.\n\npub trait StencilFormat: Formatted {}\n", "file_path": "src/core/src/format.rs", "rank": 8, "score": 147338.8276662624 }, { "content": "/// A helper trait to build index slices from data.\n\npub trait ToIndexSlice<R: Resources> { //TODO: remove/refactor it\n\n /// Make an index slice.\n\n fn to_slice<F: Factory<R>>(self, factory: &mut F) -> Slice<R>;\n\n}\n\n\n\nmacro_rules! 
impl_slice {\n\n ($ty:ty, $index:ident) => (\n\n impl<R: Resources> From<handle::Buffer<R, $ty>> for Slice<R> {\n\n fn from(buf: handle::Buffer<R, $ty>) -> Slice<R> {\n\n Slice {\n\n start: 0,\n\n end: buf.len() as VertexCount,\n\n instances: None,\n\n kind: SliceKind::$index(buf, 0)\n\n }\n\n }\n\n }\n\n impl<'a, R: Resources> ToIndexSlice<R> for &'a [$ty] {\n\n fn to_slice<F: Factory<R>>(self, factory: &mut F) -> Slice<R> {\n\n //debug_assert!(self.len() <= factory.get_capabilities().max_index_count);\n", "file_path": "src/render/src/mesh.rs", "rank": 9, "score": 144674.3727642464 }, { "content": "struct App<R: gfx::Resources, C: gfx::CommandBuffer<R>> {\n\n init: gfx_app::Init<R>,\n\n is_parallel: bool,\n\n forward_pso: gfx::PipelineState<R, forward::Meta>,\n\n encoder: gfx::Encoder<R, C>,\n\n scene: Scene<R, C>,\n\n}\n\n\n\nimpl<R: gfx::Resources, C: gfx::CommandBuffer<R>> App<R, C> {\n\n fn rotate(&mut self, axis: cgmath::Vector3<f32>) {\n\n use cgmath::{EuclideanVector, Matrix4, Rotation3};\n\n let len = axis.length();\n\n for ent in self.scene.share.write().unwrap().entities.iter_mut() {\n\n if !ent.dynamic {\n\n continue\n\n }\n\n // rotate all cubes around the axis\n\n let rot = cgmath::Decomposed {\n\n scale: 1.0,\n\n rot: cgmath::Quaternion::from_axis_angle(\n", "file_path": "examples/shadow/main.rs", "rank": 10, "score": 143086.0024951083 }, { "content": "struct Scene<R: gfx::Resources, C: gfx::CommandBuffer<R>> {\n\n camera: Camera,\n\n lights: Vec<Light<R, C>>,\n\n light_dirty: bool,\n\n light_buf: gfx::handle::Buffer<R, LightParam>,\n\n share: Arc<RwLock<Share<R>>>,\n\n}\n\n\n\n//----------------------------------------\n\n// Section-4: scene construction routines\n\n\n", "file_path": "examples/shadow/main.rs", "rank": 11, "score": 143086.00249510832 }, { "content": "struct Light<R: gfx::Resources, C: gfx::CommandBuffer<R>> {\n\n position: cgmath::Point3<f32>,\n\n mx_view: cgmath::Matrix4<f32>,\n\n projection: cgmath::Perspective<f32>,\n\n color: 
gfx::ColorValue,\n\n shadow: gfx::handle::DepthStencilView<R, Depth>,\n\n encoder: gfx::Encoder<R, C>,\n\n}\n\n\n", "file_path": "examples/shadow/main.rs", "rank": 12, "score": 143086.0024951083 }, { "content": "pub fn generate_mipmap(gl: &gl::Gl, name: Texture, target: gl::types::GLenum) { unsafe {\n\n //can't fail here, but we need to check for integer formats too\n\n gl.BindTexture(target, name);\n\n gl.GenerateMipmap(target);\n\n}}\n", "file_path": "src/backend/gl/src/tex.rs", "rank": 13, "score": 142448.1704991619 }, { "content": "pub fn make_depth_stencil(device: *mut ID3D11Device, dsi: &pso::DepthStencilInfo)\n\n -> *const ID3D11DepthStencilState {\n\n let desc = D3D11_DEPTH_STENCIL_DESC {\n\n DepthEnable: if dsi.depth.is_some() {TRUE} else {FALSE},\n\n DepthWriteMask: D3D11_DEPTH_WRITE_MASK(match dsi.depth {\n\n Some(ref d) if d.write => 1,\n\n _ => 0,\n\n }),\n\n DepthFunc: match dsi.depth {\n\n Some(ref d) => map_function(d.fun),\n\n None => D3D11_COMPARISON_NEVER,\n\n },\n\n StencilEnable: if dsi.front.is_some() || dsi.back.is_some() {TRUE} else {FALSE},\n\n StencilReadMask: map_stencil_mask(dsi, \"read\", |s| (s.mask_read as UINT8)),\n\n StencilWriteMask: map_stencil_mask(dsi, \"write\", |s| (s.mask_write as UINT8)),\n\n FrontFace: map_stencil_side(&dsi.front),\n\n BackFace: map_stencil_side(&dsi.back),\n\n };\n\n\n\n let mut handle = ptr::null_mut();\n\n let hr = unsafe {\n\n (*device).CreateDepthStencilState(&desc, &mut handle)\n\n };\n\n if !SUCCEEDED(hr) {\n\n error!(\"Failed to create depth-stencil state {:?}, descriptor {:#?}, error {:x}\", dsi, desc, hr);\n\n }\n\n handle as *const _\n\n}\n\n\n", "file_path": "src/backend/dx11/src/state.rs", "rank": 14, "score": 136909.44668796952 }, { "content": "/// Compile-time surface type trait.\n\npub trait SurfaceTyped {\n\n /// The corresponding data type to be passed from CPU.\n\n type DataType: Copy;\n\n /// Return the run-time value of the type.\n\n fn get_surface_type() -> SurfaceType;\n\n}\n", 
"file_path": "src/core/src/format.rs", "rank": 15, "score": 135879.83535124752 }, { "content": "/// Compile-time channel type trait.\n\npub trait ChannelTyped {\n\n /// Shader-visible type that corresponds to this channel.\n\n /// For example, normalized integers are visible as floats.\n\n type ShaderType;\n\n /// Return the run-time value of the type.\n\n fn get_channel_type() -> ChannelType;\n\n}\n", "file_path": "src/core/src/format.rs", "rank": 16, "score": 135879.83535124752 }, { "content": "type AttributeSlotSet = usize;\n\n/// Service struct to simplify the implementations of `VertexBuffer` and `InstanceBuffer`.\n\npub struct VertexBufferCommon<T, I>(RawVertexBuffer, PhantomData<(T, I)>);\n\n/// Vertex buffer component. Advanced per vertex.\n\n/// - init: `()`\n\n/// - data: `Buffer<T>`\n\npub type VertexBuffer<T> = VertexBufferCommon<T, [(); 0]>;\n\n/// Instance buffer component. Same as the vertex buffer but advances per instance.\n\npub type InstanceBuffer<T> = VertexBufferCommon<T, [(); 1]>;\n\n/// Raw vertex/instance buffer component.\n\n/// - init: ?\n\n/// - data: `RawBuffer`\n\npub struct RawVertexBuffer(AttributeSlotSet);\n\n/// Constant buffer component.\n\n/// - init: `&str` = name of the buffer\n\n/// - data: `Buffer<T>`\n\npub struct ConstantBuffer<T: Structure<shade::ConstFormat>>(Option<(Usage, ConstantBufferSlot)>, PhantomData<T>);\n\n/// Global (uniform) constant component. Describes a free-standing value passed into\n\n/// the shader, which is not enclosed into any constant buffer. 
Deprecated in DX10 and higher.\n\n/// - init: `&str` = name of the constant\n\n/// - data: `T` = value\n\npub struct Global<T: ToUniform>(Option<shade::Location>, PhantomData<T>);\n\n\n\n\n", "file_path": "src/render/src/pso/buffer.rs", "rank": 17, "score": 135232.00271147204 }, { "content": "pub fn make_blend(device: *mut ID3D11Device, targets: &[Option<pso::ColorTargetDesc>])\n\n -> *const ID3D11BlendState {\n\n let dummy_target = D3D11_RENDER_TARGET_BLEND_DESC {\n\n BlendEnable: FALSE,\n\n SrcBlend: D3D11_BLEND_ZERO,\n\n DestBlend: D3D11_BLEND_ONE,\n\n BlendOp: D3D11_BLEND_OP_ADD,\n\n SrcBlendAlpha: D3D11_BLEND_ZERO,\n\n DestBlendAlpha: D3D11_BLEND_ONE,\n\n BlendOpAlpha: D3D11_BLEND_OP_ADD,\n\n RenderTargetWriteMask: 0xF,\n\n };\n\n let mut desc = D3D11_BLEND_DESC {\n\n AlphaToCoverageEnable: FALSE, //TODO\n\n IndependentBlendEnable: match targets[1..].iter().find(|t| t.is_some()) {\n\n Some(_) => TRUE,\n\n None => FALSE,\n\n },\n\n RenderTarget: [dummy_target; 8],\n\n };\n", "file_path": "src/backend/dx11/src/state.rs", "rank": 18, "score": 134950.37730962926 }, { "content": "pub trait ApplicationBase<R: gfx::Resources, C: gfx::CommandBuffer<R>> {\n\n fn new<F>(F, gfx::Encoder<R, C>, Init<R>) -> Self where\n\n F: gfx::Factory<R>;\n\n fn render<D>(&mut self, &mut D) where\n\n D: gfx::Device<Resources=R, CommandBuffer=C>;\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 19, "score": 134533.84480724094 }, { "content": "#[allow(missing_docs)]\n\npub trait Producer<R: Resources> {\n\n fn make_buffer(&mut self, R::Buffer, BufferInfo) -> RawBuffer<R>;\n\n fn make_shader(&mut self, R::Shader) -> Shader<R>;\n\n fn make_program(&mut self, R::Program, shade::ProgramInfo) -> Program<R>;\n\n fn make_pso(&mut self, R::PipelineStateObject, &Program<R>) -> RawPipelineState<R>;\n\n fn make_texture(&mut self, R::Texture, tex::Descriptor) -> RawTexture<R>;\n\n fn make_buffer_srv(&mut self, R::ShaderResourceView, &RawBuffer<R>) -> RawShaderResourceView<R>;\n\n fn 
make_texture_srv(&mut self, R::ShaderResourceView, &RawTexture<R>) -> RawShaderResourceView<R>;\n\n fn make_buffer_uav(&mut self, R::UnorderedAccessView, &RawBuffer<R>) -> RawUnorderedAccessView<R>;\n\n fn make_texture_uav(&mut self, R::UnorderedAccessView, &RawTexture<R>) -> RawUnorderedAccessView<R>;\n\n fn make_rtv(&mut self, R::RenderTargetView, &RawTexture<R>, tex::Dimensions) -> RawRenderTargetView<R>;\n\n fn make_dsv(&mut self, R::DepthStencilView, &RawTexture<R>, tex::Dimensions) -> RawDepthStencilView<R>;\n\n fn make_sampler(&mut self, R::Sampler, tex::SamplerInfo) -> Sampler<R>;\n\n fn make_fence(&mut self, name: R::Fence) -> Fence<R>;\n\n\n\n /// Walk through all the handles, keep ones that are reference elsewhere\n\n /// and call the provided delete function (resource-specific) for others\n\n fn clean_with<T,\n\n A: Fn(&mut T, &R::Buffer),\n\n B: Fn(&mut T, &R::Shader),\n", "file_path": "src/core/src/handle.rs", "rank": 20, "score": 129608.21908670514 }, { "content": "pub fn bind_stencil(gl: &gl::Gl, stencil: &Option<s::Stencil>, refs: (Stencil, Stencil), cull: s::CullFace) {\n\n fn bind_side(gl: &gl::Gl, face: gl::types::GLenum, side: s::StencilSide, ref_value: Stencil) { unsafe {\n\n gl.StencilFuncSeparate(face, map_comparison(side.fun),\n\n ref_value as gl::types::GLint, side.mask_read as gl::types::GLuint);\n\n gl.StencilMaskSeparate(face, side.mask_write as gl::types::GLuint);\n\n gl.StencilOpSeparate(face, map_operation(side.op_fail),\n\n map_operation(side.op_depth_fail), map_operation(side.op_pass));\n\n }}\n\n match stencil {\n\n &Some(ref s) => {\n\n unsafe { gl.Enable(gl::STENCIL_TEST) };\n\n if cull != CullFace::Front {\n\n bind_side(gl, gl::FRONT, s.front, refs.0);\n\n }\n\n if cull != CullFace::Back {\n\n bind_side(gl, gl::BACK, s.back, refs.1);\n\n }\n\n }\n\n &None => unsafe { gl.Disable(gl::STENCIL_TEST) },\n\n }\n\n}\n\n\n\n\n", "file_path": "src/backend/gl/src/state.rs", "rank": 21, "score": 128972.20147974114 }, { "content": "/// An 
ability of a surface type to be used for depth targets.\n\npub trait DepthSurface: SurfaceTyped {}\n", "file_path": "src/core/src/format.rs", "rank": 22, "score": 127688.67007252257 }, { "content": "/// An ability of a channel type to be used for render targets.\n\npub trait RenderChannel: ChannelTyped {}\n", "file_path": "src/core/src/format.rs", "rank": 23, "score": 127688.67007252257 }, { "content": "/// An ability of a surface type to be used for render targets.\n\npub trait RenderSurface: SurfaceTyped {}\n", "file_path": "src/core/src/format.rs", "rank": 24, "score": 127688.67007252257 }, { "content": "/// An ability of a channel type to be used for textures.\n\npub trait TextureChannel: ChannelTyped {}\n", "file_path": "src/core/src/format.rs", "rank": 25, "score": 127684.21598751389 }, { "content": "/// An ability of a surface type to be used for textures.\n\npub trait TextureSurface: SurfaceTyped {}\n", "file_path": "src/core/src/format.rs", "rank": 26, "score": 127684.21598751389 }, { "content": "/// An ability of a surface type to be used for vertex buffers.\n\npub trait BufferSurface: SurfaceTyped {}\n", "file_path": "src/core/src/format.rs", "rank": 27, "score": 127684.13384334755 }, { "content": "/// a service trait implemented the \"data\" structure of PSO.\n\npub trait PipelineData<R: d::Resources> {\n\n /// The associated \"meta\" struct.\n\n type Meta;\n\n /// Dump all the contained data into the raw data set,\n\n /// given the mapping (\"meta\"), and a handle manager.\n\n fn bake_to(&self, &mut RawDataSet<R>, meta: &Self::Meta, &mut d::handle::Manager<R>);\n\n}\n\n\n\n/// Strongly-typed compiled pipeline state.\n\npub struct PipelineState<R: d::Resources, M>(\n\n d::handle::RawPipelineState<R>, d::Primitive, M);\n\n\n\nimpl<R: d::Resources, M> PipelineState<R, M> {\n\n /// Create a new PSO from a raw handle and the \"meta\" instance.\n\n pub fn new(raw: d::handle::RawPipelineState<R>, prim: d::Primitive, meta: M)\n\n -> PipelineState<R, M> {\n\n 
PipelineState(raw, prim, meta)\n\n }\n\n /// Get a raw handle reference.\n\n pub fn get_handle(&self) -> &d::handle::RawPipelineState<R> {\n\n &self.0\n\n }\n\n /// Get a \"meta\" struct reference. Can be used by the user to check\n\n /// what resources are actually used and what not.\n\n pub fn get_meta(&self) -> &M {\n\n &self.2\n\n }\n\n}\n\n\n", "file_path": "src/render/src/pso/mod.rs", "rank": 28, "score": 121868.72866029239 }, { "content": "/// The \"bind\" logic portion of the PSO component.\n\n/// Defines how the user data translates into the raw data set.\n\npub trait DataBind<R: d::Resources> {\n\n /// The associated \"data\" type - a member of the PSO \"data\" struct.\n\n type Data;\n\n /// Dump the given data into the raw data set.\n\n fn bind_to(&self, &mut RawDataSet<R>, &Self::Data, &mut d::handle::Manager<R>);\n\n}\n", "file_path": "src/render/src/pso/mod.rs", "rank": 29, "score": 121868.49380598028 }, { "content": "/// Bind a sampler using a given binding anchor.\n\n/// Used for GL compatibility profile only. 
The core profile has sampler objects\n\npub fn bind_sampler(gl: &gl::Gl, target: GLenum, info: &t::SamplerInfo) { unsafe {\n\n let (min, mag) = filter_to_gl(info.filter);\n\n\n\n match info.filter {\n\n t::FilterMethod::Anisotropic(fac) =>\n\n gl.TexParameterf(target, gl::TEXTURE_MAX_ANISOTROPY_EXT, fac as GLfloat),\n\n _ => ()\n\n }\n\n\n\n gl.TexParameteri(target, gl::TEXTURE_MIN_FILTER, min as GLint);\n\n gl.TexParameteri(target, gl::TEXTURE_MAG_FILTER, mag as GLint);\n\n\n\n let (s, t, r) = info.wrap_mode;\n\n gl.TexParameteri(target, gl::TEXTURE_WRAP_S, wrap_to_gl(s) as GLint);\n\n gl.TexParameteri(target, gl::TEXTURE_WRAP_T, wrap_to_gl(t) as GLint);\n\n gl.TexParameteri(target, gl::TEXTURE_WRAP_R, wrap_to_gl(r) as GLint);\n\n\n\n gl.TexParameterf(target, gl::TEXTURE_LOD_BIAS, info.lod_bias.into());\n\n let border: [f32; 4] = info.border.into();\n\n gl.TexParameterfv(target, gl::TEXTURE_BORDER_COLOR, &border[0]);\n", "file_path": "src/backend/gl/src/tex.rs", "rank": 30, "score": 120080.49896323649 }, { "content": "pub fn compressed_update(gl: &gl::Gl, kind: Kind, target: GLenum, img: &ImageInfo,\n\n data: *const GLvoid, typ: GLenum, size: GLint)\n\n -> Result<(), Error> {\n\n match kind {\n\n Kind::D1(_) => unsafe {\n\n gl.CompressedTexSubImage1D(\n\n target,\n\n img.mipmap as GLint,\n\n img.xoffset as GLint,\n\n img.width as GLint,\n\n typ,\n\n size as GLint,\n\n data\n\n );\n\n },\n\n Kind::D1Array(_, _) | Kind::D2(_, _, AaMode::Single) => unsafe {\n\n gl.CompressedTexSubImage2D(\n\n target,\n\n img.mipmap as GLint,\n\n img.xoffset as GLint,\n", "file_path": "src/backend/gl/src/tex.rs", "rank": 31, "score": 118210.18169573415 }, { "content": "/// Cast a slice from one POD type to another.\n\npub fn cast_slice<A: Copy, B: Copy>(slice: &[A]) -> &[B] {\n\n use std::slice;\n\n let raw_len = mem::size_of::<A>().wrapping_mul(slice.len());\n\n let len = raw_len / mem::size_of::<B>();\n\n assert_eq!(raw_len, mem::size_of::<B>().wrapping_mul(len));\n\n unsafe {\n\n 
slice::from_raw_parts(slice.as_ptr() as *const B, len)\n\n }\n\n}\n\n\n\n/// Specifies the access allowed to a buffer mapping.\n\n#[derive(Eq, Ord, PartialEq, PartialOrd, Hash, Copy, Clone, Debug)]\n\n#[repr(u8)]\n\npub enum MapAccess {\n\n /// Only allow reads.\n\n Readable,\n\n /// Only allow writes.\n\n Writable,\n\n /// Allow full access.\n\n RW\n", "file_path": "src/core/src/factory.rs", "rank": 32, "score": 117840.03014253944 }, { "content": "/// Initialize with a given size. Raw format version.\n\npub fn init_raw(title: &str, requested_width: winapi::INT, requested_height: winapi::INT, color_format: format::Format)\n\n -> Result<(Window, Device, Factory, gfx_core::handle::RawRenderTargetView<Resources>), InitError> {\n\n let hwnd = match window::create(title, requested_width, requested_height) {\n\n Ok(h) => h,\n\n Err(()) => return Err(InitError::Window),\n\n };\n\n let (width, height) = window::show(hwnd).unwrap();\n\n\n\n let driver_types = [\n\n winapi::D3D_DRIVER_TYPE_HARDWARE,\n\n winapi::D3D_DRIVER_TYPE_WARP,\n\n winapi::D3D_DRIVER_TYPE_REFERENCE,\n\n ];\n\n\n\n let swap_desc = winapi::DXGI_SWAP_CHAIN_DESC {\n\n BufferDesc: winapi::DXGI_MODE_DESC {\n\n Width: width as winapi::UINT,\n\n Height: height as winapi::UINT,\n\n Format: match gfx_device_dx11::map_format(color_format, true) {\n\n Some(fm) => fm,\n", "file_path": "src/window/dxgi/src/lib.rs", "rank": 33, "score": 112964.94577095492 }, { "content": "/// Compile-time full format trait.\n\npub trait Formatted {\n\n /// Associated surface type.\n\n type Surface: SurfaceTyped;\n\n /// Associated channel type.\n\n type Channel: ChannelTyped;\n\n /// Shader view type of this format.\n\n type View;\n\n /// Return the run-time value of the type.\n\n fn get_format() -> Format {\n\n Format(\n\n Self::Surface::get_surface_type(),\n\n Self::Channel::get_channel_type())\n\n }\n\n}\n", "file_path": "src/core/src/format.rs", "rank": 34, "score": 112883.61284558647 }, { "content": "// need a custom depth format 
in order to view SRV depth as float4\n\nstruct DepthFormat;\n\nimpl gfx::format::Formatted for DepthFormat {\n\n type Surface = gfx::format::D24;\n\n type Channel = gfx::format::Unorm;\n\n type View = [f32; 4];\n\n\n\n fn get_format() -> gfx::format::Format {\n\n use gfx::format as f;\n\n f::Format(f::SurfaceType::D24, f::ChannelType::Unorm)\n\n }\n\n}\n\n\n", "file_path": "examples/deferred/main.rs", "rank": 35, "score": 112391.0448272294 }, { "content": "/// Ability to be used for render targets.\n\npub trait RenderFormat: Formatted {}\n", "file_path": "src/core/src/format.rs", "rank": 36, "score": 112344.0384734048 }, { "content": "/// Ability to be used for depth targets.\n\npub trait DepthFormat: Formatted {}\n", "file_path": "src/core/src/format.rs", "rank": 37, "score": 112344.0384734048 }, { "content": "/// Ability to be used for textures.\n\npub trait TextureFormat: Formatted {}\n", "file_path": "src/core/src/format.rs", "rank": 38, "score": 112339.46185805707 }, { "content": "/// Ability to be used for vertex buffers.\n\npub trait BufferFormat: Formatted {}\n", "file_path": "src/core/src/format.rs", "rank": 39, "score": 112339.41986475706 }, { "content": "/// Ability to be used for blended render targets.\n\npub trait BlendFormat: RenderFormat {}\n\n\n\nimpl<F> BufferFormat for F where\n\n F: Formatted,\n\n F::Surface: BufferSurface,\n\n F::Channel: ChannelTyped,\n\n{}\n\nimpl<F> DepthFormat for F where\n\n F: Formatted,\n\n F::Surface: DepthSurface,\n\n F::Channel: RenderChannel,\n\n{}\n\nimpl<F> StencilFormat for F where\n\n F: Formatted,\n\n F::Surface: StencilSurface,\n\n F::Channel: RenderChannel,\n\n{}\n\nimpl<F> DepthStencilFormat for F where\n\n F: DepthFormat + StencilFormat\n\n{}\n", "file_path": "src/core/src/format.rs", "rank": 40, "score": 110486.20031113004 }, { "content": "pub fn map_surface(surface: SurfaceType) -> Option<DXGI_FORMAT> {\n\n use gfx_core::format::SurfaceType::*;\n\n Some(match surface {\n\n R4_G4 | R4_G4_B4_A4 | R5_G5_B5_A1 
| R5_G6_B5 => return None,\n\n R16_G16_B16 => return None,\n\n R8 => DXGI_FORMAT_R8_TYPELESS,\n\n R8_G8 => DXGI_FORMAT_R8G8_TYPELESS,\n\n R8_G8_B8_A8 => DXGI_FORMAT_R8G8B8A8_TYPELESS,\n\n R10_G10_B10_A2 => DXGI_FORMAT_R10G10B10A2_TYPELESS,\n\n R11_G11_B10 => DXGI_FORMAT_R11G11B10_FLOAT, //careful\n\n R16 => DXGI_FORMAT_R16_TYPELESS,\n\n R16_G16 => DXGI_FORMAT_R16G16_TYPELESS,\n\n R16_G16_B16_A16 => DXGI_FORMAT_R16G16B16A16_TYPELESS,\n\n R32 => DXGI_FORMAT_R32_TYPELESS,\n\n R32_G32 => DXGI_FORMAT_R32G32_TYPELESS,\n\n R32_G32_B32 => DXGI_FORMAT_R32G32B32_TYPELESS,\n\n R32_G32_B32_A32 => DXGI_FORMAT_R32G32B32A32_TYPELESS,\n\n D16 => DXGI_FORMAT_R16_TYPELESS,\n\n D24 | D24_S8 => DXGI_FORMAT_R24G8_TYPELESS,\n\n D32 => DXGI_FORMAT_R32_TYPELESS,\n\n })\n\n}\n\n\n", "file_path": "src/backend/dx11/src/data.rs", "rank": 41, "score": 110402.22947896911 }, { "content": "fn format_to_glpixel(format: NewFormat) -> GLenum {\n\n use gfx_core::format::SurfaceType as S;\n\n match format.0 {\n\n S::R8 | S::R16 | S::R32=> gl::RED,\n\n S::R4_G4 | S::R8_G8 | S::R16_G16 | S::R32_G32 => gl::RG,\n\n S::R16_G16_B16 | S::R32_G32_B32 | S::R5_G6_B5 | S::R11_G11_B10 => gl::RGB,\n\n S::R8_G8_B8_A8 | S::R16_G16_B16_A16 | S::R32_G32_B32_A32 |\n\n S::R4_G4_B4_A4 | S::R5_G5_B5_A1 | S::R10_G10_B10_A2 => gl::RGBA,\n\n S::D24_S8 => gl::DEPTH_STENCIL,\n\n S::D16 | S::D24 | S::D32 => gl::DEPTH,\n\n }\n\n}\n\n\n", "file_path": "src/backend/gl/src/tex.rs", "rank": 42, "score": 108877.23674639124 }, { "content": "#[derive(Copy, Clone, Debug)]\n\nenum StorageType {\n\n Var(s::BaseType, s::ContainerType),\n\n Sampler(s::BaseType, s::TextureType, s::SamplerType),\n\n Unknown,\n\n}\n\n\n\nimpl StorageType {\n\n fn new(storage: gl::types::GLenum) -> StorageType {\n\n use gfx_core::shade::{BaseType, ContainerType, TextureType, SamplerType, MatrixFormat};\n\n use gfx_core::shade::IsArray::*;\n\n use gfx_core::shade::IsRect::*;\n\n use gfx_core::shade::IsComparison::*;\n\n use gfx_core::shade::IsMultiSample::*;\n\n 
use self::StorageType::*;\n\n match storage {\n\n gl::FLOAT => Var(BaseType::F32, ContainerType::Single),\n\n gl::FLOAT_VEC2 => Var(BaseType::F32, ContainerType::Vector(2)),\n\n gl::FLOAT_VEC3 => Var(BaseType::F32, ContainerType::Vector(3)),\n\n gl::FLOAT_VEC4 => Var(BaseType::F32, ContainerType::Vector(4)),\n\n\n", "file_path": "src/backend/gl/src/shade.rs", "rank": 43, "score": 108209.74667962291 }, { "content": "fn format_to_gltype(format: NewFormat) -> Result<GLenum, ()> {\n\n use gfx_core::format::SurfaceType as S;\n\n use gfx_core::format::ChannelType as C;\n\n let (fm8, fm16, fm32) = match format.1 {\n\n C::Int | C::Inorm =>\n\n (gl::BYTE, gl::SHORT, gl::INT),\n\n C::Uint | C::Unorm | C::Srgb =>\n\n (gl::UNSIGNED_BYTE, gl::UNSIGNED_SHORT, gl::UNSIGNED_INT),\n\n C::Float => (gl::ZERO, gl::HALF_FLOAT, gl::FLOAT),\n\n };\n\n Ok(match format.0 {\n\n //S::R3_G3_B2 => gl::UNSIGNED_BYTE_3_3_2,\n\n S::R4_G4 => return Err(()),\n\n S::R4_G4_B4_A4 => gl::UNSIGNED_SHORT_4_4_4_4,\n\n S::R5_G5_B5_A1 => gl::UNSIGNED_SHORT_5_5_5_1,\n\n S::R5_G6_B5 => gl::UNSIGNED_SHORT_5_6_5,\n\n S::R8 | S::R8_G8 | S::R8_G8_B8_A8 => fm8,\n\n S::R10_G10_B10_A2 => gl::UNSIGNED_INT_10_10_10_2,\n\n S::R11_G11_B10 => return Err(()),\n\n S::R16 | S::R16_G16 | S::R16_G16_B16 | S::R16_G16_B16_A16 => fm16,\n\n S::R32 | S::R32_G32 | S::R32_G32_B32 | S::R32_G32_B32_A32 => fm32,\n\n S::D16 => gl::UNSIGNED_SHORT,\n\n S::D24 => gl::UNSIGNED_INT,\n\n S::D24_S8 => gl::UNSIGNED_INT_24_8,\n\n S::D32 => gl::FLOAT,\n\n })\n\n}\n\n\n", "file_path": "src/backend/gl/src/tex.rs", "rank": 44, "score": 105878.7870422322 }, { "content": "fn format_to_glfull(format: NewFormat) -> Result<GLenum, ()> {\n\n use gfx_core::format::SurfaceType as S;\n\n use gfx_core::format::ChannelType as C;\n\n let cty = format.1;\n\n Ok(match format.0 {\n\n //S::R3_G3_B2 => gl::R3_G3_B2,\n\n S::R4_G4 => return Err(()),\n\n S::R4_G4_B4_A4 => match cty {\n\n C::Unorm => gl::RGBA4,\n\n _ => return Err(()),\n\n },\n\n S::R5_G5_B5_A1 => match 
cty {\n\n C::Unorm => gl::RGB5_A1,\n\n _ => return Err(()),\n\n },\n\n S::R5_G6_B5 => match cty {\n\n C::Unorm => gl::RGB565,\n\n _ => return Err(()),\n\n },\n\n // 8 bits\n", "file_path": "src/backend/gl/src/tex.rs", "rank": 45, "score": 105878.7870422322 }, { "content": "/// A trait that statically links simple data types to\n\n/// constant formats.\n\npub trait Formatted {\n\n /// Get the associated constant format.\n\n fn get_format() -> ConstFormat;\n\n}\n\n\n\nmacro_rules! impl_base_type {\n\n { $($name:ident = $value:ident ,)* } => {\n\n $(\n\n impl BaseTyped for $name {\n\n fn get_base_type() -> BaseType {\n\n BaseType::$value\n\n }\n\n }\n\n )*\n\n }\n\n}\n\n\n\nmacro_rules! impl_const_vector {\n\n ( $( $num:expr ),* ) => {\n\n $(\n", "file_path": "src/core/src/shade.rs", "rank": 46, "score": 102346.86299481403 }, { "content": "/// Create a texture, using the descriptor, assuming TexStorage is available.\n\npub fn make_with_storage(gl: &gl::Gl, desc: &t::Descriptor, cty: ChannelType) ->\n\n Result<Texture, t::Error> {\n\n let format = NewFormat(desc.format, cty);\n\n let gl_format = match format_to_glfull(format) {\n\n Ok(f) => f,\n\n Err(_) => return Err(t::Error::Format(desc.format, Some(cty))),\n\n };\n\n let fixed_loc = desc.bind.contains(SHADER_RESOURCE);\n\n make_with_storage_impl(gl, desc.kind, gl_format, desc.levels, fixed_loc)\n\n}\n\n\n", "file_path": "src/backend/gl/src/tex.rs", "rank": 47, "score": 102217.18886260704 }, { "content": "/// Create a render surface.\n\npub fn make_surface(gl: &gl::Gl, desc: &t::Descriptor, cty: ChannelType) ->\n\n Result<Surface, t::Error> {\n\n let format = NewFormat(desc.format, cty);\n\n let format_error = t::Error::Format(desc.format, Some(cty));\n\n let fmt = match format_to_glfull(format) {\n\n Ok(f) => f,\n\n Err(_) => return Err(format_error),\n\n };\n\n make_surface_impl(gl, fmt, desc.kind.get_dimensions())\n\n .map_err(|_| format_error)\n\n}\n\n\n", "file_path": "src/backend/gl/src/tex.rs", "rank": 48, 
"score": 102208.22138726024 }, { "content": "struct App<R: gfx::Resources> {\n\n pso: gfx::PipelineState<R, pipe::Meta>,\n\n data: pipe::Data<R>,\n\n slice: gfx::Slice<R>,\n\n}\n\n\n\nimpl<R: gfx::Resources> gfx_app::Application<R> for App<R> {\n\n fn new<F: gfx::Factory<R>>(mut factory: F, init: gfx_app::Init<R>) -> Self {\n\n use gfx::traits::FactoryExt;\n\n\n\n let vs = gfx_app::shade::Source {\n\n glsl_120: include_bytes!(\"shader/triangle_120.glslv\"),\n\n glsl_150: include_bytes!(\"shader/triangle_150.glslv\"),\n\n hlsl_40: include_bytes!(\"data/vertex.fx\"),\n\n .. gfx_app::shade::Source::empty()\n\n };\n\n let fs = gfx_app::shade::Source {\n\n glsl_120: include_bytes!(\"shader/triangle_120.glslf\"),\n\n glsl_150: include_bytes!(\"shader/triangle_150.glslf\"),\n\n hlsl_40: include_bytes!(\"data/pixel.fx\"),\n", "file_path": "examples/triangle/main.rs", "rank": 49, "score": 101978.49778247789 }, { "content": "struct App<R: gfx::Resources> {\n\n pso: gfx::PipelineState<R, pipe::Meta>,\n\n data: pipe::Data<R>,\n\n slice: gfx::Slice<R>,\n\n}\n\n\n\nimpl<R: gfx::Resources> gfx_app::Application<R> for App<R> {\n\n fn new<F: gfx::Factory<R>>(mut factory: F, init: gfx_app::Init<R>) -> Self {\n\n use gfx::traits::FactoryExt;\n\n\n\n let vs = gfx_app::shade::Source {\n\n glsl_120: include_bytes!(\"shader/instancing_120.glslv\"),\n\n glsl_150: include_bytes!(\"shader/instancing_150.glslv\"),\n\n hlsl_40: include_bytes!(\"data/vertex.fx\"),\n\n .. 
gfx_app::shade::Source::empty()\n\n };\n\n let fs = gfx_app::shade::Source {\n\n glsl_120: include_bytes!(\"shader/instancing_120.glslf\"),\n\n glsl_150: include_bytes!(\"shader/instancing_150.glslf\"),\n\n hlsl_40: include_bytes!(\"data/pixel.fx\"),\n", "file_path": "examples/instancing/main.rs", "rank": 50, "score": 101978.49778247789 }, { "content": "struct App<R: gfx::Resources> {\n\n terrain: terrain::Bundle<R>,\n\n blit: blit::Bundle<R>,\n\n light: light::Bundle<R>,\n\n emitter: emitter::Bundle<R>,\n\n intermediate: ViewPair<R, GFormat>,\n\n light_pos_vec: Vec<LightInfo>,\n\n seed: Seed,\n\n debug_buf: Option<gfx::handle::ShaderResourceView<R, [f32; 4]>>,\n\n}\n\n\n\nimpl<R: gfx::Resources> gfx_app::Application<R> for App<R> {\n\n fn new<F: gfx::Factory<R>>(mut factory: F, init: gfx_app::Init<R>) -> Self {\n\n use gfx::traits::FactoryExt;\n\n\n\n let (width, height, _, _) = init.color.get_dimensions();\n\n let (gpos, gnormal, gdiffuse, _depth_resource, depth_target) =\n\n create_g_buffer(width, height, &mut factory);\n\n let res = {\n\n let (_ , srv, rtv) = factory.create_render_target(width, height).unwrap();\n", "file_path": "examples/deferred/main.rs", "rank": 51, "score": 101978.49778247789 }, { "content": "struct Entity<R: gfx::Resources> {\n\n dynamic: bool,\n\n mx_to_world: cgmath::Matrix4<f32>,\n\n batch_shadow: shadow::Data<R>,\n\n batch_forward: forward::Data<R>,\n\n slice: gfx::Slice<R>,\n\n}\n\n\n", "file_path": "examples/shadow/main.rs", "rank": 52, "score": 101978.49778247789 }, { "content": "struct App<R: gfx::Resources>{\n\n bundle: pipe::Bundle<R>,\n\n id: u8,\n\n}\n\n\n\nimpl<R: gfx::Resources> gfx_app::Application<R> for App<R> {\n\n fn new<F: gfx::Factory<R>>(mut factory: F, init: gfx_app::Init<R>) -> Self {\n\n use gfx::traits::FactoryExt;\n\n\n\n let vs = gfx_app::shade::Source {\n\n glsl_120: include_bytes!(\"shader/blend_120.glslv\"),\n\n glsl_150: include_bytes!(\"shader/blend_150.glslv\"),\n\n hlsl_40: 
include_bytes!(\"data/vertex.fx\"),\n\n .. gfx_app::shade::Source::empty()\n\n };\n\n let ps = gfx_app::shade::Source {\n\n glsl_120: include_bytes!(\"shader/blend_120.glslf\"),\n\n glsl_150: include_bytes!(\"shader/blend_150.glslf\"),\n\n hlsl_40: include_bytes!(\"data/pixel.fx\"),\n\n .. gfx_app::shade::Source::empty()\n", "file_path": "examples/blend/main.rs", "rank": 53, "score": 101978.49778247789 }, { "content": "struct Share<R: gfx::Resources> {\n\n entities: Vec<Entity<R>>,\n\n shadow_pso: gfx::PipelineState<R, shadow::Meta>,\n\n}\n\n\n", "file_path": "examples/shadow/main.rs", "rank": 54, "score": 101978.49778247789 }, { "content": "struct App<R: gfx::Resources>{\n\n bundle: pipe::Bundle<R>,\n\n projection: cgmath::Matrix4<f32>,\n\n}\n\n\n\nimpl<R: gfx::Resources> gfx_app::Application<R> for App<R> {\n\n fn new<F: gfx::Factory<R>>(mut factory: F, init: gfx_app::Init<R>) -> Self {\n\n use gfx::traits::FactoryExt;\n\n\n\n let vs = gfx_app::shade::Source {\n\n glsl_150: include_bytes!(\"shader/cubemap_150.glslv\"),\n\n hlsl_40: include_bytes!(\"data/vertex.fx\"),\n\n .. gfx_app::shade::Source::empty()\n\n };\n\n let ps = gfx_app::shade::Source {\n\n glsl_150: include_bytes!(\"shader/cubemap_150.glslf\"),\n\n hlsl_40: include_bytes!(\"data/pixel.fx\"),\n\n .. gfx_app::shade::Source::empty()\n\n };\n\n\n", "file_path": "examples/skybox/main.rs", "rank": 55, "score": 101978.49778247789 }, { "content": "struct App<R: gfx::Resources>{\n\n bundle: pipe::Bundle<R>,\n\n cycles: [f32; 2],\n\n time_start: f64,\n\n}\n\n\n\nimpl<R: gfx::Resources> gfx_app::Application<R> for App<R> {\n\n fn new<F: gfx::Factory<R>>(mut factory: F, init: gfx_app::Init<R>) -> Self {\n\n use gfx::traits::FactoryExt;\n\n\n\n let vs = gfx_app::shade::Source {\n\n glsl_120: include_bytes!(\"shader/flowmap_120.glslv\"),\n\n glsl_150: include_bytes!(\"shader/flowmap_150.glslv\"),\n\n hlsl_40: include_bytes!(\"data/vertex.fx\"),\n\n .. 
gfx_app::shade::Source::empty()\n\n };\n\n let ps = gfx_app::shade::Source {\n\n glsl_120: include_bytes!(\"shader/flowmap_120.glslf\"),\n\n glsl_150: include_bytes!(\"shader/flowmap_150.glslf\"),\n\n hlsl_40: include_bytes!(\"data/pixel.fx\"),\n", "file_path": "examples/flowmap/main.rs", "rank": 56, "score": 101978.49778247789 }, { "content": "//----------------------------------------\n\nstruct App<R: gfx::Resources>{\n\n bundle: pipe::Bundle<R>,\n\n}\n\n\n\nimpl<R: gfx::Resources> gfx_app::Application<R> for App<R> {\n\n fn new<F: gfx::Factory<R>>(mut factory: F, init: gfx_app::Init<R>) -> Self {\n\n use cgmath::{Point3, Vector3};\n\n use cgmath::{Transform, AffineMatrix3};\n\n use gfx::traits::FactoryExt;\n\n\n\n let vs = gfx_app::shade::Source {\n\n glsl_120: include_bytes!(\"shader/cube_120.glslv\"),\n\n glsl_150: include_bytes!(\"shader/cube_150.glslv\"),\n\n hlsl_40: include_bytes!(\"data/vertex.fx\"),\n\n .. gfx_app::shade::Source::empty()\n\n };\n\n let ps = gfx_app::shade::Source {\n\n glsl_120: include_bytes!(\"shader/cube_120.glslf\"),\n\n glsl_150: include_bytes!(\"shader/cube_150.glslf\"),\n\n hlsl_40: include_bytes!(\"data/pixel.fx\"),\n", "file_path": "examples/cube/main.rs", "rank": 57, "score": 101978.49778247789 }, { "content": "struct App<R: gfx::Resources> {\n\n pso: gfx::PipelineState<R, pipe::Meta>,\n\n data: pipe::Data<R>,\n\n slice: gfx::Slice<R>,\n\n}\n\n\n\nimpl<R: gfx::Resources> gfx_app::Application<R> for App<R> {\n\n fn new<F: gfx::Factory<R>>(mut factory: F, init: gfx_app::Init<R>) -> Self {\n\n use gfx::traits::FactoryExt;\n\n\n\n let vs = gfx_app::shade::Source {\n\n glsl_120: include_bytes!(\"shader/terrain_120.glslv\"),\n\n glsl_150: include_bytes!(\"shader/terrain_150.glslv\"),\n\n hlsl_40: include_bytes!(\"data/vertex.fx\"),\n\n .. 
gfx_app::shade::Source::empty()\n\n };\n\n let ps = gfx_app::shade::Source {\n\n glsl_120: include_bytes!(\"shader/terrain_120.glslf\"),\n\n glsl_150: include_bytes!(\"shader/terrain_150.glslf\"),\n\n hlsl_40: include_bytes!(\"data/pixel.fx\"),\n", "file_path": "examples/terrain/main.rs", "rank": 58, "score": 101978.49778247789 }, { "content": "struct App<R: gfx::Resources> {\n\n pso: gfx::PipelineState<R, pipe::Meta>,\n\n data: pipe::Data<R>,\n\n slice: gfx::Slice<R>,\n\n}\n\n\n\nimpl<R: gfx::Resources> gfx_app::Application<R> for App<R> {\n\n fn new<F: gfx::Factory<R>>(mut factory: F, init: gfx_app::Init<R>) -> Self {\n\n use gfx::traits::FactoryExt;\n\n\n\n let vs = gfx_app::shade::Source {\n\n glsl_120: include_bytes!(\"shader/120.glslv\"),\n\n hlsl_40: include_bytes!(\"data/vertex.fx\"),\n\n .. gfx_app::shade::Source::empty()\n\n };\n\n let fs = gfx_app::shade::Source {\n\n glsl_120: include_bytes!(\"shader/120.glslf\"),\n\n hlsl_40: include_bytes!(\"data/pixel.fx\"),\n\n .. gfx_app::shade::Source::empty()\n\n };\n", "file_path": "examples/mipmap/main.rs", "rank": 59, "score": 101978.49778247789 }, { "content": "/// Return new main target views if the window resolution has changed from the old dimensions.\n\npub fn update_views_raw(window: &glutin::Window, old_dimensions: tex::Dimensions,\n\n color_format: format::Format, ds_format: format::Format)\n\n -> Option<(handle::RawRenderTargetView<R>, handle::RawDepthStencilView<R>)>\n\n{\n\n let dim = get_window_dimensions(window);\n\n if dim != old_dimensions {\n\n Some(gfx_device_gl::create_main_targets_raw(dim, color_format.0, ds_format.0))\n\n }else {\n\n None\n\n }\n\n}\n", "file_path": "src/window/glutin/src/lib.rs", "rank": 60, "score": 101561.44079787881 }, { "content": "/// Create a texture, using the descriptor, assuming TexStorage* isn't available.\n\npub fn make_without_storage(gl: &gl::Gl, desc: &t::Descriptor, cty: ChannelType) ->\n\n Result<Texture, t::Error> {\n\n let format = NewFormat(desc.format, 
cty);\n\n let gl_format = match format_to_glfull(format) {\n\n Ok(f) => f as GLint,\n\n Err(_) => return Err(t::Error::Format(desc.format, Some(cty))),\n\n };\n\n let gl_pixel_format = format_to_glpixel(format);\n\n let gl_data_type = match format_to_gltype(format) {\n\n Ok(t) => t,\n\n Err(_) => return Err(t::Error::Format(desc.format, Some(cty))),\n\n };\n\n\n\n let fixed_loc = desc.bind.contains(SHADER_RESOURCE);\n\n make_widout_storage_impl(gl, desc.kind, gl_format, gl_pixel_format, gl_data_type,\n\n desc.levels, fixed_loc)\n\n}\n\n\n", "file_path": "src/backend/gl/src/tex.rs", "rank": 61, "score": 100160.50441279395 }, { "content": "/// A trait that statically links simple data types to\n\n/// base types of the shader constants.\n\npub trait BaseTyped {\n\n fn get_base_type() -> BaseType;\n\n}\n\n\n", "file_path": "src/core/src/shade.rs", "rank": 62, "score": 99988.56228336671 }, { "content": "/// A service trait used to get the raw data out of\n\n/// strong types. Not meant for public use.\n\npub trait Typed: Sized {\n\n /// The raw type behind the phantom.\n\n type Raw;\n\n /// Crete a new phantom from the raw type.\n\n fn new(raw: Self::Raw) -> Self;\n\n /// Get an internal reference to the raw type.\n\n fn raw(&self) -> &Self::Raw;\n\n}\n\n\n\n\n", "file_path": "src/core/src/factory.rs", "rank": 63, "score": 98478.45318721989 }, { "content": "/// A service trait implemented by the \"init\" structure of PSO.\n\npub trait PipelineInit {\n\n /// The associated \"meta\" struct.\n\n type Meta;\n\n /// Attempt to map a PSO descriptor to a give shader program,\n\n /// represented by `ProgramInfo`. 
Returns an instance of the\n\n /// \"meta\" struct upon successful mapping.\n\n fn link_to(&self, &mut Descriptor, &d::shade::ProgramInfo)\n\n -> Result<Self::Meta, InitError>;\n\n}\n\n\n", "file_path": "src/render/src/pso/mod.rs", "rank": 64, "score": 97462.82819999644 }, { "content": "/// A trait to be implemented by any struct having the layout described\n\n/// in the graphics API, like a vertex buffer.\n\npub trait Structure<F> {\n\n /// Get the layout of an element by name.\n\n fn query(&str) -> Option<Element<F>>;\n\n}\n\n\n", "file_path": "src/render/src/pso/buffer.rs", "rank": 65, "score": 95948.40584295208 }, { "content": "fn mock_buffer<T>(len: usize) -> Buffer<DummyResources, T> {\n\n use gfx_core::factory::Typed;\n\n let mut handler = Manager::new();\n\n let raw = handler.make_buffer((), BufferInfo {\n\n role: BufferRole::Vertex,\n\n usage: Usage::Const,\n\n size: mem::size_of::<T>() * len,\n\n stride: 0,\n\n bind: Bind::empty(),\n\n });\n\n Typed::new(raw)\n\n}\n\n\n", "file_path": "tests/handle.rs", "rank": 66, "score": 95450.23300188869 }, { "content": "pub fn init_texture_data(gl: &gl::Gl, name: Texture, desc: t::Descriptor, channel: ChannelType,\n\n data: &[&[u8]]) -> Result<(), t::Error> {\n\n let opt_slices = desc.kind.get_num_slices();\n\n let num_slices = opt_slices.unwrap_or(1) as usize;\n\n let num_mips = desc.levels as usize;\n\n let mut cube_faces = [None; 6];\n\n let faces: &[_] = if desc.kind.is_cube() {\n\n for (cf, orig) in cube_faces.iter_mut().zip(t::CUBE_FACES.iter()) {\n\n *cf = Some(*orig);\n\n }\n\n &cube_faces\n\n } else {\n\n &cube_faces[..1]\n\n };\n\n if data.len() != num_slices * faces.len() * num_mips {\n\n error!(\"Texture expects {} slices {} faces {} mips, given {} data chunks instead\",\n\n num_slices, faces.len(), num_mips, data.len());\n\n return Err(t::Error::Data(0))\n\n }\n\n\n", "file_path": "src/backend/gl/src/tex.rs", "rank": 67, "score": 94914.6067785706 }, { "content": "#[allow(missing_docs)]\n\npub trait 
Factory<R: Resources> {\n\n /// Associated mapper type\n\n type Mapper: Clone + mapping::Raw;\n\n\n\n /// Returns the capabilities available to the specific API implementation\n\n fn get_capabilities(&self) -> &Capabilities;\n\n\n\n // resource creation\n\n fn create_buffer_raw(&mut self, BufferInfo) -> Result<handle::RawBuffer<R>, BufferError>;\n\n fn create_buffer_const_raw(&mut self, data: &[u8], stride: usize, BufferRole, Bind)\n\n -> Result<handle::RawBuffer<R>, BufferError>;\n\n fn create_buffer_const<T: Copy>(&mut self, data: &[T], role: BufferRole, bind: Bind) -> Result<handle::Buffer<R, T>, BufferError> {\n\n self.create_buffer_const_raw(cast_slice(data), mem::size_of::<T>(), role, bind)\n\n .map(|raw| Typed::new(raw))\n\n }\n\n fn create_buffer_dynamic<T>(&mut self, num: usize, role: BufferRole, bind: Bind)\n\n -> Result<handle::Buffer<R, T>, BufferError> {\n\n let stride = mem::size_of::<T>();\n\n let info = BufferInfo {\n\n role: role,\n", "file_path": "src/core/src/factory.rs", "rank": 68, "score": 94757.8470748289 }, { "content": "/// An ability of a channel type to be used for blended render targets.\n\npub trait BlendChannel: RenderChannel {}\n\n\n", "file_path": "src/core/src/format.rs", "rank": 69, "score": 93943.19113866251 }, { "content": "/// Extension to the Device that allows for submitting of commands\n\n/// around a fence\n\npub trait DeviceFence<R: Resources>: Device<Resources=R> where\n\n <Self as Device>::CommandBuffer: draw::CommandBuffer<R> {\n\n /// Submit a command buffer to the stream creating a fence\n\n /// the fence is signaled after the GPU has executed all commands\n\n /// in the buffer\n\n fn fenced_submit(&mut self, &mut Self::CommandBuffer,\n\n after: Option<handle::Fence<R>>) -> handle::Fence<R>;\n\n\n\n /// Wait on the supplied fence stalling the current thread until\n\n /// the fence is satisfied\n\n fn fence_wait(&mut self, fence: &handle::Fence<R>);\n\n}\n", "file_path": "src/core/src/lib.rs", "rank": 70, "score": 
93909.50317047702 }, { "content": "#[allow(missing_docs)]\n\npub trait Builder<'a, R: Resources> {\n\n type RawMapping: Raw;\n\n\n\n fn map_readable<T: Copy>(&'a mut self, Self::RawMapping, usize) -> Readable<T, R, Self> where\n\n Self: Sized + Factory<R>;\n\n fn map_writable<T: Copy>(&'a mut self, Self::RawMapping, usize) -> Writable<T, R, Self> where\n\n Self: Sized + Factory<R>;\n\n fn map_read_write<T: Copy>(&'a mut self, Self::RawMapping, usize) -> RW<T, R, Self> where\n\n Self: Sized + Factory<R>;\n\n}\n\n\n\n\n\nimpl<'a, R: Resources, F: Factory<R>> Builder<'a, R> for F where\n\n F::Mapper: 'a\n\n{\n\n type RawMapping = F::Mapper;\n\n\n\n fn map_readable<T: Copy>(&'a mut self, map: F::Mapper,\n\n length: usize) -> Readable<T, R, Self> {\n\n Readable {\n", "file_path": "src/core/src/mapping.rs", "rank": 71, "score": 93526.30975590547 }, { "content": "#[allow(missing_docs)]\n\npub trait CommandBuffer<R: Resources> {\n\n /// Clone as an empty buffer\n\n fn clone_empty(&self) -> Self;\n\n /// Reset the command buffer contents, retain the allocated storage\n\n fn reset(&mut self);\n\n /// Bind a pipeline state object\n\n fn bind_pipeline_state(&mut self, R::PipelineStateObject);\n\n /// Bind a complete set of vertex buffers\n\n fn bind_vertex_buffers(&mut self, pso::VertexBufferSet<R>);\n\n /// Bind a complete set of constant buffers\n\n fn bind_constant_buffers(&mut self, &[pso::ConstantBufferParam<R>]);\n\n /// Bind a global constant\n\n fn bind_global_constant(&mut self, shade::Location, shade::UniformValue);\n\n /// Bind a complete set of shader resource views\n\n fn bind_resource_views(&mut self, &[pso::ResourceViewParam<R>]);\n\n /// Bind a complete set of unordered access views\n\n fn bind_unordered_views(&mut self, &[pso::UnorderedViewParam<R>]);\n\n /// Bind a complete set of samplers\n\n fn bind_samplers(&mut self, &[pso::SamplerParam<R>]);\n\n /// Bind a complete set of pixel targets, including multiple\n", "file_path": "src/core/src/draw.rs", "rank": 
72, "score": 92606.48909772446 }, { "content": "/// The \"link\" logic portion of a PSO component.\n\n/// Defines the input data for the component.\n\npub trait DataLink<'a>: Sized {\n\n /// The assotiated \"init\" type - a member of the PSO \"init\" struct.\n\n type Init: 'a;\n\n /// Create a new empty data link.\n\n fn new() -> Self;\n\n /// Check if this link is actually used by the shader.\n\n fn is_active(&self) -> bool;\n\n /// Attempt to link with a vertex attribute.\n\n fn link_input(&mut self, _: &d::shade::AttributeVar, _: &Self::Init) ->\n\n Option<Result<d::pso::AttributeDesc, d::format::Format>> { None }\n\n /// Attempt to link with a constant buffer.\n\n fn link_constant_buffer(&mut self, _: &d::shade::ConstantBufferVar, _: &Self::Init) ->\n\n Option<Result<(), d::shade::ConstFormat>> { None }\n\n /// Attempt to link with a global constant.\n\n fn link_global_constant(&mut self, _: &d::shade::ConstVar, _: &Self::Init) ->\n\n Option<Result<(), d::shade::UniformValue>> { None }\n\n /// Attempt to link with an output render target (RTV).\n\n fn link_output(&mut self, _: &d::shade::OutputVar, _: &Self::Init) ->\n\n Option<Result<d::pso::ColorTargetDesc, d::format::Format>> { None }\n\n /// Attempt to link with a depth-stencil target (DSV).\n", "file_path": "src/render/src/pso/mod.rs", "rank": 73, "score": 92440.45716988144 }, { "content": "pub fn update_texture(context: *mut winapi::ID3D11DeviceContext, texture: &Texture, kind: tex::Kind,\n\n face: Option<tex::CubeFace>, data: &[u8], image: &tex::RawImageInfo) {\n\n use gfx_core::tex::CubeFace::*;\n\n use winapi::UINT;\n\n\n\n let array_slice = match face {\n\n Some(PosX) => 0,\n\n Some(NegX) => 1,\n\n Some(PosY) => 2,\n\n Some(NegY) => 3,\n\n Some(PosZ) => 4,\n\n Some(NegZ) => 5,\n\n None => 0,\n\n };\n\n let num_mipmap_levels = 1; //TODO\n\n let subres = array_slice * num_mipmap_levels + (image.mipmap as UINT);\n\n let dst_resource = texture.to_resource();\n\n\n\n match texture.1 {\n\n Usage::Const | 
Usage::CpuOnly(MapAccess::Readable) => {\n", "file_path": "src/backend/dx11/src/execute.rs", "rank": 74, "score": 92430.92861724278 }, { "content": "pub trait Application<R: gfx::Resources>: Sized {\n\n fn new<F: gfx::Factory<R>>(F, Init<R>) -> Self;\n\n fn render<C: gfx::CommandBuffer<R>>(&mut self, &mut gfx::Encoder<R, C>);\n\n #[cfg(target_os = \"windows\")]\n\n fn launch_default(name: &str) where WrapD3D11<Self>: ApplicationD3D11 {\n\n WrapD3D11::<Self>::launch(name, DEFAULT_CONFIG);\n\n }\n\n #[cfg(not(target_os = \"windows\"))]\n\n fn launch_default(name: &str) where WrapGL2<Self>: ApplicationGL2 {\n\n WrapGL2::<Self>::launch(name, DEFAULT_CONFIG);\n\n }\n\n}\n\n\n\npub struct Wrap<R: gfx::Resources, C: gfx::CommandBuffer<R>, A>{\n\n encoder: gfx::Encoder<R, C>,\n\n app: A,\n\n}\n\n\n\n#[cfg(target_os = \"windows\")]\n\npub type D3D11CommandBuffer = gfx_device_dx11::CommandBuffer<gfx_device_dx11::DeferredContext>;\n", "file_path": "src/lib.rs", "rank": 75, "score": 92407.35217688364 }, { "content": "/// Update the internal dimensions of the main framebuffer targets. 
Generic version over the format.\n\npub fn update_views<Cf, Df>(window: &glutin::Window, color_view: &mut handle::RenderTargetView<R, Cf>,\n\n ds_view: &mut handle::DepthStencilView<R, Df>)\n\nwhere\n\n Cf: format::RenderFormat,\n\n Df: format::DepthFormat,\n\n{\n\n use gfx_core::factory::Typed;\n\n let dim = color_view.get_dimensions();\n\n assert_eq!(dim, ds_view.get_dimensions());\n\n if let Some((cv, dv)) = update_views_raw(window, dim, Cf::get_format(), Df::get_format()) {\n\n *color_view = Typed::new(cv);\n\n *ds_view = Typed::new(dv);\n\n }\n\n}\n\n\n", "file_path": "src/window/glutin/src/lib.rs", "rank": 76, "score": 89452.04091625912 }, { "content": "fn match_attribute(_: &shade::AttributeVar, _: Format) -> bool {\n\n true //TODO\n\n}\n\n\n\nimpl<'a,\n\n T: Structure<Format>,\n\n I: AsRef<[()]> + Default,\n\n> DataLink<'a> for VertexBufferCommon<T, I> {\n\n type Init = ();\n\n fn new() -> Self {\n\n VertexBufferCommon(RawVertexBuffer(0), PhantomData)\n\n }\n\n fn is_active(&self) -> bool {\n\n self.0.is_active()\n\n }\n\n fn link_input(&mut self, at: &shade::AttributeVar, _: &Self::Init) ->\n\n Option<Result<pso::AttributeDesc, Format>> {\n\n T::query(&at.name).map(|el| {\n\n let rate = <I as Default>::default().as_ref().len();\n\n self.0.link(at, el, rate as InstanceRate)\n", "file_path": "src/render/src/pso/buffer.rs", "rank": 77, "score": 87737.5884788399 }, { "content": "fn map_stencil_mask<F>(dsi: &pso::DepthStencilInfo, name: &str, accessor: F) -> UINT8\n\n where F: Fn(&state::StencilSide) -> UINT8 {\n\n match (dsi.front, dsi.back) {\n\n (Some(ref front), Some(ref back)) if accessor(front) != accessor(back) => {\n\n error!(\"Different {} masks on stencil front ({}) and back ({}) are not supported\",\n\n name, accessor(front), accessor(back));\n\n accessor(front)\n\n },\n\n (Some(ref front), _) => accessor(front),\n\n (_, Some(ref back)) => accessor(back),\n\n (None, None) => 0,\n\n }\n\n}\n\n\n", "file_path": "src/backend/dx11/src/state.rs", "rank": 
78, "score": 86337.54583314052 }, { "content": "/// Factory extension trait\n\npub trait FactoryExt<R: Resources>: Factory<R> {\n\n /// Create a vertex buffer with an associated slice.\n\n fn create_vertex_buffer<T>(&mut self, data: &[T])\n\n -> (handle::Buffer<R, T>, Slice<R>) where\n\n T: Copy + pso::buffer::Structure<format::Format>\n\n {\n\n let nv = data.len();\n\n //debug_assert!(nv <= self.get_capabilities().max_vertex_count);\n\n let buf = self.create_buffer_const(data, BufferRole::Vertex, Bind::empty())\n\n .unwrap();\n\n (buf, Slice {\n\n start: 0,\n\n end: nv as VertexCount,\n\n instances: None,\n\n kind: SliceKind::Vertex,\n\n })\n\n }\n\n\n\n /// Create a vertex buffer with an index, returned by a slice.\n\n fn create_vertex_buffer_indexed<V, I>(&mut self, vd: &[V], id: I)\n", "file_path": "src/render/src/factory.rs", "rank": 79, "score": 86331.8853725467 }, { "content": "pub fn kind_to_gl(kind: t::Kind) -> GLenum {\n\n match kind {\n\n t::Kind::D1(_) => gl::TEXTURE_1D,\n\n t::Kind::D1Array(_, _) => gl::TEXTURE_1D_ARRAY,\n\n t::Kind::D2(_, _, t::AaMode::Single) => gl::TEXTURE_2D,\n\n t::Kind::D2(_, _, _) => gl::TEXTURE_2D_MULTISAMPLE,\n\n t::Kind::D2Array(_, _, _, t::AaMode::Single) => gl::TEXTURE_2D_ARRAY,\n\n t::Kind::D2Array(_, _, _, _) => gl::TEXTURE_2D_MULTISAMPLE_ARRAY,\n\n t::Kind::D3(_, _, _) => gl::TEXTURE_3D,\n\n t::Kind::Cube(_) => gl::TEXTURE_CUBE_MAP,\n\n t::Kind::CubeArray(_, _) => gl::TEXTURE_CUBE_MAP_ARRAY,\n\n }\n\n}\n\n\n", "file_path": "src/backend/gl/src/tex.rs", "rank": 80, "score": 84357.13487054995 }, { "content": "pub fn update_texture(gl: &gl::Gl, name: Texture,\n\n kind: t::Kind, face: Option<t::CubeFace>,\n\n img: &t::RawImageInfo, slice: &[u8])\n\n -> Result<(), t::Error> {\n\n //TODO: check size\n\n let data = slice.as_ptr() as *const GLvoid;\n\n let pixel_format = format_to_glpixel(img.format);\n\n let data_type = match format_to_gltype(img.format) {\n\n Ok(t) => t,\n\n Err(_) => return Err(t::Error::Format(img.format.0, 
Some(img.format.1))),\n\n };\n\n\n\n let target = kind_to_gl(kind);\n\n unsafe { gl.BindTexture(target, name) };\n\n\n\n let target = kind_face_to_gl(kind, face);\n\n update_texture_impl(gl, kind, target, pixel_format, data_type, img, data)\n\n}\n\n\n", "file_path": "src/backend/gl/src/tex.rs", "rank": 81, "score": 81646.09159068881 }, { "content": "pub fn map_comparison(cmp: Comparison) -> gl::types::GLenum {\n\n match cmp {\n\n Comparison::Never => gl::NEVER,\n\n Comparison::Less => gl::LESS,\n\n Comparison::LessEqual => gl::LEQUAL,\n\n Comparison::Equal => gl::EQUAL,\n\n Comparison::GreaterEqual => gl::GEQUAL,\n\n Comparison::Greater => gl::GREATER,\n\n Comparison::NotEqual => gl::NOTEQUAL,\n\n Comparison::Always => gl::ALWAYS,\n\n }\n\n}\n\n\n", "file_path": "src/backend/gl/src/state.rs", "rank": 82, "score": 80262.42110691888 }, { "content": "/// Create a texture, assuming TexStorage is available.\n\nfn make_with_storage_impl(gl: &gl::Gl, kind: t::Kind, format: GLenum,\n\n levels: t::Level, fixed_sample_locations: bool)\n\n -> Result<Texture, t::Error> {\n\n use std::cmp::max;\n\n\n\n fn min(a: u8, b: u8) -> GLint {\n\n ::std::cmp::min(a, b) as GLint\n\n }\n\n fn mip_level1(w: u16) -> u8 {\n\n ((w as f32).log2() + 1.0) as u8\n\n }\n\n fn mip_level2(w: u16, h: u16) -> u8 {\n\n ((max(w, h) as f32).log2() + 1.0) as u8\n\n }\n\n fn mip_level3(w: u16, h: u16, d: u16) -> u8 {\n\n ((max(w, max(h, d)) as f32).log2() + 1.0) as u8\n\n }\n\n\n\n let (name, target) = make_texture(gl, kind);\n\n match kind {\n", "file_path": "src/backend/gl/src/tex.rs", "rank": 83, "score": 79319.83866839789 }, { "content": "fn make_surface_impl(gl: &gl::Gl, format: GLenum, dim: t::Dimensions)\n\n -> Result<Surface, ()> {\n\n let mut name = 0 as GLuint;\n\n unsafe {\n\n gl.GenRenderbuffers(1, &mut name);\n\n }\n\n\n\n let target = gl::RENDERBUFFER;\n\n unsafe {\n\n gl.BindRenderbuffer(target, name);\n\n }\n\n match dim.3 {\n\n t::AaMode::Single => unsafe {\n\n gl.RenderbufferStorage(\n\n 
target,\n\n format,\n\n dim.0 as GLsizei,\n\n dim.1 as GLsizei\n\n );\n\n },\n", "file_path": "src/backend/gl/src/tex.rs", "rank": 84, "score": 79315.2122381773 }, { "content": "pub fn bind_draw_color_buffers(gl: &gl::Gl, mask: usize) {\n\n let attachments = [\n\n gl::COLOR_ATTACHMENT0, gl::COLOR_ATTACHMENT1, gl::COLOR_ATTACHMENT2,\n\n gl::COLOR_ATTACHMENT3, gl::COLOR_ATTACHMENT4, gl::COLOR_ATTACHMENT5,\n\n gl::COLOR_ATTACHMENT6, gl::COLOR_ATTACHMENT7, gl::COLOR_ATTACHMENT8,\n\n gl::COLOR_ATTACHMENT9, gl::COLOR_ATTACHMENT10, gl::COLOR_ATTACHMENT11,\n\n gl::COLOR_ATTACHMENT12, gl::COLOR_ATTACHMENT13, gl::COLOR_ATTACHMENT14,\n\n gl::COLOR_ATTACHMENT15];\n\n let mut targets = [0; MAX_COLOR_TARGETS];\n\n let mut count = 0;\n\n let mut i = 0;\n\n while mask >> i != 0 {\n\n if mask & (1<<i) != 0 {\n\n targets[count] = attachments[i];\n\n count += 1;\n\n }\n\n i += 1;\n\n }\n\n unsafe { gl.DrawBuffers(count as gl::types::GLint, targets.as_ptr()) };\n\n}\n\n\n", "file_path": "src/backend/gl/src/state.rs", "rank": 85, "score": 78905.86455785582 }, { "content": "pub fn map_dsv_flags(dsf: DepthStencilFlags) -> D3D11_DSV_FLAG {\n\n use gfx_core::tex as t;\n\n let mut out = D3D11_DSV_FLAG(0);\n\n if dsf.contains(t::RO_DEPTH) {\n\n out = out | D3D11_DSV_READ_ONLY_DEPTH;\n\n }\n\n if dsf.contains(t::RO_STENCIL) {\n\n out = out | D3D11_DSV_READ_ONLY_STENCIL;\n\n }\n\n out\n\n}\n", "file_path": "src/backend/dx11/src/data.rs", "rank": 86, "score": 78199.46116134305 }, { "content": "\n\n\n\nimpl<'a, T: format::DepthFormat> DataLink<'a> for DepthTarget<T> {\n\n type Init = state::Depth;\n\n fn new() -> Self { DepthTarget(PhantomData) }\n\n fn is_active(&self) -> bool { true }\n\n fn link_depth_stencil(&mut self, init: &Self::Init) -> Option<pso::DepthStencilDesc> {\n\n Some((T::get_format().0, (*init).into()))\n\n }\n\n}\n\n\n\nimpl<R: Resources, T> DataBind<R> for DepthTarget<T> {\n\n type Data = handle::DepthStencilView<R, T>;\n\n fn bind_to(&self, out: &mut RawDataSet<R>, data: 
&Self::Data, man: &mut handle::Manager<R>) {\n\n let dsv = data.raw();\n\n out.pixel_targets.add_depth_stencil(man.ref_dsv(dsv), true, false, dsv.get_dimensions());\n\n }\n\n}\n\n\n\nimpl<'a, T: format::StencilFormat> DataLink<'a> for StencilTarget<T> {\n", "file_path": "src/render/src/pso/target.rs", "rank": 87, "score": 76977.94463189536 }, { "content": " type Init = state::Stencil;\n\n fn new() -> Self { StencilTarget(PhantomData) }\n\n fn is_active(&self) -> bool { true }\n\n fn link_depth_stencil(&mut self, init: &Self::Init) -> Option<pso::DepthStencilDesc> {\n\n Some((T::get_format().0, (*init).into()))\n\n }\n\n}\n\n\n\nimpl<R: Resources, T> DataBind<R> for StencilTarget<T> {\n\n type Data = (handle::DepthStencilView<R, T>, (target::Stencil, target::Stencil));\n\n fn bind_to(&self, out: &mut RawDataSet<R>, data: &Self::Data, man: &mut handle::Manager<R>) {\n\n let dsv = data.0.raw();\n\n out.pixel_targets.add_depth_stencil(man.ref_dsv(dsv), false, true, dsv.get_dimensions());\n\n out.ref_values.stencil = data.1;\n\n }\n\n}\n\n\n\nimpl<'a, T: format::DepthStencilFormat> DataLink<'a> for DepthStencilTarget<T> {\n\n type Init = (state::Depth, state::Stencil);\n\n fn new() -> Self { DepthStencilTarget(PhantomData) }\n", "file_path": "src/render/src/pso/target.rs", "rank": 88, "score": 76977.60283389188 }, { "content": " fn is_active(&self) -> bool { true }\n\n fn link_depth_stencil(&mut self, init: &Self::Init) -> Option<pso::DepthStencilDesc> {\n\n Some((T::get_format().0, (*init).into()))\n\n }\n\n}\n\n\n\nimpl<R: Resources, T> DataBind<R> for DepthStencilTarget<T> {\n\n type Data = (handle::DepthStencilView<R, T>, (target::Stencil, target::Stencil));\n\n fn bind_to(&self, out: &mut RawDataSet<R>, data: &Self::Data, man: &mut handle::Manager<R>) {\n\n let dsv = data.0.raw();\n\n out.pixel_targets.add_depth_stencil(man.ref_dsv(dsv), true, true, dsv.get_dimensions());\n\n out.ref_values.stencil = data.1;\n\n }\n\n}\n\n\n\n\n\nimpl<'a> DataLink<'a> for Scissor 
{\n\n type Init = ();\n\n fn new() -> Self { Scissor(false) }\n\n fn is_active(&self) -> bool { self.0 }\n", "file_path": "src/render/src/pso/target.rs", "rank": 89, "score": 76976.88668928937 }, { "content": "// Copyright 2015 The Gfx-rs Developers.\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\n//! Render target components for the PSO macro.\n\n\n\nuse std::marker::PhantomData;\n\nuse gfx_core::{ColorSlot, Resources};\n\nuse gfx_core::{format, handle, pso, state, target};\n\nuse gfx_core::factory::Typed;\n", "file_path": "src/render/src/pso/target.rs", "rank": 90, "score": 76975.91305062098 }, { "content": "use gfx_core::shade::OutputVar;\n\nuse super::{DataLink, DataBind, RawDataSet};\n\n\n\n/// Render target component. 
Typically points to a color-formatted texture.\n\n/// - init: `&str` = name of the target\n\n/// - data: `RenderTargetView<T>`\n\npub struct RenderTarget<T>(Option<ColorSlot>, PhantomData<T>);\n\n/// Render target component with active blending mode.\n\n/// - init: (`&str`, `ColorMask`, `Blend` = blending state)\n\n/// - data: `RenderTargetView<T>`\n\npub struct BlendTarget<T>(RawRenderTarget, PhantomData<T>);\n\n/// Raw (untyped) render target component with optional blending.\n\n/// - init: (`&str`, `Format`, `ColorMask`, `Option<Blend>`)\n\n/// - data: `RawRenderTargetView`\n\npub struct RawRenderTarget(Option<ColorSlot>);\n\n/// Depth target component.\n\n/// - init: `Depth` = depth state\n\n/// - data: `DepthStencilView<T>`\n\npub struct DepthTarget<T>(PhantomData<T>);\n\n/// Stencil target component.\n", "file_path": "src/render/src/pso/target.rs", "rank": 91, "score": 76973.90917576682 }, { "content": " RenderTarget(None, PhantomData)\n\n }\n\n fn is_active(&self) -> bool {\n\n self.0.is_some()\n\n }\n\n fn link_output(&mut self, out: &OutputVar, init: &Self::Init) ->\n\n Option<Result<pso::ColorTargetDesc, format::Format>> {\n\n if out.name.is_empty() || &out.name == init {\n\n self.0 = Some(out.slot);\n\n let desc = (T::get_format(), state::MASK_ALL.into());\n\n Some(Ok(desc))\n\n }else {\n\n None\n\n }\n\n }\n\n}\n\n\n\nimpl<R: Resources, T> DataBind<R> for RenderTarget<T> {\n\n type Data = handle::RenderTargetView<R, T>;\n\n fn bind_to(&self, out: &mut RawDataSet<R>, data: &Self::Data, man: &mut handle::Manager<R>) {\n", "file_path": "src/render/src/pso/target.rs", "rank": 92, "score": 76973.88406960637 }, { "content": "\n\nimpl<R: Resources, T> DataBind<R> for BlendTarget<T> {\n\n type Data = handle::RenderTargetView<R, T>;\n\n fn bind_to(&self, out: &mut RawDataSet<R>, data: &Self::Data, man: &mut handle::Manager<R>) {\n\n self.0.bind_to(out, data.raw(), man)\n\n }\n\n}\n\n\n\nimpl<'a> DataLink<'a> for RawRenderTarget {\n\n type Init = (&'a str, 
format::Format, state::ColorMask, Option<state::Blend>);\n\n fn new() -> Self {\n\n RawRenderTarget(None)\n\n }\n\n fn is_active(&self) -> bool {\n\n self.0.is_some()\n\n }\n\n fn link_output(&mut self, out: &OutputVar, init: &Self::Init) ->\n\n Option<Result<pso::ColorTargetDesc, format::Format>> {\n\n if out.name.is_empty() || &out.name == init.0 {\n\n self.0 = Some(out.slot);\n", "file_path": "src/render/src/pso/target.rs", "rank": 93, "score": 76973.24585817968 }, { "content": "/// - init: `Stencil` = stencil state\n\n/// - data: (`DepthStencilView<T>`, `(front, back)` = stencil reference values)\n\npub struct StencilTarget<T>(PhantomData<T>);\n\n/// Depth + stencil target component.\n\n/// - init: (`Depth` = depth state, `Stencil` = stencil state)\n\n/// - data: (`DepthStencilView<T>`, `(front, back)` = stencil reference values)\n\npub struct DepthStencilTarget<T>(PhantomData<T>);\n\n/// Scissor component. Sets up the scissor test for rendering.\n\n/// - init: `()`\n\n/// - data: `Rect` = target area\n\npub struct Scissor(bool);\n\n/// Blend reference component. 
Sets up the reference color for blending.\n\n/// - init: `()`\n\n/// - data: `ColorValue`\n\npub struct BlendRef;\n\n\n\n\n\nimpl<'a, T: format::RenderFormat> DataLink<'a> for RenderTarget<T> {\n\n type Init = &'a str;\n\n fn new() -> Self {\n", "file_path": "src/render/src/pso/target.rs", "rank": 94, "score": 76972.71275281448 }, { "content": " let desc = (init.1, pso::ColorInfo {\n\n mask: init.2,\n\n color: init.3.map(|b| b.color),\n\n alpha: init.3.map(|b| b.alpha),\n\n });\n\n Some(Ok(desc))\n\n }else {\n\n None\n\n }\n\n }\n\n}\n\n\n\nimpl<R: Resources> DataBind<R> for RawRenderTarget {\n\n type Data = handle::RawRenderTargetView<R>;\n\n fn bind_to(&self, out: &mut RawDataSet<R>, data: &Self::Data, man: &mut handle::Manager<R>) {\n\n if let Some(slot) = self.0 {\n\n out.pixel_targets.add_color(slot, man.ref_rtv(data), data.get_dimensions());\n\n }\n\n }\n\n}\n", "file_path": "src/render/src/pso/target.rs", "rank": 95, "score": 76969.94032882625 }, { "content": " if let Some(slot) = self.0 {\n\n out.pixel_targets.add_color(slot, man.ref_rtv(data.raw()), data.raw().get_dimensions());\n\n }\n\n }\n\n}\n\n\n\n\n\nimpl<'a, T: format::BlendFormat> DataLink<'a> for BlendTarget<T> {\n\n type Init = (&'a str, state::ColorMask, state::Blend);\n\n fn new() -> Self {\n\n BlendTarget(RawRenderTarget(None), PhantomData)\n\n }\n\n fn is_active(&self) -> bool {\n\n self.0.is_active()\n\n }\n\n fn link_output(&mut self, out: &OutputVar, init: &Self::Init) ->\n\n Option<Result<pso::ColorTargetDesc, format::Format>> {\n\n self.0.link_output(out, &(init.0, T::get_format(), init.1, Some(init.2)))\n\n }\n\n}\n", "file_path": "src/render/src/pso/target.rs", "rank": 96, "score": 76967.6652316891 }, { "content": " fn link_scissor(&mut self) -> bool { self.0 = true; true }\n\n}\n\n\n\nimpl<R: Resources> DataBind<R> for Scissor {\n\n type Data = target::Rect;\n\n fn bind_to(&self, out: &mut RawDataSet<R>, data: &Self::Data, _: &mut handle::Manager<R>) {\n\n out.scissor = *data;\n\n 
}\n\n}\n\n\n\nimpl<'a> DataLink<'a> for BlendRef {\n\n type Init = ();\n\n fn new() -> Self { BlendRef }\n\n fn is_active(&self) -> bool { true }\n\n}\n\n\n\nimpl<R: Resources> DataBind<R> for BlendRef {\n\n type Data = target::ColorValue;\n\n fn bind_to(&self, out: &mut RawDataSet<R>, data: &Self::Data, _: &mut handle::Manager<R>) {\n\n out.ref_values.blend = *data;\n\n }\n\n}\n", "file_path": "src/render/src/pso/target.rs", "rank": 97, "score": 76966.78718094558 }, { "content": "struct Harness {\n\n start: f64,\n\n num_frames: f64,\n\n}\n\n\n\nimpl Harness {\n\n fn new() -> Harness {\n\n Harness {\n\n start: time::precise_time_s(),\n\n num_frames: 0.0,\n\n }\n\n }\n\n fn bump(&mut self) {\n\n self.num_frames += 1.0;\n\n }\n\n}\n\n\n\nimpl Drop for Harness {\n\n fn drop(&mut self) {\n\n let time_end = time::precise_time_s();\n\n println!(\"Avg frame time: {} ms\",\n\n (time_end - self.start) * 1000.0 / self.num_frames\n\n );\n\n }\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 98, "score": 76902.02063688915 }, { "content": "// Copyright 2015 The Gfx-rs Developers.\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\n//! 
Resource components for PSO macro.\n\n\n\nuse std::marker::PhantomData;\n\nuse gfx_core::{ResourceViewSlot, UnorderedViewSlot, SamplerSlot, Resources};\n\nuse gfx_core::{handle, pso, shade};\n\nuse gfx_core::factory::Typed;\n", "file_path": "src/render/src/pso/resource.rs", "rank": 99, "score": 76806.46634734579 } ]
Rust
website-api-tester/src/main.rs
sovrin-foundation/token-website
9e870c49a5e99b5a6d072cc585d229ce0257c0aa
#[macro_use] extern crate trace_macros; use isahc::prelude::*; use serde::{Serialize, Deserialize}; use sha2::Digest; use sodiumoxide::crypto::sign::{ sign_detached, gen_keypair, ed25519::SecretKey }; use structopt::StructOpt; use web_view::*; #[derive(Debug, StructOpt)] #[structopt( name = "basic", version = "0.1", about = "Sovrin Foundation Token Website" )] struct Opt { #[structopt(subcommand)] pub cmd: Command } #[derive(Debug, StructOpt)] enum Command { #[structopt(name = "sign")] Sign { #[structopt(short, long)] key: Option<String>, #[structopt(name = "TOKEN")] token: String } } #[derive(Serialize)] struct PaymentAddressChallengeReponse { address: String, challenge: String, signature: String } #[derive(Debug, Clone, Serialize, Deserialize)] struct WebCmd { consents: String, data: String, path: String, verb: String, url: String, } const MAIN_PAGE_1: &str = r##" <html> <head lang="en"> <meta charset="utf-8"> <meta name="viewport" content="width=device-width, initial-scale=1"> <link rel="stylesheet" href="https://stackpath.bootstrapcdn.com/bootstrap/4.4.1/css/bootstrap.min.css" integrity="sha384-Vkoo8x4CGsO3+Hhxv8T/Q5PaXtkKtu6ug5TOeNV6gBiFeWPGFN9MuhOf23Q9Ifjh" crossorigin="anonymous"> <style> .table-row { margin: 0px 0px 15px 0px; } </style> </head> <body> <div class="container-fluid"> <div class="row"> <div class="col-md-12">&nbsp;</div> </div> <div class="row table-row"> <div class="col-md-12"><span id="error" style="color:red;"></span></div> </div> <div class="row table-row"> <div class="col-md-2"><strong>URL</strong></div> <div class="col-md-10"><input id="test_url" type="url" placeholder="https://127.0.0.1:8000/api/v1" width="400px"></div> </div> <div class="row table-row"> <div class="col-md-2"><strong>Paths</strong></div> <div class="col-md-4"><select id="paths"> <option value="countries">countries</option> <option value="consents">consents</option> <option value="payment_address_challenge">payment_address_challenge</option> </select></div> <div 
class="col-md-6"><select id="consent_countries"> "##; const MAIN_PAGE_2: &str = r##" </select></div> </div> <div class="row table-row" style="margin:0px 0px 20px 0px;"> <div class="col-md-2"><strong>Commands</strong></div> <div class="col-md-2"><select id="verbs"> <option value="get">GET</option> </select></div> <div class="col-md-8"><p><strong id="verb_data_label"></strong></p><textarea id="verb_data" type="text" width="100%" height="100%"></textarea></div> </div> <div class="row table-row"> <div class="col-md-12"><button onclick="return perform_action();" width="30px" heigth="30px">Send</button></div> </div> <div class="row table-row"> <div class="col-md-2"><strong>Request</strong></div> <div class="col-md-10"><textarea id="request" readonly width="100%" height="100%"></textarea></div> </div> <div class="row table-row"> <div class="col-md-2"><strong>Response</strong></div> <div class="col-md-10"><textarea id="response" readonly width="100%" height="100%"></textarea></div> </div> </div> <script src="https://code.jquery.com/jquery-3.4.1.slim.min.js" integrity="sha384-J6qa4849blE2+poT4WnyKhv5vZF5SrPo0iEjwBvKU7imGFAV0wwj1yYfoRSJoZ+n" crossorigin="anonymous"></script> <script src="https://cdn.jsdelivr.net/npm/[email protected]/dist/umd/popper.min.js" integrity="sha384-Q6E9RHvbIyZFJoft+2mJbHaEWldlvI9IOYy5n3zV9zzTtmI3UksdQRVvoxMfooAo" crossorigin="anonymous"></script> <script src="https://stackpath.bootstrapcdn.com/bootstrap/4.4.1/js/bootstrap.min.js" integrity="sha384-wfSDF2E50Y2D1uUdj0O3uMBJnjuUD4Ih7YwaYd1iqfktj0Uod8GCExl3Og8ifwB6" crossorigin="anonymous"></script> <script type="text/javascript"> function perform_action() { var o = new Object(); o.url = $('#test_url').val(); o.path = $('#paths option:selected').val(); o.verb = $('#verbs option:selected').text(); o.consents = $('#consent_countries option:selected').val(); switch (o.verb) { case "post": case "put": o.data = $('#verb_data').val(); break; default: o.data = ""; break; } 
window.external.invoke(JSON.stringify(o)); } function set_error(message) { $('#error').html(message); } function result_returned(request, response) { $('#request').val(request); $('#response').val(response); } $(document).ready(function() { $('#verb_data').hide(); $('#consent_countries').hide(); $('#verbs').change(function() { var selectedVerb = $(this).children("option:selected").val(); switch (selectedVerb) { case "get": case "delete": $('#verb_data').hide(); break; case "post": case "put": $('#verb_data').show(); $('#verb_data_label').html(selectedVerb.toUpperCase() + " Body Data"); break; default: $('#verb_data').hide(); break; } }); $('#paths').change(function() { var selectedPath = $(this).children("option:selected").val(); switch (selectedPath) { case "countries": $('#verbs').empty().append('<option selected="selected" value="get">GET</option>'); $('#consent_countries').hide(); break; case "consents": $('#verbs').empty().append('<option selected="selected" value="get">GET</option>'); $('#consent_countries').show(); break; case "payment_address_challenge": $('#verbs').empty().append('<option selected="selected" value="get">GET</option><option value="post">POST</option>'); $('#consent_countries').hide(); break; default: $('#verbs').empty(); $('#consent_countries').hide(); break; } }); }); </script> </body> </html> "##; fn main() { let opt = Opt::from_args(); match opt.cmd { Command::Sign { key, token } => { let (pk, sk) = match key { Some(k) => { let k1 = bs58::decode(k).into_vec().unwrap(); let sk1 = SecretKey::from_slice(k1.as_slice()).unwrap(); let pk1 = sk1.public_key(); (pk1, sk1) }, None => gen_keypair() }; let mut sha = sha2::Sha256::new(); let challenge = base64_url::decode(&token).unwrap(); sha.input(format!("\x6DSovrin Signed Message:\nLength: {}\n", challenge.len()).as_bytes()); sha.input(challenge.as_slice()); let data = sha.result(); let signature = sign_detached(data.as_slice(), &sk); let response = PaymentAddressChallengeReponse { address: 
format!("pay:sov:{}", bs58::encode(&pk[..]).with_check().into_string()), challenge: token, signature: base64_url::encode(&signature[..]) }; println!("key = {}", bs58::encode(sk).with_check().into_string()); println!("response = {}", serde_json::to_string(&response).unwrap()); } } }
#[macro_use] extern crate trace_macros; use isahc::prelude::*; use serde::{Serialize, Deserialize}; use sha2::Digest; use sodiumoxide::crypto::sign::{ sign_detached, gen_keypair, ed25519::SecretKey }; use structopt::StructOpt; use web_view::*; #[derive(Debug, StructOpt)] #[structopt( name = "basic", version = "0.1", about = "Sovrin Foundation Token Website" )] struct Opt { #[structopt(subcommand)] pub cmd: Command } #[derive(Debug, StructOpt)] enum Command { #[structopt(name = "sign")] Sign { #[structopt(short, long)] key: Option<String>, #[structopt(name = "TOKEN")] token: String } } #[derive(Serialize)] struct PaymentAddressChallengeReponse { address: String, challenge: String, signature: String } #[derive(Debug, Clone, Serialize, Deserialize)] struct WebCmd { consents: String, data: String, path: String, verb: String, url: String, } const MAIN_PAGE_1: &str = r##" <html> <head lang="en"> <meta charset="utf-8"> <meta name="viewport" content="width=device-width, initial-scale=1"> <link rel="stylesheet" href="https://stackpath.bootstrapcdn.com/bootstrap/4.4.1/css/bootstrap.min.css" integrity="sha384-Vkoo8x4CGsO3+Hhxv8T/Q5PaXtkKtu6ug5TOeNV6gBiFeWPGFN9MuhOf23Q9Ifjh" crossorigin="anonymous"> <style> .table-row { margin: 0px 0px 15px 0px; } </style> </head> <body> <div class="container-fluid"> <div class="row"> <div class="col-md-12">&nbsp;</div> </div> <div class="row table-row"> <div class="col-md-12"><span id="error" style="color:red;"></span></div> </div> <div class="row table-row"> <div class="col-md-2"><strong>URL</strong></div> <div class="col-md-10"><input id="test_url" type="url" placeholder="https://127.0.0.1:8000/api/v1" width="400px"></div> </div> <div class="row table-row"> <div class="col-md-2"><strong>Paths</strong></div> <div class="col-md-4"><select id="paths"> <option value="countries">countries</option> <option value="consents">consents</option> <option value="payment_address_challenge">payment_address_challenge</option> </select></div> <div 
class="col-md-6"><select id="consent_countries"> "##; const MAIN_PAGE_2: &str = r##" </select></div> </div> <div class="row table-row" style="margin:0px 0px 20px 0px;"> <div class="col-md-2"><strong>Commands</strong></div> <div class="col-md-2"><select id="verbs"> <option value="get">GET</option> </select></div> <div class="col-md-8"><p><strong id="verb_data_label"></strong></p><textarea id="verb_data" type="text" width="100%" height="100%"></textarea></div> </div> <div class="row table-row"> <div class="col-md-12"><button onclick="return perform_action();" width="30px" heigth="30px">Send</button></div> </div> <div class="row table-row"> <div class="col-md-2"><strong>Request</strong></div> <div class="col-md-10"><textarea id="request" readonly width="100%" height="100%"></textarea></div> </div> <div class="row table-row"> <div class="col-md-2"><strong>Response</strong></div> <div class="col-md-10"><textarea id="response" readonly width="100%" height="100%"></textarea></div> </div> </div> <script src="https://code.jquery.com/jquery-3.4.1.slim.min.js" integrity="sha384-J6qa4849blE2+poT4WnyKhv5vZF5SrPo0iEjwBvKU7imGFAV0wwj1yYfoRSJoZ+n" crossorigin="anonymous"></script> <script src="https://cdn.jsdelivr.net/npm/[email protected]/dist/umd/popper.min.js" integrity="sha384-Q6E9RHvbIyZFJoft+2mJbHaEWldlvI9IOYy5n3zV9zzTtmI3UksdQRVvoxMfooAo" crossorigin="anonymous"></script> <script src="https://stackpath.bootstrapcdn.com/bootstrap/4.4.1/js/bootstrap.min.js" integrity="sha384-wfSDF2E50Y2D1uUdj0O3uMBJnjuUD4Ih7YwaYd1iqfktj0Uod8GCExl3Og8ifwB6" crossorigin="anonymous"></script> <script type="text/javascript"> function perform_action() { var o = new Object(); o.url = $('#test_url').val(); o.path = $('#paths option:selected').val(); o.verb = $('#verbs option:selected').text(); o.consents = $('#consent_countries option:selected').val(); switch (o.verb) { case "post": case "put": o.data = $('#verb_data').val(); break; default: o.data = ""; break; } 
window.external.invoke(JSON.stringify(o)); } function set_error(message) { $('#error').html(message); } function result_returned(request, response) { $('#request').val(request); $('#response').val(response); } $(document).ready(function() { $('#verb_data').hide(); $('#consent_countries').hide(); $('#verbs').change(function() { var selectedVerb = $(this).children("option:selected").val(); switch (selectedVerb) { case "get": case "delete": $('#verb_data').hide(); break; case "post": case "put": $('#verb_data').show(); $('#verb_data_label').html(selectedVerb.toUpperCase() + " Body Data"); break; default: $('#verb_data').hide(); break; } }); $('#paths').change(function() { var selectedPath = $(this).children("option:selected").val(); switch (selectedPath) { case "countries": $('#verbs').empty().append('<option selected="selected" value="get">GET</option>'); $('#consent_countries').hide(); break; case "consents": $('#verbs').empty().append('<option selected="selected" value="get">GET</option>'); $('#consent_countries').show(); break; case "payment_address_challenge": $('#verbs').empty().append('<option selected="selected" value="get">GET</option><option value="post">POST</option>'); $('#consent_countries').hide(); break; default: $('#verbs').empty(); $('#consent_countries').hide(); break; } }); }); </script> </body> </html> "##; fn main() {
let opt = Opt::from_args(); match opt.cmd { Command::Sign { key, token } => { let (pk, sk) = match key { Some(k) => { let k1 = bs58::decode(k).into_vec().unwrap(); let sk1 = SecretKey::from_slice(k1.as_slice()).unwrap(); let pk1 = sk1.public_key(); (pk1, sk1) }, None => gen_keypair() }; let mut sha = sha2::Sha256::new(); let challenge = base64_url::decode(&token).unwrap(); sha.input(format!("\x6DSovrin Signed Message:\nLength: {}\n", challenge.len()).as_bytes()); sha.input(challenge.as_slice()); let data = sha.result(); let signature = sign_detached(data.as_slice(), &sk); let response = PaymentAddressChallengeReponse { address: format!("pay:sov:{}", bs58::encode(&pk[..]).with_check().into_string()), challenge: token, signature: base64_url::encode(&signature[..]) }; println!("key = {}", bs58::encode(sk).with_check().into_string()); println!("response = {}", serde_json::to_string(&response).unwrap()); } } }
function_block-function_prefix_line
[ { "content": "fn prompt_for_value(value_name: &str) -> String {\n\n loop {\n\n match rpassword::read_password_from_tty(Some(format!(\"Enter {}: \", value_name).as_str())) {\n\n Ok(v) => {\n\n if v.len() > 0 {\n\n return v;\n\n } else {\n\n eprintln!(\"{} cannot be empty.\", value_name);\n\n }\n\n },\n\n Err(e) => {\n\n panic ! (\"An error occurred while reading {}: {}\", value_name, e);\n\n }\n\n };\n\n }\n\n}\n\n\n", "file_path": "website/src/main.rs", "rank": 0, "score": 129728.19778717692 }, { "content": "fn get_trulioo_secret(key_name: &str, secret_backend: Option<SecretBackend>) -> Vec<u8> {\n\n let apikey;\n\n if let Some(backend) = secret_backend {\n\n match backend {\n\n SecretBackend::OsKeyRing => {\n\n let mut keyring = get_os_keyring(TRULIOO_SERVICE).unwrap();\n\n apikey = keyring.get_secret(key_name).unwrap().as_slice().to_vec();\n\n },\n\n _ => {\n\n panic!(\"{} not handled\", backend);\n\n }\n\n }\n\n } else {\n\n panic!(\"trulioo name cannot be used without a secret backend\");\n\n }\n\n apikey\n\n}\n\n\n", "file_path": "website/src/main.rs", "rank": 1, "score": 127025.22620366236 }, { "content": "fn get_config(opt: &Opt) -> Config {\n\n let mut config: Config;\n\n match &opt.config {\n\n Some(c) => {\n\n if !c.exists() || !c.is_file() {\n\n panic!(\"The config file does not exist: '{:?}'\", c);\n\n }\n\n\n\n match fs::read_to_string(c) {\n\n Err(why) => panic!(\"Unable to read {:?}: {}\", c, why.description()),\n\n Ok(contents) => {\n\n config = match toml::from_str(contents.as_str()) {\n\n Ok(f) => f,\n\n Err(e) => panic!(\"An error occurred while parsing '{:?}': {}\", c, e.description())\n\n };\n\n }\n\n };\n\n config.copy_from_opt(opt);\n\n },\n\n None => {\n\n config = get_home_config(opt);\n\n }\n\n };\n\n\n\n config\n\n}\n\n\n", "file_path": "website/src/main.rs", "rank": 2, "score": 116025.0205453014 }, { "content": "fn get_home_config(opt: &Opt) -> Config {\n\n let mut home = PathBuf::new();\n\n home.push(env!(\"HOME\"));\n\n 
home.push(\".token-website\");\n\n if !home.exists() {\n\n fs::create_dir_all(home.clone()).unwrap();\n\n }\n\n home.push(\"config\");\n\n let mut config: Config;\n\n if home.exists() {\n\n let config_temp = match fs::read_to_string(&home) {\n\n Err(why) => panic!(\"Unable to read {:?}: {}\", home, why.description()),\n\n Ok(c) => c\n\n };\n\n config = match toml::from_str(&config_temp) {\n\n Err(why) => panic!(\"Unable to parse {:?}: {}\", home, why.description()),\n\n Ok(t) => t\n\n };\n\n config.copy_from_opt(opt);\n\n } else {\n\n config = opt.into();\n\n }\n\n config\n\n}\n\n\n", "file_path": "website/src/main.rs", "rank": 3, "score": 112567.54685163792 }, { "content": "fn main() {\n\n let opt = Opt::from_args();\n\n let config = get_config(&opt);\n\n\n\n let request = get_trulioo_request(&config);\n\n let mut countries = BTreeMap::new();\n\n\n\n async_std::task::block_on(async {\n\n let codes = request.get_country_codes().await.unwrap();\n\n for code in codes {\n\n if let Ok(c) = Country::from_str(&code) {\n\n countries.insert(c.alpha2.to_string(), c);\n\n }\n\n }\n\n });\n\n\n\n let mut home = PathBuf::new();\n\n home.push(env!(\"HOME\"));\n\n home.push(\".token-website\");\n\n if !home.exists() {\n", "file_path": "website/src/main.rs", "rank": 8, "score": 91400.24225597466 }, { "content": "fn generate_timestamp() -> Result<u64, String> {\n\n Ok(SystemTime::now().duration_since(UNIX_EPOCH).map_err(|e| e.to_string())?.as_secs())\n\n}\n", "file_path": "website/src/main.rs", "rank": 10, "score": 85998.77188831942 }, { "content": "fn get_trulioo_request(config: &Config) -> TruliooRequest {\n\n let (url, key);\n\n if let Some(ref t) = config.trulioo {\n\n url = t.url.clone();\n\n if let Some(key_name) = &t.key_name {\n\n key = std::str::from_utf8(&get_trulioo_secret(&key_name, config.secret_backend)).unwrap().to_string();\n\n } else if let Some(key_value) = &t.key_value {\n\n key = key_value.clone();\n\n } else {\n\n key = 
prompt_for_value(trulioo::API_KEY_HEADER);\n\n }\n\n } else {\n\n url = prompt_for_value(\"trulioo api url\");\n\n key = prompt_for_value(trulioo::API_KEY_HEADER);\n\n }\n\n\n\n TruliooRequest { key, url }\n\n}\n\n\n", "file_path": "website/src/main.rs", "rank": 11, "score": 70099.11347337492 }, { "content": "use crate::secret_backend::SecretBackend;\n\nuse std::path::PathBuf;\n\nuse structopt::StructOpt;\n\n\n\n#[derive(Debug, StructOpt)]\n\n#[structopt(\n\n name = \"basic\",\n\n version = \"0.1\",\n\n about = \"Sovrin Foundation Token Website\"\n\n)]\n\npub struct Opt {\n\n #[structopt(short = \"i\", long)]\n\n pub challenge_signing_key: Option<String>,\n\n #[structopt(short, long, parse(from_os_str))]\n\n pub config: Option<PathBuf>,\n\n #[structopt(short, long, default_value = \"8000\")]\n\n pub port: u16,\n\n #[structopt(short, long)]\n\n pub secretbackend: Option<SecretBackend>,\n\n #[structopt(short, long)]\n\n pub test: bool,\n\n #[structopt(short = \"u\", long)]\n\n pub truliooapiurl: Option<String>,\n\n #[structopt(short = \"n\", long)]\n\n pub truliooapikeyname: Option<String>,\n\n #[structopt(short = \"k\", long)]\n\n pub truliooapikeyvalue: Option<String>,\n\n}\n", "file_path": "website/src/cmd_opt.rs", "rank": 13, "score": 61690.61642122684 }, { "content": "# token-website\n\nA web framework written in Rust for managing Sovrin Tokens\n", "file_path": "README.md", "rank": 16, "score": 55953.751684984025 }, { "content": "use serde::{Deserialize, Serialize};\n\n\n\n#[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)]\n\npub struct Consent {\n\n name: String,\n\n description: String,\n\n country_code: celes::Country\n\n}\n", "file_path": "website/src/consents.rs", "rank": 17, "score": 41494.15360830469 }, { "content": " async_std::task::block_on(async {\n\n consents = request.inner().get_detailed_consents(country).await\n\n });\n\n match consents {\n\n Ok(c) => format!(r#\"{{ \"status\": \"success\", \"result\": {} }}\"#, 
serde_json::to_string(&c).unwrap()),\n\n Err(e) => format!(r#\"{{ \"status\": \"error\", \"message\": {} }}\"#, e)\n\n }\n\n}\n\n\n\n#[get(\"/payment_address_challenge\")]\n\npub(crate) fn get_payment_address_challenge(challenge_signing_key: State<Vec<u8>>) -> String {\n\n let mut rng = rand::rngs::OsRng{};\n\n let mut result = generate_timestamp().unwrap().to_be_bytes().to_vec();\n\n let mut challenge = vec![0u8; 32];\n\n rng.fill_bytes(challenge.as_mut_slice());\n\n\n\n let mut hmac = HmacSha256::new_varkey(&challenge_signing_key.inner().as_slice()).unwrap();\n\n hmac.input(result.as_slice());\n\n hmac.input(challenge.as_slice());\n\n let hash = hmac.result().code();\n", "file_path": "website/src/main.rs", "rank": 18, "score": 37734.152949315096 }, { "content": "\n\n result.extend_from_slice(challenge.as_slice());\n\n result.extend_from_slice(hash.as_slice());\n\n\n\n format!(r#\"{{ \"status\": \"success\", \"result\": \"{}\" }}\"#, base64_url::encode(result.as_slice()))\n\n}\n\n\n\n#[post(\"/payment_address_challenge\", format = \"application/json\", data = \"<challenge>\")]\n\npub(crate) fn verify_payment_address_challenge(challenge: Json<responses::PaymentAddressChallengeResponse>, challenge_signing_key: State<Vec<u8>>) -> String {\n\n const TIMESTAMP: usize = 8;\n\n const NONCE: usize = 32;\n\n const EXPIRE: u64 = 3600;\n\n let response = challenge.into_inner();\n\n\n\n let challenge = match base64_url::decode(&response.challenge) {\n\n Err(why) => return format!(r#\"{{ \"status\": \"error\", \"message\": {} }}\"#, why.description()),\n\n Ok(c) => c,\n\n };\n\n\n\n let signature = match base64_url::decode(&response.signature) {\n", "file_path": "website/src/main.rs", "rank": 19, "score": 37734.09260400757 }, { "content": " error::Error,\n\n fs,\n\n io::Write,\n\n path::PathBuf,\n\n str::FromStr,\n\n time::{SystemTime, UNIX_EPOCH}\n\n};\n\nuse structopt::StructOpt;\n\nuse subtle::ConstantTimeEq;\n\nuse trulioo::TruliooRequest;\n\n\n\nconst 
TOKEN_WEBSITE_SERVICE: &str = \"token_website\";\n\nconst TRULIOO_SERVICE: &str = \"trulioo\";\n\n\n", "file_path": "website/src/main.rs", "rank": 20, "score": 37733.974370895 }, { "content": " .manage(countries)\n\n .manage(base64_url::decode(&config.keys.challenge_signing_key).unwrap())\n\n .manage(request)\n\n .mount(\"/\", StaticFiles::from(concat!(env!(\"CARGO_MANIFEST_DIR\"), \"/public\")))\n\n .mount(\"/api/v1\", routes![get_allowed_countries,\n\n get_consents,\n\n get_payment_address_challenge,\n\n verify_payment_address_challenge]).launch();\n\n}\n\n\n", "file_path": "website/src/main.rs", "rank": 21, "score": 37733.71647197127 }, { "content": "use celes::Country;\n\nuse cmd_opt::Opt;\n\nuse config::Config;\n\nuse ed25519_dalek::{Signature, PublicKey};\n\nuse hmac::{Hmac, Mac};\n\nuse lox::prelude::*;\n\nuse rand::RngCore;\n\nuse rocket::{\n\n State\n\n};\n\nuse rocket_contrib::{\n\n helmet::SpaceHelmet,\n\n json::Json,\n\n serve::StaticFiles,\n\n};\n\nuse secret_backend::SecretBackend;\n\nuse serde::{Deserialize, Serialize};\n\nuse sha2::{Sha256, Digest};\n\nuse std::{\n\n collections::BTreeMap,\n", "file_path": "website/src/main.rs", "rank": 22, "score": 37730.501562454076 }, { "content": "#![feature(proc_macro_hygiene,\n\n decl_macro)]\n\n//#![deny(warnings,\n\n// unused_import_braces,\n\n// unused_qualifications,\n\n// trivial_casts,\n\n// trivial_numeric_casts)]\n\n#[macro_use]\n\nextern crate arrayref;\n\n#[macro_use]\n\nextern crate lazy_static;\n\n#[macro_use]\n\nextern crate rocket;\n\n\n\nmod cmd_opt;\n\nmod config;\n\nmod secret_backend;\n\nmod consents;\n\nmod responses;\n\n\n", "file_path": "website/src/main.rs", "rank": 23, "score": 37729.67034699298 }, { "content": " return format!(r#\"{{ \"status\": \"error\", \"message\": \"Unexpected address type\" }}\"#);\n\n }\n\n\n\n let decodedkey = match bs58::decode(&response.address[8..]).with_check(None).into_vec() {\n\n Err(_) => return format!(r#\"{{ \"status\": \"error\", \"message\": \"Invalid 
address\" }}\"#),\n\n Ok(d) => d,\n\n };\n\n\n\n let pubkey = match PublicKey::from_bytes(decodedkey.as_slice()) {\n\n Err(_) => return format!(r#\"{{ \"status\": \"error\", \"message\": \"Address cannot be converted to a public key\" }}\"#),\n\n Ok(p) => p,\n\n };\n\n\n\n let sig = match Signature::from_bytes(signature.as_slice()) {\n\n Err(_) => return format!(r#\"{{ \"status\": \"error\", \"message\": \"Invalid signature\" }}\"#),\n\n Ok(s) => s,\n\n };\n\n\n\n let mut sha = Sha256::new();\n\n sha.input(format!(\"\\x6DSovrin Signed Message:\\nLength: {}\\n\", challenge.len()).as_bytes());\n\n sha.input(challenge.as_slice());\n\n let digest = sha.result();\n\n\n\n match pubkey.verify(digest.as_slice(), &sig) {\n\n Err(_) => format!(r#\"{{ \"status\": \"success\", \"result\": false }}\"#),\n\n Ok(_) => format!(r#\"{{ \"status\": \"success\", \"result\": true }} \"#)\n\n }\n\n}\n\n\n", "file_path": "website/src/main.rs", "rank": 24, "score": 37727.733857986146 }, { "content": " Err(why) => return format!(r#\"{{ \"status\": \"error\", \"message\": {} }}\"#, why.description()),\n\n Ok(s) => s,\n\n };\n\n\n\n let timestamp = u64::from_be_bytes(*array_ref!(challenge, 0, TIMESTAMP));\n\n\n\n if timestamp + EXPIRE < generate_timestamp().unwrap() {\n\n return format!(r#\"{{ \"status\": \"error\", \"message\": \"Challenge has expired\" }}\"#);\n\n }\n\n\n\n let mut hmac = HmacSha256::new_varkey(&challenge_signing_key.inner().as_slice()).unwrap();\n\n hmac.input(&challenge[..(TIMESTAMP + NONCE)]);\n\n let expected_tag = hmac.result().code();\n\n\n\n //Check if this is a challenge from here\n\n if expected_tag.ct_eq(&challenge[(TIMESTAMP + NONCE)..]).unwrap_u8() != 1 {\n\n return format!(r#\"{{ \"status\": \"error\", \"message\": \"Invalid challenge\" }}\"#);\n\n }\n\n\n\n if response.address.len() < 8 || &response.address[..8] != \"pay:sov:\" {\n", "file_path": "website/src/main.rs", "rank": 25, "score": 37725.43592741954 }, { "content": " 
fs::create_dir_all(home.clone()).unwrap();\n\n }\n\n home.push(\"config\");\n\n\n\n if !home.exists() {\n\n let mut file = match fs::File::create(&home) {\n\n Err(why) => panic!(\"Couldn't create {:?}: {}\", home, why.description()),\n\n Ok(file) => file\n\n };\n\n println!(\"config = {:?}\", config);\n\n let recipe_toml = toml::Value::try_from(&config).unwrap();\n\n let contents = toml::to_string(&recipe_toml).unwrap();\n\n println!(\"contents = {}\", contents);\n\n if let Err(why) = file.write_all(contents.as_bytes()) {\n\n panic!(\"Unable to write to {:?}: {}\", home, why.description());\n\n }\n\n }\n\n\n\n rocket::ignite()\n\n .attach(SpaceHelmet::default())\n", "file_path": "website/src/main.rs", "rank": 26, "score": 37719.449587553936 }, { "content": "type HmacSha256 = Hmac<Sha256>;\n\n\n\n#[get(\"/countries\")]\n\npub(crate) fn get_allowed_countries(countries: State<BTreeMap<String, Country>>) -> String {\n\n #[derive(Serialize)]\n\n struct SimpleCountry {\n\n alpha2: String,\n\n long_name: String\n\n };\n\n let list = countries.inner().iter().map(|(_, c)| SimpleCountry { alpha2: c.alpha2.to_string(), long_name: c.long_name.to_string() }).collect::<Vec<SimpleCountry>>();\n\n format!(r#\"{{ \"status\": \"success\", \"result\": {} }}\"#, serde_json::to_string(&list).unwrap())\n\n}\n\n\n\n#[get(\"/consents/<country>\")]\n\npub(crate) fn get_consents(country: String, request: State<TruliooRequest>, countries: State<BTreeMap<String, Country>>) -> String {\n\n if !countries.inner().contains_key(&country) {\n\n return format!(r#\"{{ \"status\": \"error\", \"message\": {} }}\"#, \"Invalid country code\");\n\n }\n\n\n\n let mut consents = Ok(Vec::new());\n", "file_path": "website/src/main.rs", "rank": 37, "score": 31010.326685605345 }, { "content": "use crate::cmd_opt::Opt;\n\nuse crate::secret_backend::SecretBackend;\n\nuse rand::RngCore;\n\nuse serde::{Serialize, Deserialize};\n\nuse zeroize::Zeroize;\n\n\n\n#[derive(Clone, Debug, Deserialize, Serialize)]\n\npub 
struct Config {\n\n pub keys: Keys,\n\n pub port: u16,\n\n pub secret_backend: Option<SecretBackend>,\n\n pub trulioo: Option<Trulioo>\n\n}\n\n\n\nimpl Config {\n\n pub fn copy_from_opt(&mut self, opt: &Opt) {\n\n if let Some(ref c) = opt.challenge_signing_key {\n\n if let Err(why) = base64_url::decode(c) {\n\n panic!(\"Incompatible format for challenge signing key: {}\", why);\n\n }\n", "file_path": "website/src/config.rs", "rank": 38, "score": 17627.244443604024 }, { "content": " }\n\n }\n\n}\n\n\n\n#[derive(Clone, Debug, Deserialize, Serialize)]\n\npub struct Trulioo {\n\n pub key_name: Option<String>,\n\n pub key_value: Option<String>,\n\n pub url: String\n\n}\n\n\n\n#[derive(Clone, Debug, Deserialize, Serialize)]\n\npub struct Keys {\n\n pub challenge_signing_key: String\n\n}\n\n\n\nimpl Default for Keys {\n\n fn default() -> Self {\n\n let mut rng = rand::rngs::OsRng{};\n\n let mut key = vec![0u8; 32];\n\n rng.fill_bytes(key.as_mut_slice());\n\n let challenge_signing_key = base64_url::encode(&key);\n\n key.zeroize();\n\n Self { challenge_signing_key }\n\n }\n\n}\n", "file_path": "website/src/config.rs", "rank": 39, "score": 17625.85321451085 }, { "content": "use serde::Deserialize;\n\n\n\n#[derive(Deserialize)]\n\npub(crate) struct PaymentAddressChallengeResponse {\n\n pub address: String,\n\n pub challenge: String,\n\n pub signature: String\n\n}\n", "file_path": "website/src/responses.rs", "rank": 40, "score": 17621.638266759124 }, { "content": " self.keys.challenge_signing_key = c.to_string();\n\n }\n\n\n\n if let Some(ref mut t) = self.trulioo {\n\n if let Some(url) = &opt.truliooapiurl {\n\n t.url = url.to_string();\n\n }\n\n if let Some(name) = &opt.truliooapikeyname {\n\n t.key_name = Some(name.to_string());\n\n }\n\n if let Some(value) = &opt.truliooapikeyvalue {\n\n t.key_value = Some(value.to_string());\n\n }\n\n } else {\n\n let url = opt.truliooapiurl.clone().unwrap_or(String::new());\n\n if let Some(name) = &opt.truliooapikeyname {\n\n 
self.trulioo = Some(Trulioo {\n\n key_name: Some(name.to_string()),\n\n key_value: None,\n\n url\n", "file_path": "website/src/config.rs", "rank": 41, "score": 17621.26388950013 }, { "content": " trulioo = Some(Trulioo {\n\n key_name: None,\n\n key_value: Some(value.to_string()),\n\n url\n\n });\n\n }\n\n let keys =\n\n if let Some(ref c) = opt.challenge_signing_key {\n\n if let Err(why) = base64_url::decode(c) {\n\n panic!(\"Incompatible format for challenge signing key: {}\", why);\n\n }\n\n Keys { challenge_signing_key: c.to_string() }\n\n } else {\n\n Keys::default()\n\n };\n\n Config {\n\n keys,\n\n port: opt.port,\n\n secret_backend: opt.secretbackend,\n\n trulioo\n", "file_path": "website/src/config.rs", "rank": 42, "score": 17617.628607147577 }, { "content": " Config {\n\n keys: Keys::default(),\n\n port: 8000,\n\n secret_backend: None,\n\n trulioo: None\n\n }\n\n }\n\n}\n\n\n\nimpl From<&Opt> for Config {\n\n fn from(opt: &Opt) -> Self {\n\n let mut trulioo = None;\n\n let url = opt.truliooapiurl.clone().unwrap_or(String::new());\n\n if let Some(name) = &opt.truliooapikeyname {\n\n trulioo = Some(Trulioo {\n\n key_name: Some(name.to_string()),\n\n key_value: None,\n\n url\n\n });\n\n } else if let Some(value) = &opt.truliooapikeyvalue {\n", "file_path": "website/src/config.rs", "rank": 43, "score": 17616.785605750996 }, { "content": " });\n\n } else if let Some(value) = &opt.truliooapikeyvalue {\n\n self.trulioo = Some(Trulioo {\n\n key_name: None,\n\n key_value: Some(value.to_string()),\n\n url\n\n });\n\n }\n\n }\n\n\n\n if opt.secretbackend.is_some() {\n\n self.secret_backend = opt.secretbackend.clone();\n\n }\n\n\n\n self.port = opt.port;\n\n }\n\n}\n\n\n\nimpl Default for Config {\n\n fn default() -> Self {\n", "file_path": "website/src/config.rs", "rank": 44, "score": 17615.73026726187 }, { "content": "use self::SecretBackend::*;\n\nuse serde::{Deserialize, Serialize};\n\nuse std::convert::TryFrom;\n\nuse std::str::FromStr;\n\n\n\n#[derive(Clone, 
Copy, Debug, Deserialize, Serialize)]\n\npub enum SecretBackend {\n\n AwsKms,\n\n AzureKeyVault,\n\n OsKeyRing,\n\n}\n\n\n\nimpl std::fmt::Display for SecretBackend {\n\n fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {\n\n match *self {\n\n AwsKms => write!(f, \"awskms\"),\n\n AzureKeyVault => write!(f, \"azurekeyvault\"),\n\n OsKeyRing => write!(f, \"oskeyring\")\n\n }\n\n }\n", "file_path": "website/src/secret_backend.rs", "rank": 45, "score": 16712.115317998592 }, { "content": "}\n\n\n\nimpl FromStr for SecretBackend {\n\n type Err = String;\n\n\n\n fn from_str(s: &str) -> Result<Self, Self::Err> {\n\n SecretBackend::try_from(s)\n\n }\n\n}\n\n\n\nimpl TryFrom<&str> for SecretBackend {\n\n type Error = String;\n\n\n\n fn try_from(s: &str) -> Result<Self, Self::Error> {\n\n match s.to_lowercase().as_str() {\n\n \"awskms\" => Ok(AwsKms),\n\n \"azurekeyvault\" => Ok(AzureKeyVault),\n\n \"oskeyring\" => Ok(OsKeyRing),\n\n _ => Err(format!(\"Unknown value: {}\", s))\n\n }\n", "file_path": "website/src/secret_backend.rs", "rank": 46, "score": 16705.29003195945 }, { "content": " }\n\n}\n\n\n\nimpl TryFrom<String> for SecretBackend {\n\n type Error = String;\n\n\n\n fn try_from(s: String) -> Result<Self, Self::Error> {\n\n SecretBackend::try_from(s.as_str())\n\n }\n\n}\n", "file_path": "website/src/secret_backend.rs", "rank": 47, "score": 16703.281760549155 }, { "content": " let body = self\n\n .get(format!(\n\n \"{}/configuration/v1/recommendedfields/{}/{}\",\n\n self.url, CONFIGURATION_NAME, country\n\n ))\n\n .await?;\n\n Ok(body)\n\n }\n\n\n\n pub async fn get_consents<S: Display>(&self, country: S) -> Result<Vec<Consent>, String> {\n\n let body = self\n\n .get(format!(\n\n \"{}/configuration/v1/consents/{}/{}\",\n\n self.url, CONFIGURATION_NAME, country\n\n ))\n\n .await?;\n\n let result: Vec<String> = serde_json::from_str(&body).map_err(|e| format!(\"{:?}\", e))?;\n\n let result = result\n\n .iter()\n\n .map(|c| Consent {\n", "file_path": 
"trulioo/src/lib.rs", "rank": 48, "score": 18.9192710156085 }, { "content": "use indexmap::IndexMap;\n\nuse isahc::prelude::*;\n\nuse serde::{Deserialize, Serialize};\n\nuse std::fmt::Display;\n\nuse zeroize::Zeroize;\n\n\n\npub const TRIAL_BASE_URL: &str = \"https://gateway.Trulioo.com/trial/configuration\";\n\npub const BASE_URL: &str = \"https://api.globaldatacompany.com/\";\n\npub const API_KEY_HEADER: &str = \"x-trulioo-api-key\";\n\npub const CONFIGURATION_NAME: &str = \"Identity%20Verification\";\n\n\n\n#[derive(Copy, Clone, Debug, Deserialize, Serialize)]\n\npub enum Gender {\n\n #[serde(rename = \"M\")]\n\n Male,\n\n #[serde(rename = \"F\")]\n\n Female,\n\n}\n\n\n\n#[derive(Copy, Clone, Debug, Deserialize, Serialize)]\n", "file_path": "trulioo/src/lib.rs", "rank": 49, "score": 18.571274760105606 }, { "content": " name: c.to_string(),\n\n text: None,\n\n url: None,\n\n })\n\n .collect();\n\n Ok(result)\n\n }\n\n\n\n pub async fn get_detailed_consents<S: Display>(\n\n &self,\n\n country: S,\n\n ) -> Result<Vec<Consent>, String> {\n\n let body = self\n\n .get(format!(\n\n \"{}/configuration/v1/detailedConsents/{}/{}\",\n\n self.url, CONFIGURATION_NAME, country\n\n ))\n\n .await?;\n\n let result: Vec<Consent> = serde_json::from_str(&body).map_err(|e| format!(\"{:?}\", e))?;\n\n Ok(result)\n", "file_path": "trulioo/src/lib.rs", "rank": 50, "score": 18.250581519603468 }, { "content": " \"{}/configuration/v1/countrysubdivisions/{}\",\n\n self.url, country\n\n ))\n\n .await?;\n\n let result: Vec<Subdivision> =\n\n serde_json::from_str(&body).map_err(|e| format!(\"{:?}\", e))?;\n\n Ok(result)\n\n }\n\n\n\n pub async fn get_fields<S: Display>(&self, country: S) -> Result<String, String> {\n\n let body = self\n\n .get(format!(\n\n \"{}/configuration/v1/fields/{}/{}\",\n\n self.url, CONFIGURATION_NAME, country\n\n ))\n\n .await?;\n\n Ok(body)\n\n }\n\n\n\n pub async fn get_recommended_fields<S: Display>(&self, country: S) -> Result<String, String> {\n", "file_path": 
"trulioo/src/lib.rs", "rank": 51, "score": 16.389280735626098 }, { "content": " }\n\n\n\n pub async fn get_test_entities<S: Display>(\n\n &self,\n\n country: S,\n\n ) -> Result<Vec<Option<Entity>>, String> {\n\n let body = self\n\n .get(format!(\n\n \"{}/configuration/v1/testentities/{}/{}\",\n\n self.url, CONFIGURATION_NAME, country\n\n ))\n\n .await?;\n\n let result: Vec<Option<Entity>> =\n\n serde_json::from_str(&body).map_err(|e| format!(\"{:?}\", e))?;\n\n Ok(result)\n\n }\n\n\n\n pub async fn verify_identity<S: Display>(\n\n &self,\n\n request: &VerifyIdentityRequest,\n", "file_path": "trulioo/src/lib.rs", "rank": 52, "score": 16.246204606641438 }, { "content": "}\n\n\n\nimpl TruliooRequest {\n\n pub async fn get_country_codes(&self) -> Result<Vec<String>, String> {\n\n let body = self\n\n .get(format!(\n\n \"{}/configuration/v1/countrycodes/{}\",\n\n self.url, CONFIGURATION_NAME\n\n ))\n\n .await?;\n\n let result: Vec<String> = serde_json::from_str(&body).map_err(|e| format!(\"{:?}\", e))?;\n\n Ok(result)\n\n }\n\n\n\n pub async fn get_country_subdivisions<S: Display>(\n\n &self,\n\n country: S,\n\n ) -> Result<Vec<Subdivision>, String> {\n\n let body = self\n\n .get(format!(\n", "file_path": "trulioo/src/lib.rs", "rank": 53, "score": 16.221077714139877 }, { "content": "\n\nimpl Default for TruliooRequest {\n\n fn default() -> Self {\n\n TruliooRequest { key: String::new(), url: String::new() }\n\n }\n\n}\n\n\n\nmacro_rules! 
api_obj_impl {\n\n ($class:ident, $($rename:expr => $field:ident: $ty:ty),+) => {\n\n #[derive(Clone, Debug, Deserialize, Serialize)]\n\n pub struct $class {\n\n $(\n\n #[serde(rename = $rename)]\n\n pub $field: $ty\n\n ),+\n\n }\n\n\n\n display_impl!($class, $($field),+);\n\n };\n\n}\n", "file_path": "trulioo/src/lib.rs", "rank": 54, "score": 16.119097748358346 }, { "content": " ) -> Result<VerifyIdentityResponse, String> {\n\n let post_body = serde_json::to_string(request).map_err(|e| format!(\"{:?}\", e))?;\n\n let body = self\n\n .post(format!(\"{}/verifications/v1/verify\", self.url), post_body)\n\n .await?;\n\n let result: VerifyIdentityResponse =\n\n serde_json::from_str(&body).map_err(|e| format!(\"{:?}\", e))?;\n\n Ok(result)\n\n }\n\n\n\n pub async fn get_document_types<S: Display>(\n\n &self,\n\n country: S,\n\n ) -> Result<IndexMap<String, Vec<DocumentTypes>>, String> {\n\n let body = self\n\n .get(format!(\n\n \"{}/configuration/v1/documentTypes/{}\",\n\n self.url, country\n\n ))\n\n .await?;\n", "file_path": "trulioo/src/lib.rs", "rank": 55, "score": 15.455681443624036 }, { "content": " \"YearOfExpiry\" => year_of_expiry: usize);\n\n\n\napi_obj_impl!(VerifyIdentityRequest,\n\n \"AcceptTruliooTermsAndConditions\" => accept_trulioo_terms_and_conditions: bool,\n\n \"ConfigurationName\" => configuration_name: String,\n\n \"CallBackUrl\" => callback_url: String,\n\n \"ConsentForDataSources\" => consent_for_data_sources: Vec<String>,\n\n \"CountryCode\" => country_code: String,\n\n \"CustomerReferenceID\" => customer_reference_id: String,\n\n \"DataFields\" => datafields: DataFields,\n\n \"Timeout\" => timeout: Option<usize>,\n\n \"CleansedAddress\" => cleansed_address: Option<bool>);\n\n\n\napi_obj_impl!(DataFields,\n\n \"PersonInfo\" => person_info: Option<PersonInfo>,\n\n \"Location\" => location: Option<Location>,\n\n \"Communication\" => communication: Option<Communication>,\n\n \"DriverLicence\" => driver_license: Option<DriverLicense>,\n\n 
\"NationalIds\" => national_ids: Option<Vec<NationalIds>>,\n\n \"Passport\" => passport: Option<Passport>,\n", "file_path": "trulioo/src/lib.rs", "rank": 56, "score": 14.668650666869976 }, { "content": " \"Errors\" => errors: Vec<String>,\n\n \"Rule\" => rule: String,\n\n \"Note\" => note: String);\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use async_std::task;\n\n use std::env;\n\n use std::fs;\n\n use std::path::Path;\n\n use toml;\n\n\n\n #[test]\n\n fn get_country_codes_works() {\n\n\n\n let request;\n\n if Path::new(\".env\").exists() {\n\n let config: Config = toml::from_str(&fs::read_to_string(\".env\").unwrap()).unwrap();\n\n request = config.api.into();\n", "file_path": "trulioo/src/lib.rs", "rank": 57, "score": 13.40127969518965 }, { "content": "pub enum NationalId {\n\n Id,\n\n Health,\n\n SocialService,\n\n TaxIdNumber,\n\n}\n\n\n\n#[derive(Copy, Clone, Debug, Deserialize, Serialize)]\n\npub enum DocumentTypes {\n\n DrivingLicence,\n\n IdentityCard,\n\n Passport,\n\n ResidencePermit\n\n}\n\n\n\n#[derive(Clone, Debug, Zeroize)]\n\n#[zeroize(drop)]\n\npub struct TruliooRequest {\n\n pub key: String,\n\n pub url: String,\n", "file_path": "trulioo/src/lib.rs", "rank": 58, "score": 13.396380097666434 }, { "content": " let documenttypes = request.get_document_types(&country).await.unwrap();\n\n println!(\"{:?}\", documenttypes);\n\n }\n\n });\n\n }\n\n\n\n #[derive(Deserialize)]\n\n struct Config {\n\n api: Api,\n\n }\n\n\n\n #[derive(Deserialize)]\n\n struct Api {\n\n url: String,\n\n key: String,\n\n }\n\n\n\n impl From<Api> for TruliooRequest {\n\n fn from(a: Api) -> Self {\n\n TruliooRequest {\n\n key: a.key,\n\n url: a.url,\n\n }\n\n }\n\n }\n\n}\n", "file_path": "trulioo/src/lib.rs", "rank": 59, "score": 12.903985925575366 }, { "content": " let result: IndexMap<String, Vec<DocumentTypes>> =\n\n serde_json::from_str(&body).map_err(|e| format!(\"{:?}\", e))?;\n\n Ok(result)\n\n }\n\n\n\n async fn post(&self, url: String, request: String) -> 
Result<String, String> {\n\n let mut response = Request::post(&url)\n\n .header(\"Accept\", \"application/json\")\n\n .header(\"Content-Type\", \"application/json\")\n\n .header(API_KEY_HEADER, &self.key)\n\n .body(request)\n\n .map_err(|e| format!(\"{:?}\", e))?\n\n .send_async()\n\n .await\n\n .map_err(|e| format!(\"{:?}\", e))?;\n\n\n\n let body = response\n\n .text_async()\n\n .await\n\n .map_err(|e| format!(\"{:?}\", e))?;\n", "file_path": "trulioo/src/lib.rs", "rank": 60, "score": 11.557874258317057 }, { "content": " Ok(body)\n\n }\n\n\n\n async fn get(&self, url: String) -> Result<String, String> {\n\n let mut response = Request::get(&url)\n\n .header(\"Accept\", \"application/json\")\n\n .header(API_KEY_HEADER, &self.key)\n\n .body(Body::empty())\n\n .map_err(|e| format!(\"{:?}\", e))?\n\n .send_async()\n\n .await\n\n .map_err(|e| format!(\"{:?}\", e))?;\n\n\n\n let body = response\n\n .text_async()\n\n .await\n\n .map_err(|e| format!(\"{:?}\", e))?;\n\n Ok(body)\n\n }\n\n}\n", "file_path": "trulioo/src/lib.rs", "rank": 61, "score": 11.335852615160949 }, { "content": " } else {\n\n request = TruliooRequest {\n\n key: env::var(\"TRULIOO_API_KEY\").unwrap(),\n\n url: env::var(\"TRULIOO_API_URL\").unwrap()\n\n }\n\n }\n\n\n\n task::block_on(async {\n\n let codes = request.get_country_codes().await.unwrap();\n\n assert!(codes.len() > 0);\n\n println!(\"{:?}\", codes);\n\n for country in codes {\n\n// let consents = request.get_detailed_consents(&country).await.unwrap();\n\n// println!(\"{:?}\", consents);\n\n\n\n// let subdivisions = request.get_country_subdivisions(&country).await.unwrap();\n\n// println!(\"{:?}\", subdivisions);\n\n\n\n// let testentities = request.get_test_entities(&country).await.unwrap();\n\n// println!(\"{:?}\", testentities);\n", "file_path": "trulioo/src/lib.rs", "rank": 62, "score": 11.318896675986469 }, { "content": " \"AppendedFields\" => appended_fields: Vec<Field>,\n\n \"OutputFields\" => output_fields: Vec<Field>,\n\n 
\"SourceType\" => source_type: String,\n\n \"UpdateFrequency\" => update_frequency: Option<String>,\n\n \"Coverage\" => coverage: String);\n\n\n\napi_obj_impl!(Field,\n\n \"FieldName\" => name: String,\n\n \"Type\" => xtype: String);\n\n\n\napi_obj_impl!(Subdivision,\n\n \"Name\" => name: String,\n\n \"Code\" => code: String,\n\n \"ParentCode\" => parent_code: String);\n\n\n\napi_obj_impl!(Consent,\n\n \"Name\" => name: String,\n\n \"Text\" => text: Option<String>,\n\n \"Url\" => url: Option<String>);\n\n\n", "file_path": "trulioo/src/lib.rs", "rank": 63, "score": 10.699282406644823 }, { "content": "api_obj_impl!(Location,\n\n \"BuildingNumber\" => building_number: Option<String>,\n\n \"BuildingName\" => building_name: Option<String>,\n\n \"UnitNumber\" => unit_number: Option<String>,\n\n \"StreetName\" => street_name: Option<String>,\n\n \"StreetType\" => street_type: Option<String>,\n\n \"City\" => city: Option<String>,\n\n \"Suburb\" => suburb: Option<String>,\n\n \"StateProvinceCode\" => state_province_code: Option<String>,\n\n \"PostalCode\" => postal_code: Option<String>,\n\n \"POBox\" => po_box: Option<String>,\n\n \"AdditionalFields\" => additional_fields: Option<AdditionalFieldsLocation>);\n\n\n\napi_obj_impl!(Communication,\n\n \"Telephone\" => telephone: Option<String>,\n\n \"Telephone2\" => telephone2: Option<String>,\n\n \"MobileNumber\" => mobile_number: Option<String>,\n\n \"EmailAddress\" => email_address: Option<String>);\n\n\n\napi_obj_impl!(NationalIds,\n", "file_path": "trulioo/src/lib.rs", "rank": 64, "score": 9.581326131354583 }, { "content": " \"CountrySpecific\"=> country_specific: Option<IndexMap<String, IndexMap<String, String>>>);\n\n\n\napi_obj_impl!(AdditionalFieldsPersonInfo,\n\n \"FullName\" => full_name: String);\n\n\n\napi_obj_impl!(AdditionalFieldsLocation,\n\n \"Address1\" => address1: String);\n\n\n\napi_obj_impl!(VerifyIdentityResponse,\n\n \"TransactionID\" => transaction_id: String,\n\n \"UploadedDt\" => uploaded_date: 
String,\n\n \"CountryCode\" => country_code: String,\n\n \"ProductName\" => product_name: String,\n\n \"Record\" => record: VerifyRecord,\n\n \"Errors\" => errors: Vec<String>);\n\n\n\napi_obj_impl!(VerifyRecord,\n\n \"TransactionRecordID\" => id: String,\n\n \"RecordStatus\" => status: String,\n\n \"DatasourceResults\" => data_source_results: Vec<String>,\n", "file_path": "trulioo/src/lib.rs", "rank": 65, "score": 9.337293171494785 }, { "content": "api_obj_impl!(Entity,\n\n \"PersonInfo\" => person_info: Option<PersonInfo>,\n\n \"Location\" => location: Option<Location>,\n\n \"Communication\" => communication: Option<Communication>,\n\n \"DriverLicense\" => driver_license: Option<DriverLicense>,\n\n \"Passport\" => passport: Option<Passport>,\n\n \"CountrySpecific\" => country_specific: Option<IndexMap<String, IndexMap<String, String>>>);\n\n\n\napi_obj_impl!(PersonInfo,\n\n \"FirstGivenName\" => first_given_name: Option<String>,\n\n \"MiddleName\" => middle_name: Option<String>,\n\n \"FirstSurName\" => first_surname: Option<String>,\n\n \"DayOfBirth\" => day_of_birth: Option<usize>,\n\n \"MonthOfBirth\" => month_of_birth: Option<usize>,\n\n \"YearOfBirth\" => year_of_birth: Option<usize>,\n\n \"ISOLatin1Name\" => iso_latin1_name: Option<String>,\n\n \"Gender\" => gender: Option<Gender>,\n\n \"MinimumAge\" => minimum_age: Option<usize>,\n\n \"AdditionalFields\" => additional_fields: Option<AdditionalFieldsPersonInfo>);\n\n\n", "file_path": "trulioo/src/lib.rs", "rank": 66, "score": 9.282576262843333 }, { "content": "macro_rules! 
display_impl {\n\n ($class:ident, $( $field:ident ),+) => {\n\n impl Display for $class {\n\n fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {\n\n write!(f, stringify!($class))?;\n\n write!(f, \"{{\")?;\n\n $(\n\n write!(f, \"{}: {:?}\", stringify!(Self.$field), self.$field)?;\n\n )+\n\n write!(f, \"}}\")\n\n }\n\n }\n\n };\n\n}\n\n\n\napi_obj_impl!(DataSource,\n\n \"Name\" => name: String,\n\n \"Description\" => description: String,\n\n \"RequiredFields\" => required_fields: Vec<Field>,\n\n \"OptionalFields\" => optional_fields: Vec<Field>,\n", "file_path": "trulioo/src/lib.rs", "rank": 67, "score": 7.535615186275901 }, { "content": " \"Number\" => number: Option<String>,\n\n \"Type\" => xtype: String,\n\n \"DistrictOfIssue\" => district_of_issue: Option<String>,\n\n \"CityOfIssue\" => city_of_issue: Option<String>,\n\n \"ProvinceOfIssue\" => province_of_issue: Option<String>,\n\n \"CountyOfIssue\" => county_of_issue: Option<String>);\n\n\n\napi_obj_impl!(Passport,\n\n \"Number\" => number: String,\n\n \"Mrz1\" => mrz1: Option<String>,\n\n \"Mrz2\" => mrz2: Option<String>,\n\n \"DayOfExpiry\" => day_of_expiry: Option<usize>,\n\n \"MonthOfExpiry\" => month_of_expiry: Option<usize>,\n\n \"YearOfExpiry\" => year_of_expiry: Option<usize>);\n\n\n\napi_obj_impl!(DriverLicense,\n\n \"Number\" => number: String,\n\n \"State\" => state: String,\n\n \"DayOfExpiry\" => day_of_expiry: usize,\n\n \"MonthOfExpiry\" => month_of_expiry: usize,\n", "file_path": "trulioo/src/lib.rs", "rank": 68, "score": 5.62948680449923 } ]
Rust
src/licensee.rs
kain88-de/spdx
6df4065871137935c4c7698ee6cf2ecbedd48a4d
use crate::{ error::{ParseError, Reason}, ExceptionId, Lexer, LicenseItem, LicenseReq, Token, }; #[derive(PartialEq, Eq, PartialOrd, Ord, Debug)] pub struct Licensee { inner: LicenseReq, } impl Licensee { pub fn new(license: LicenseItem, exception: Option<ExceptionId>) -> Self { if let LicenseItem::SPDX { or_later, .. } = &license { debug_assert!(!or_later) } Self { inner: LicenseReq { license, exception }, } } pub fn parse(original: &str) -> Result<Self, ParseError<'_>> { let mut lexer = Lexer::new(original); let license = { let lt = lexer.next().ok_or_else(|| ParseError { original, span: 0..original.len(), reason: Reason::Empty, })??; match lt.token { Token::SPDX(id) => LicenseItem::SPDX { id, or_later: false, }, Token::LicenseRef { doc_ref, lic_ref } => LicenseItem::Other { doc_ref: doc_ref.map(String::from), lic_ref: lic_ref.to_owned(), }, _ => { return Err(ParseError { original, span: lt.span, reason: Reason::Unexpected(&["<license>"]), }) } } }; let exception = match lexer.next() { None => None, Some(lt) => { let lt = lt?; match lt.token { Token::With => { let lt = lexer.next().ok_or_else(|| ParseError { original, span: lt.span, reason: Reason::Empty, })??; match lt.token { Token::Exception(exc) => Some(exc), _ => { return Err(ParseError { original, span: lt.span, reason: Reason::Unexpected(&["<exception>"]), }) } } } _ => { return Err(ParseError { original, span: lt.span, reason: Reason::Unexpected(&["WITH"]), }) } } } }; Ok(Licensee { inner: LicenseReq { license, exception }, }) } pub fn satisfies(&self, req: &LicenseReq) -> bool { match (&self.inner.license, &req.license) { (LicenseItem::SPDX { id: a, .. 
}, LicenseItem::SPDX { id: b, or_later }) => { if a.index != b.index { if *or_later { let a_name = &a.name[..a.name.rfind('-').unwrap_or_else(|| a.name.len())]; let b_name = &b.name[..b.name.rfind('-').unwrap_or_else(|| b.name.len())]; if a_name != b_name || a.name < b.name { return false; } } else { return false; } } } ( LicenseItem::Other { doc_ref: doca, lic_ref: lica, }, LicenseItem::Other { doc_ref: docb, lic_ref: licb, }, ) => { if doca != docb || lica != licb { return false; } } _ => return false, } req.exception == self.inner.exception } } impl PartialOrd<LicenseReq> for Licensee { fn partial_cmp(&self, o: &LicenseReq) -> Option<std::cmp::Ordering> { self.inner.partial_cmp(o) } } impl PartialEq<LicenseReq> for Licensee { fn eq(&self, o: &LicenseReq) -> bool { self.inner.eq(o) } } #[cfg(test)] mod test { use crate::{exception_id, license_id, LicenseItem, LicenseReq, Licensee}; const LICENSEES: &[&str] = &[ "LicenseRef-Embark-Proprietary", "BSD-2-Clause", "Apache-2.0 WITH LLVM-exception", "BSD-2-Clause-FreeBSD", "BSL-1.0", "Zlib", "CC0-1.0", "FTL", "ISC", "MIT", "MPL-2.0", "BSD-3-Clause", "Unicode-DFS-2016", "Unlicense", "Apache-2.0", ]; #[test] fn handles_or_later() { let mut licensees: Vec<_> = LICENSEES .iter() .map(|l| Licensee::parse(l).unwrap()) .collect(); licensees.sort(); let mpl_id = license_id("MPL-2.0").unwrap(); let req = LicenseReq { license: LicenseItem::SPDX { id: mpl_id, or_later: true, }, exception: None, }; assert!(licensees.binary_search_by(|l| l.inner.cmp(&req)).is_err()); match &licensees[licensees .binary_search_by(|l| l.partial_cmp(&req).unwrap()) .unwrap()] .inner .license { LicenseItem::SPDX { id, .. 
} => assert_eq!(*id, mpl_id), o => panic!("unexepcted {:?}", o), } } #[test] fn handles_exceptions() { let mut licensees: Vec<_> = LICENSEES .iter() .map(|l| Licensee::parse(l).unwrap()) .collect(); licensees.sort(); let apache_id = license_id("Apache-2.0").unwrap(); let llvm_exc = exception_id("LLVM-exception").unwrap(); let req = LicenseReq { license: LicenseItem::SPDX { id: apache_id, or_later: false, }, exception: Some(llvm_exc), }; assert_eq!( &req, &licensees[licensees .binary_search_by(|l| l.partial_cmp(&req).unwrap()) .unwrap()] .inner ); } #[test] fn handles_license_ref() { let mut licensees: Vec<_> = LICENSEES .iter() .map(|l| Licensee::parse(l).unwrap()) .collect(); licensees.sort(); let req = LicenseReq { license: LicenseItem::Other { doc_ref: None, lic_ref: "Embark-Proprietary".to_owned(), }, exception: None, }; assert_eq!( &req, &licensees[licensees .binary_search_by(|l| l.partial_cmp(&req).unwrap()) .unwrap()] .inner ); } #[test] fn handles_close() { let mut licensees: Vec<_> = LICENSEES .iter() .map(|l| Licensee::parse(l).unwrap()) .collect(); licensees.sort(); for id in &["BSD-2-Clause", "BSD-2-Clause-FreeBSD"] { let lic_id = license_id(id).unwrap(); let req = LicenseReq { license: LicenseItem::SPDX { id: lic_id, or_later: true, }, exception: None, }; assert!(licensees.binary_search_by(|l| l.inner.cmp(&req)).is_err()); match &licensees[licensees .binary_search_by(|l| l.partial_cmp(&req).unwrap()) .unwrap()] .inner .license { LicenseItem::SPDX { id, .. } => assert_eq!(*id, lic_id), o => panic!("unexepcted {:?}", o), } } } }
use crate::{ error::{ParseError, Reason}, ExceptionId, Lexer, LicenseItem, LicenseReq, Token, }; #[derive(PartialEq, Eq, PartialOrd, Ord, Debug)] pub struct Licensee { inner: LicenseReq, } impl Licensee {
pub fn parse(original: &str) -> Result<Self, ParseError<'_>> { let mut lexer = Lexer::new(original); let license = { let lt = lexer.next().ok_or_else(|| ParseError { original, span: 0..original.len(), reason: Reason::Empty, })??; match lt.token { Token::SPDX(id) => LicenseItem::SPDX { id, or_later: false, }, Token::LicenseRef { doc_ref, lic_ref } => LicenseItem::Other { doc_ref: doc_ref.map(String::from), lic_ref: lic_ref.to_owned(), }, _ => { return Err(ParseError { original, span: lt.span, reason: Reason::Unexpected(&["<license>"]), }) } } }; let exception = match lexer.next() { None => None, Some(lt) => { let lt = lt?; match lt.token { Token::With => { let lt = lexer.next().ok_or_else(|| ParseError { original, span: lt.span, reason: Reason::Empty, })??; match lt.token { Token::Exception(exc) => Some(exc), _ => { return Err(ParseError { original, span: lt.span, reason: Reason::Unexpected(&["<exception>"]), }) } } } _ => { return Err(ParseError { original, span: lt.span, reason: Reason::Unexpected(&["WITH"]), }) } } } }; Ok(Licensee { inner: LicenseReq { license, exception }, }) } pub fn satisfies(&self, req: &LicenseReq) -> bool { match (&self.inner.license, &req.license) { (LicenseItem::SPDX { id: a, .. 
}, LicenseItem::SPDX { id: b, or_later }) => { if a.index != b.index { if *or_later { let a_name = &a.name[..a.name.rfind('-').unwrap_or_else(|| a.name.len())]; let b_name = &b.name[..b.name.rfind('-').unwrap_or_else(|| b.name.len())]; if a_name != b_name || a.name < b.name { return false; } } else { return false; } } } ( LicenseItem::Other { doc_ref: doca, lic_ref: lica, }, LicenseItem::Other { doc_ref: docb, lic_ref: licb, }, ) => { if doca != docb || lica != licb { return false; } } _ => return false, } req.exception == self.inner.exception } } impl PartialOrd<LicenseReq> for Licensee { fn partial_cmp(&self, o: &LicenseReq) -> Option<std::cmp::Ordering> { self.inner.partial_cmp(o) } } impl PartialEq<LicenseReq> for Licensee { fn eq(&self, o: &LicenseReq) -> bool { self.inner.eq(o) } } #[cfg(test)] mod test { use crate::{exception_id, license_id, LicenseItem, LicenseReq, Licensee}; const LICENSEES: &[&str] = &[ "LicenseRef-Embark-Proprietary", "BSD-2-Clause", "Apache-2.0 WITH LLVM-exception", "BSD-2-Clause-FreeBSD", "BSL-1.0", "Zlib", "CC0-1.0", "FTL", "ISC", "MIT", "MPL-2.0", "BSD-3-Clause", "Unicode-DFS-2016", "Unlicense", "Apache-2.0", ]; #[test] fn handles_or_later() { let mut licensees: Vec<_> = LICENSEES .iter() .map(|l| Licensee::parse(l).unwrap()) .collect(); licensees.sort(); let mpl_id = license_id("MPL-2.0").unwrap(); let req = LicenseReq { license: LicenseItem::SPDX { id: mpl_id, or_later: true, }, exception: None, }; assert!(licensees.binary_search_by(|l| l.inner.cmp(&req)).is_err()); match &licensees[licensees .binary_search_by(|l| l.partial_cmp(&req).unwrap()) .unwrap()] .inner .license { LicenseItem::SPDX { id, .. 
} => assert_eq!(*id, mpl_id), o => panic!("unexepcted {:?}", o), } } #[test] fn handles_exceptions() { let mut licensees: Vec<_> = LICENSEES .iter() .map(|l| Licensee::parse(l).unwrap()) .collect(); licensees.sort(); let apache_id = license_id("Apache-2.0").unwrap(); let llvm_exc = exception_id("LLVM-exception").unwrap(); let req = LicenseReq { license: LicenseItem::SPDX { id: apache_id, or_later: false, }, exception: Some(llvm_exc), }; assert_eq!( &req, &licensees[licensees .binary_search_by(|l| l.partial_cmp(&req).unwrap()) .unwrap()] .inner ); } #[test] fn handles_license_ref() { let mut licensees: Vec<_> = LICENSEES .iter() .map(|l| Licensee::parse(l).unwrap()) .collect(); licensees.sort(); let req = LicenseReq { license: LicenseItem::Other { doc_ref: None, lic_ref: "Embark-Proprietary".to_owned(), }, exception: None, }; assert_eq!( &req, &licensees[licensees .binary_search_by(|l| l.partial_cmp(&req).unwrap()) .unwrap()] .inner ); } #[test] fn handles_close() { let mut licensees: Vec<_> = LICENSEES .iter() .map(|l| Licensee::parse(l).unwrap()) .collect(); licensees.sort(); for id in &["BSD-2-Clause", "BSD-2-Clause-FreeBSD"] { let lic_id = license_id(id).unwrap(); let req = LicenseReq { license: LicenseItem::SPDX { id: lic_id, or_later: true, }, exception: None, }; assert!(licensees.binary_search_by(|l| l.inner.cmp(&req)).is_err()); match &licensees[licensees .binary_search_by(|l| l.partial_cmp(&req).unwrap()) .unwrap()] .inner .license { LicenseItem::SPDX { id, .. } => assert_eq!(*id, lic_id), o => panic!("unexepcted {:?}", o), } } } }
pub fn new(license: LicenseItem, exception: Option<ExceptionId>) -> Self { if let LicenseItem::SPDX { or_later, .. } = &license { debug_assert!(!or_later) } Self { inner: LicenseReq { license, exception }, } }
function_block-full_function
[ { "content": "#[inline]\n\npub fn license_id(name: &str) -> Option<LicenseId> {\n\n let name = &name.trim_end_matches('+');\n\n identifiers::LICENSES\n\n .binary_search_by(|lic| lic.0.cmp(name))\n\n .map(|index| {\n\n let (name, flags) = identifiers::LICENSES[index];\n\n LicenseId { name, index, flags }\n\n })\n\n .ok()\n\n}\n\n\n\n/// Attempts to find an ExceptionId for the string\n", "file_path": "src/lib.rs", "rank": 0, "score": 56208.99910702869 }, { "content": "#[inline]\n\npub fn license_version() -> &'static str {\n\n identifiers::VERSION\n\n}\n", "file_path": "src/lib.rs", "rank": 1, "score": 55637.86528433059 }, { "content": "#[inline]\n\npub fn exception_id(name: &str) -> Option<ExceptionId> {\n\n identifiers::EXCEPTIONS\n\n .binary_search_by(|exc| exc.0.cmp(name))\n\n .map(|index| {\n\n let (name, flags) = identifiers::EXCEPTIONS[index];\n\n ExceptionId { name, index, flags }\n\n })\n\n .ok()\n\n}\n\n\n\n/// Returns the version number of the SPDX list from which\n\n/// the license and exception identifiers are sourced from\n", "file_path": "src/lib.rs", "rank": 2, "score": 40652.32392678196 }, { "content": "use crate::{\n\n error::{ParseError, Reason},\n\n ExceptionId, LicenseId,\n\n};\n\nuse lazy_static::lazy_static;\n\n\n\n#[derive(Clone, Debug, PartialEq, Eq)]\n\npub enum Token<'a> {\n\n SPDX(LicenseId),\n\n LicenseRef {\n\n doc_ref: Option<&'a str>,\n\n lic_ref: &'a str,\n\n },\n\n Exception(ExceptionId),\n\n Plus,\n\n OpenParen,\n\n CloseParen,\n\n With,\n\n And,\n\n Or,\n", "file_path": "src/lexer.rs", "rank": 3, "score": 27648.649108929934 }, { "content": " inner: text,\n\n original: text,\n\n offset: 0,\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct LexerToken<'a> {\n\n /// The token that was lexed\n\n pub token: Token<'a>,\n\n /// The range of the token characters in the original license expression\n\n pub span: std::ops::Range<usize>,\n\n}\n\n\n\nimpl<'a> Iterator for Lexer<'a> {\n\n type Item = Result<LexerToken<'a>, 
ParseError<'a>>;\n\n\n\n fn next(&mut self) -> Option<Self::Item> {\n\n lazy_static! {\n", "file_path": "src/lexer.rs", "rank": 4, "score": 27648.383170124256 }, { "content": " \"DocumentRef-\".len() + d.len() + 1\n\n }) + \"LicenseRef-\".len()\n\n + lic_ref.len()\n\n }\n\n }\n\n }\n\n}\n\n\n\n/// Allows iteration through a license expression, yielding\n\n/// a token or a parser error\n\npub struct Lexer<'a> {\n\n inner: &'a str,\n\n original: &'a str,\n\n offset: usize,\n\n}\n\n\n\nimpl<'a> Lexer<'a> {\n\n /// Creates a Lexer over a license expression\n\n pub fn new(text: &'a str) -> Self {\n\n Self {\n", "file_path": "src/lexer.rs", "rank": 5, "score": 27645.992622116268 }, { "content": "use spdx::{Lexer, Token};\n\n\n\nmacro_rules! test_lex {\n\n ($text:expr, [$($token:expr),+$(,)?]) => {\n\n let lexed: Vec<_> = Lexer::new($text).map(|r| r.map(|lt| lt.token).unwrap()).collect();\n\n let expected = {\n\n let mut v = Vec::new();\n\n $(\n\n v.push($token);\n\n )+\n\n v\n\n };\n\n\n\n assert_eq!(lexed, expected);\n\n }\n\n}\n\n\n\nmacro_rules! 
lic_tok {\n\n ($id:expr) => {\n\n Token::SPDX(spdx::license_id($id).unwrap())\n", "file_path": "tests/lexer.rs", "rank": 6, "score": 27642.37387669249 }, { "content": " original: self.original,\n\n span: self.offset..self.offset + m.end(),\n\n reason: Reason::UnknownTerm,\n\n }))\n\n }\n\n }\n\n },\n\n }\n\n .map(|res| {\n\n res.map(|tok| {\n\n let len = tok.len();\n\n let start = self.offset;\n\n self.inner = &self.inner[len..];\n\n self.offset += len;\n\n\n\n LexerToken {\n\n token: tok,\n\n span: start..self.offset,\n\n }\n\n })\n\n })\n\n }\n\n}\n", "file_path": "src/lexer.rs", "rank": 7, "score": 27642.17276906472 }, { "content": "}\n\n\n\nimpl<'a> std::fmt::Display for Token<'a> {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n std::fmt::Debug::fmt(self, f)\n\n }\n\n}\n\n\n\nimpl<'a> Token<'a> {\n\n fn len(&self) -> usize {\n\n match self {\n\n Token::SPDX(id) => id.name.len(),\n\n Token::Exception(e) => e.name.len(),\n\n Token::With => 4,\n\n Token::And => 3,\n\n Token::Or => 2,\n\n Token::Plus | Token::OpenParen | Token::CloseParen => 1,\n\n Token::LicenseRef { doc_ref, lic_ref } => {\n\n doc_ref.map_or(0, |d| {\n\n // +1 is for the `:`\n", "file_path": "src/lexer.rs", "rank": 8, "score": 27640.933978181958 }, { "content": " Some('+') => Some(if non_whitespace_index != 0 {\n\n Err(ParseError {\n\n original: self.original,\n\n span: self.offset - non_whitespace_index..self.offset,\n\n reason: Reason::SeparatedPlus,\n\n })\n\n } else {\n\n Ok(Token::Plus)\n\n }),\n\n Some('(') => Some(Ok(Token::OpenParen)),\n\n Some(')') => Some(Ok(Token::CloseParen)),\n\n _ => match TEXTTOKEN.find(self.inner) {\n\n None => Some(Err(ParseError {\n\n original: self.original,\n\n span: self.offset..self.offset + self.inner.len(),\n\n reason: Reason::InvalidCharacters,\n\n })),\n\n Some(m) => {\n\n if m.as_str() == \"WITH\" {\n\n Some(Ok(Token::With))\n", "file_path": "src/lexer.rs", "rank": 9, "score": 27640.17899765212 }, { "content": " } else if 
m.as_str() == \"AND\" {\n\n Some(Ok(Token::And))\n\n } else if m.as_str() == \"OR\" {\n\n Some(Ok(Token::Or))\n\n } else if let Some(lic_id) = crate::license_id(&m.as_str()) {\n\n Some(Ok(Token::SPDX(lic_id)))\n\n } else if let Some(exc_id) = crate::exception_id(&m.as_str()) {\n\n Some(Ok(Token::Exception(exc_id)))\n\n } else if let Some(c) = DOCREFLICREF.captures(m.as_str()) {\n\n Some(Ok(Token::LicenseRef {\n\n doc_ref: Some(c.get(1).unwrap().as_str()),\n\n lic_ref: c.get(2).unwrap().as_str(),\n\n }))\n\n } else if let Some(c) = LICREF.captures(m.as_str()) {\n\n Some(Ok(Token::LicenseRef {\n\n doc_ref: None,\n\n lic_ref: c.get(1).unwrap().as_str(),\n\n }))\n\n } else {\n\n Some(Err(ParseError {\n", "file_path": "src/lexer.rs", "rank": 10, "score": 27639.56395438671 }, { "content": " static ref TEXTTOKEN: regex::Regex = regex::Regex::new(r\"^[-a-zA-Z0-9.:]+\").unwrap();\n\n static ref DOCREFLICREF: regex::Regex =\n\n regex::Regex::new(r\"^DocumentRef-([-a-zA-Z0-9.]+):LicenseRef-([-a-zA-Z0-9.]+)\")\n\n .unwrap();\n\n static ref LICREF: regex::Regex =\n\n regex::Regex::new(r\"^LicenseRef-([-a-zA-Z0-9.]+)\").unwrap();\n\n }\n\n\n\n // Jump over any whitespace, updating `self.inner` and `self.offset` appropriately\n\n let non_whitespace_index = match self.inner.find(|c: char| !c.is_whitespace()) {\n\n Some(idx) => idx,\n\n None => self.inner.len(),\n\n };\n\n self.inner = &self.inner[non_whitespace_index..];\n\n self.offset += non_whitespace_index;\n\n\n\n match self.inner.chars().next() {\n\n None => None,\n\n // From SPDX 2.1 spec\n\n // There MUST NOT be whitespace between a license-id and any following \"+\".\n", "file_path": "src/lexer.rs", "rank": 11, "score": 27636.80701370178 }, { "content": " };\n\n}\n\n\n\nmacro_rules! 
exc_tok {\n\n ($id:expr) => {\n\n Token::Exception(spdx::exception_id($id).unwrap())\n\n };\n\n}\n\n\n\n#[test]\n", "file_path": "tests/lexer.rs", "rank": 12, "score": 27633.88230706857 }, { "content": "#[test]\n\nfn lexes_or() {\n\n let s = \"Apache-2.0 OR MIT\";\n\n\n\n test_lex!(s, [lic_tok!(\"Apache-2.0\"), Token::Or, lic_tok!(\"MIT\"),]);\n\n}\n\n\n", "file_path": "tests/lexer.rs", "rank": 13, "score": 25864.93992796228 }, { "content": "#[test]\n\nfn lexes_and() {\n\n let s = \"BSD-3-Clause AND Zlib\";\n\n\n\n test_lex!(s, [lic_tok!(\"BSD-3-Clause\"), Token::And, lic_tok!(\"Zlib\"),]);\n\n}\n\n\n", "file_path": "tests/lexer.rs", "rank": 14, "score": 25864.93992796228 }, { "content": "fn download<F>(uri: &str, mut action: F, debug: bool) -> Result<()>\n\nwhere\n\n F: FnMut(Map) -> Result<()>,\n\n{\n\n let json: Value = reqwest::get(uri)?.json()?;\n\n let json = if let Value::Object(m) = json {\n\n m\n\n } else {\n\n bail!(\"Malformed JSON: {:?}\", json)\n\n };\n\n\n\n if debug {\n\n writeln!(io::stderr(), \"#json == {}\", json.len())?;\n\n writeln!(\n\n io::stderr(),\n\n \"License list version {}\",\n\n get(&json, \"licenseListVersion\")?\n\n )?;\n\n }\n\n\n\n action(json)\n\n}\n\n\n", "file_path": "update/src/main.rs", "rank": 15, "score": 24459.640541616216 }, { "content": "#[test]\n\nfn lexes_single() {\n\n let s = \"0BSD\";\n\n\n\n test_lex!(s, [lic_tok!(s)]);\n\n}\n\n\n", "file_path": "tests/lexer.rs", "rank": 16, "score": 24310.998003636814 }, { "content": "#[test]\n\nfn lexes_complex() {\n\n let complex = \"(Apache-2.0 WITH LLVM-exception) OR Apache-2.0 OR MIT\";\n\n\n\n test_lex!(\n\n complex,\n\n [\n\n Token::OpenParen,\n\n lic_tok!(\"Apache-2.0\"),\n\n Token::With,\n\n exc_tok!(\"LLVM-exception\"),\n\n Token::CloseParen,\n\n Token::Or,\n\n lic_tok!(\"Apache-2.0\"),\n\n Token::Or,\n\n lic_tok!(\"MIT\"),\n\n ]\n\n );\n\n}\n", "file_path": "tests/lexer.rs", "rank": 17, "score": 24310.998003636814 }, { "content": "#[test]\n\nfn lexes_exception() {\n\n let 
s = \"Apache-2.0 WITH LLVM-exception\";\n\n\n\n test_lex!(\n\n s,\n\n [\n\n lic_tok!(\"Apache-2.0\"),\n\n Token::With,\n\n exc_tok!(\"LLVM-exception\"),\n\n ]\n\n );\n\n}\n\n\n", "file_path": "tests/lexer.rs", "rank": 18, "score": 24310.998003636814 }, { "content": "#[test]\n\nfn fails_with_slash() {\n\n let mut lexer = Lexer::new(\"MIT/Apache-2.0\");\n\n assert_eq!(lexer.next().unwrap().unwrap().token, lic_tok!(\"MIT\"));\n\n assert_eq!(\n\n lexer.next().unwrap().unwrap_err(),\n\n spdx::ParseError {\n\n original: \"MIT/Apache-2.0\",\n\n span: 3..14,\n\n reason: spdx::error::Reason::InvalidCharacters,\n\n }\n\n );\n\n}\n\n\n", "file_path": "tests/lexer.rs", "rank": 19, "score": 24310.998003636814 }, { "content": "#[test]\n\nfn lexes_all_the_things() {\n\n let text = \"MIT+ OR () Apache-2.0 WITH AND LicenseRef-World Classpath-exception-2.0 DocumentRef-Test:LicenseRef-Hello\";\n\n\n\n test_lex!(\n\n text,\n\n [\n\n lic_tok!(\"MIT\"),\n\n Token::Plus,\n\n Token::Or,\n\n Token::OpenParen,\n\n Token::CloseParen,\n\n lic_tok!(\"Apache-2.0\"),\n\n Token::With,\n\n Token::And,\n\n Token::LicenseRef {\n\n doc_ref: None,\n\n lic_ref: \"World\",\n\n },\n\n exc_tok!(\"Classpath-exception-2.0\"),\n\n Token::LicenseRef {\n\n doc_ref: Some(\"Test\"),\n\n lic_ref: \"Hello\",\n\n },\n\n ]\n\n );\n\n}\n\n\n", "file_path": "tests/lexer.rs", "rank": 20, "score": 24310.998003636814 }, { "content": "#[test]\n\nfn lexes_exceptions_with_ors() {\n\n let s = \"Apache-2.0 WITH LLVM-exception OR Apache-2.0 OR MIT\";\n\n\n\n test_lex!(\n\n s,\n\n [\n\n lic_tok!(\"Apache-2.0\"),\n\n Token::With,\n\n exc_tok!(\"LLVM-exception\"),\n\n Token::Or,\n\n lic_tok!(\"Apache-2.0\"),\n\n Token::Or,\n\n lic_tok!(\"MIT\"),\n\n ]\n\n );\n\n}\n\n\n", "file_path": "tests/lexer.rs", "rank": 21, "score": 22933.192766225504 }, { "content": "use crate::LicenseReq;\n\nuse smallvec::SmallVec;\n\nuse std::fmt;\n\n\n\n/// A license requirement inside an SPDX license expression, including\n\n/// the span in the 
expression where it is located\n\n#[derive(Debug, Clone)]\n\npub struct ExpressionReq {\n\n pub req: LicenseReq,\n\n pub span: std::ops::Range<u32>,\n\n}\n\n\n\nimpl PartialEq for ExpressionReq {\n\n fn eq(&self, o: &Self) -> bool {\n\n self.req == o.req\n\n }\n\n}\n\n\n\n/// The joining operators supported by SPDX 2.1\n\n#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Copy, Clone)]\n", "file_path": "src/expression.rs", "rank": 23, "score": 19.251892820911355 }, { "content": "use std::{cmp, fmt};\n\n\n\npub mod error;\n\npub mod expression;\n\nmod identifiers;\n\nmod lexer;\n\nmod licensee;\n\npub mod parser;\n\n\n\npub use error::ParseError;\n\npub use expression::Expression;\n\npub use lexer::{Lexer, Token};\n\npub use licensee::Licensee;\n\n\n\n/// Unique identifier for a particular license\n\n#[derive(Copy, Clone, Eq, Ord)]\n\npub struct LicenseId {\n\n /// The short identifier for the exception\n\n pub name: &'static str,\n\n index: usize,\n", "file_path": "src/lib.rs", "rank": 25, "score": 17.050292589825073 }, { "content": "use std::{error::Error, fmt};\n\n\n\n#[derive(Debug, PartialEq)]\n\npub struct ParseError<'a> {\n\n pub original: &'a str,\n\n pub span: std::ops::Range<usize>,\n\n pub reason: Reason,\n\n}\n\n\n\n#[derive(Debug, PartialEq)]\n\npub enum Reason {\n\n /// The specified license short-identifier was not\n\n /// found the SPDX list\n\n UnknownLicense,\n\n /// The specified exception short-identifier was not\n\n /// found the SPDX list\n\n UnknownException,\n\n /// The characters are not valid in an SDPX license expression\n\n InvalidCharacters,\n\n /// An opening parens was unmatched with a closing parens\n", "file_path": "src/error.rs", "rank": 26, "score": 14.935637792302622 }, { "content": "use crate::{\n\n error::{ParseError, Reason},\n\n expression::{ExprNode, Expression, ExpressionReq, Operator},\n\n lexer::{Lexer, Token},\n\n LicenseItem, LicenseReq,\n\n};\n\nuse smallvec::SmallVec;\n\n\n\nimpl Expression {\n\n /// Given a license 
expression, attempts to parse and validate it as a valid SPDX expression\n\n ///\n\n /// The validation can fail for many reasons:\n\n /// * The expression contains invalid characters\n\n /// * An unknown/invalid license or exception identifier was found. Only\n\n /// [SPDX short identifiers](https://spdx.org/ids) are allowed\n\n /// * The expression contained unbalanced parentheses\n\n /// * A license or exception immediately follows another license or exception, without\n\n /// a valid AND, OR, or WITH operator separating them\n\n /// * An AND, OR, or WITH doesn't have a license or `)` preceding it\n\n pub fn parse(original: &str) -> Result<Self, ParseError> {\n", "file_path": "src/parser.rs", "rank": 27, "score": 13.98912947890356 }, { "content": " write!(f, \"{}\", self.name)\n\n }\n\n}\n\n\n\n/// Represents a single license requirement, which must include a valid\n\n/// LicenseItem, and may allow current a future versions of the license,\n\n/// and may also allow for a specific exception\n\n#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)]\n\npub struct LicenseReq {\n\n /// The license\n\n pub license: LicenseItem,\n\n /// The exception allowed for this license, as specified following\n\n /// the `WITH` operator\n\n pub exception: Option<ExceptionId>,\n\n}\n\n\n\nimpl fmt::Display for LicenseReq {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {\n\n self.license.fmt(f)?;\n\n\n", "file_path": "src/lib.rs", "rank": 28, "score": 13.844926486663594 }, { "content": " flags: u8,\n\n}\n\n\n\nimpl PartialEq for LicenseId {\n\n #[inline]\n\n fn eq(&self, o: &LicenseId) -> bool {\n\n self.index == o.index\n\n }\n\n}\n\n\n\nimpl PartialOrd for LicenseId {\n\n #[inline]\n\n fn partial_cmp(&self, o: &LicenseId) -> Option<cmp::Ordering> {\n\n self.index.partial_cmp(&o.index)\n\n }\n\n}\n\n\n\npub const IS_FSF_LIBRE: u8 = 0x1;\n\npub const IS_OSI_APPROVED: u8 = 0x2;\n\npub const IS_DEPRECATED: u8 = 0x4;\n", "file_path": "src/lib.rs", "rank": 29, 
"score": 13.261083360072945 }, { "content": "\n\nimpl fmt::Debug for LicenseId {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n write!(f, \"{}\", self.name)\n\n }\n\n}\n\n\n\n/// Unique identifier for a particular exception\n\n#[derive(Copy, Clone, Eq, Ord)]\n\npub struct ExceptionId {\n\n /// The short identifier for the exception\n\n pub name: &'static str,\n\n index: usize,\n\n flags: u8,\n\n}\n\n\n\nimpl PartialEq for ExceptionId {\n\n #[inline]\n\n fn eq(&self, o: &ExceptionId) -> bool {\n\n self.index == o.index\n", "file_path": "src/lib.rs", "rank": 30, "score": 13.178975429775143 }, { "content": "pub enum Operator {\n\n And,\n\n Or,\n\n}\n\n\n\n#[derive(Debug, Clone, PartialEq)]\n\npub(crate) enum ExprNode {\n\n Op(Operator),\n\n Req(ExpressionReq),\n\n}\n\n\n\n/// An SPDX license expression that is both syntactically\n\n/// and semantically valid, and can be evaluated\n\n#[derive(Clone)]\n\npub struct Expression {\n\n pub(crate) expr: SmallVec<[ExprNode; 5]>,\n\n // We keep the original string around for display purposes only\n\n pub(crate) original: String,\n\n}\n\n\n", "file_path": "src/expression.rs", "rank": 32, "score": 11.977394363591813 }, { "content": " let lexer = Lexer::new(original);\n\n\n\n // Operator precedence in SPDX 2.1\n\n // +\n\n // WITH\n\n // AND\n\n // OR\n\n #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug)]\n\n enum Op {\n\n //Plus,\n\n //With,\n\n And,\n\n Or,\n\n Open,\n\n }\n\n\n\n struct OpAndSpan {\n\n op: Op,\n\n span: std::ops::Range<usize>,\n\n }\n", "file_path": "src/parser.rs", "rank": 33, "score": 11.35493928708014 }, { "content": " None | Some(Token::And) | Some(Token::Or) | Some(Token::OpenParen) => {\n\n &[\"<license>\", \"(\"]\n\n }\n\n Some(Token::CloseParen) => &[\"AND\", \"OR\"],\n\n Some(Token::Exception(_)) => &[\"AND\", \"OR\", \")\"],\n\n Some(Token::SPDX(_)) => &[\"AND\", \"OR\", \"WITH\", \")\", \"+\"],\n\n Some(Token::LicenseRef { .. 
}) | Some(Token::Plus) => &[\"AND\", \"OR\", \"WITH\", \")\"],\n\n Some(Token::With) => &[\"<exception>\"],\n\n };\n\n\n\n Err(ParseError {\n\n original,\n\n span,\n\n reason: Reason::Unexpected(&expected),\n\n })\n\n };\n\n\n\n // Basic implementation of the https://en.wikipedia.org/wiki/Shunting-yard_algorithm\n\n 'outer: for tok in lexer {\n\n let lt = tok?;\n", "file_path": "src/parser.rs", "rank": 34, "score": 10.918716882100975 }, { "content": " if let Some(ref exe) = self.exception {\n\n write!(f, \" WITH {}\", exe.name)?;\n\n }\n\n Ok(())\n\n }\n\n}\n\n\n\n/// A single license term in a license expression, according to the SPDX spec.\n\n/// This can be either an SPDX license, which is mapped to a LicenseId from\n\n/// a valid SPDX short identifier, or else a document AND/OR license ref\n\n#[derive(Debug, Clone, Eq, Ord)]\n\npub enum LicenseItem {\n\n /// A regular SPDX license id\n\n SPDX {\n\n id: LicenseId,\n\n /// Indicates the license had a `+`, allowing the licensee to license\n\n /// the software under either the specific version, or any later versions\n\n or_later: bool,\n\n },\n\n Other {\n", "file_path": "src/lib.rs", "rank": 35, "score": 9.942657808353793 }, { "content": " // Validate that the terminating token is valid\n\n match last_token {\n\n Some(Token::SPDX(_))\n\n | Some(Token::LicenseRef { .. 
})\n\n | Some(Token::Exception(_))\n\n | Some(Token::CloseParen)\n\n | Some(Token::Plus) => {}\n\n // We have to have at least one valid license requirement\n\n None => {\n\n return Err(ParseError {\n\n original,\n\n span: 0..original.len(),\n\n reason: Reason::Empty,\n\n });\n\n }\n\n _ => return make_err_for_token(last_token, original.len()..original.len()),\n\n }\n\n\n\n while let Some(top) = op_stack.pop() {\n\n match top.op {\n", "file_path": "src/parser.rs", "rank": 39, "score": 8.846891642754173 }, { "content": " }\n\n}\n\n\n\nimpl PartialOrd for ExceptionId {\n\n #[inline]\n\n fn partial_cmp(&self, o: &ExceptionId) -> Option<cmp::Ordering> {\n\n self.index.partial_cmp(&o.index)\n\n }\n\n}\n\n\n\nimpl ExceptionId {\n\n /// Returns true if the exception is deprecated\n\n #[inline]\n\n pub fn is_deprecated(self) -> bool {\n\n self.flags & IS_DEPRECATED != 0\n\n }\n\n}\n\n\n\nimpl fmt::Debug for ExceptionId {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n", "file_path": "src/lib.rs", "rank": 40, "score": 8.804917823867147 }, { "content": "impl Expression {\n\n /// Returns each of the license requirements in the license expression,\n\n /// but not the operators that join them together\n\n pub fn requirements(&self) -> impl Iterator<Item = &ExpressionReq> {\n\n self.expr.iter().filter_map(|item| match item {\n\n ExprNode::Req(req) => Some(req),\n\n _ => None,\n\n })\n\n }\n\n\n\n /// Evaluates the expression, using the provided function\n\n /// to determine if the licensee meets the requirements\n\n /// for each license term. 
If enough requirements are\n\n /// satisfied the evaluation will return true.\n\n pub fn evaluate<AF: FnMut(&LicenseReq) -> bool>(&self, mut allow_func: AF) -> bool {\n\n let mut result_stack = SmallVec::<[bool; 8]>::new();\n\n\n\n // We store the expression as postfix, so just evaluate each license\n\n // requirement in the order it comes, and then combining the previous\n\n // results according to each operator as it comes\n", "file_path": "src/expression.rs", "rank": 41, "score": 8.062018165203906 }, { "content": "\n\nimpl LicenseId {\n\n /// Returns true if the license is [considered free by the FSF](https://www.gnu.org/licenses/license-list.en.html)\n\n #[inline]\n\n pub fn is_fsf_free_libre(self) -> bool {\n\n self.flags & IS_FSF_LIBRE != 0\n\n }\n\n\n\n /// Returns true if the license is [OSI approved](https://opensource.org/licenses)\n\n #[inline]\n\n pub fn is_osi_approved(self) -> bool {\n\n self.flags & IS_OSI_APPROVED != 0\n\n }\n\n\n\n /// Returns true if the license is deprecated\n\n #[inline]\n\n pub fn is_deprecated(self) -> bool {\n\n self.flags & IS_DEPRECATED != 0\n\n }\n\n}\n", "file_path": "src/lib.rs", "rank": 42, "score": 8.027354245739161 }, { "content": " match &lt.token {\n\n Token::SPDX(id) => match last_token {\n\n None | Some(Token::And) | Some(Token::Or) | Some(Token::OpenParen) => {\n\n expr_queue.push(ExprNode::Req(ExpressionReq {\n\n req: LicenseReq {\n\n license: LicenseItem::SPDX {\n\n id: *id,\n\n or_later: false,\n\n },\n\n exception: None,\n\n },\n\n span: lt.span.start as u32..lt.span.end as u32,\n\n }));\n\n }\n\n _ => return make_err_for_token(last_token, lt.span),\n\n },\n\n Token::LicenseRef { doc_ref, lic_ref } => match last_token {\n\n None | Some(Token::And) | Some(Token::Or) | Some(Token::OpenParen) => {\n\n expr_queue.push(ExprNode::Req(ExpressionReq {\n\n req: LicenseReq {\n", "file_path": "src/parser.rs", "rank": 46, "score": 6.950432912061272 }, { "content": " reason: Reason::UnopenedParens,\n\n });\n\n }\n\n _ => 
return make_err_for_token(last_token, lt.span),\n\n }\n\n }\n\n Token::Exception(exc) => match last_token {\n\n Some(Token::With) => match expr_queue.last_mut() {\n\n Some(ExprNode::Req(lic)) => {\n\n lic.req.exception = Some(*exc);\n\n }\n\n _ => unreachable!(),\n\n },\n\n _ => return make_err_for_token(last_token, lt.span),\n\n },\n\n }\n\n\n\n last_token = Some(lt.token);\n\n }\n\n\n", "file_path": "src/parser.rs", "rank": 47, "score": 6.828687725963206 }, { "content": " }\n\n v.sort_by_key(|v| v.0);\n\n\n\n let lic_list_ver = get(&json, \"licenseListVersion\")?;\n\n if let Value::String(ref s) = lic_list_ver {\n\n writeln!(identifiers, \"pub const VERSION: &str = {:?};\", s)?;\n\n } else {\n\n bail!(\"Malformed JSON: {:?}\", lic_list_ver)\n\n }\n\n writeln!(identifiers)?;\n\n writeln!(identifiers, \"pub const LICENSES: &[(&str, u8)] = &[\")?;\n\n for (lic, flags) in v.iter() {\n\n writeln!(identifiers, \" (\\\"{}\\\", {}),\", lic, flags)?;\n\n }\n\n writeln!(identifiers, \"];\")?;\n\n\n\n Ok(())\n\n },\n\n debug,\n\n )?;\n", "file_path": "update/src/main.rs", "rank": 48, "score": 6.753359968957545 }, { "content": " }) => {\n\n *or_later = true;\n\n }\n\n _ => unreachable!(),\n\n },\n\n _ => return make_err_for_token(last_token, lt.span),\n\n },\n\n Token::With => match last_token {\n\n Some(Token::SPDX(_)) | Some(Token::LicenseRef { .. }) | Some(Token::Plus) => {}\n\n _ => return make_err_for_token(last_token, lt.span),\n\n },\n\n Token::Or | Token::And => match last_token {\n\n Some(Token::SPDX(_))\n\n | Some(Token::LicenseRef { .. 
})\n\n | Some(Token::CloseParen)\n\n | Some(Token::Exception(_))\n\n | Some(Token::Plus) => {\n\n let new_op = match lt.token {\n\n Token::Or => Op::Or,\n\n Token::And => Op::And,\n", "file_path": "src/parser.rs", "rank": 50, "score": 6.715109117567015 }, { "content": " license: LicenseItem::Other {\n\n doc_ref: doc_ref.map(String::from),\n\n lic_ref: String::from(*lic_ref),\n\n },\n\n exception: None,\n\n },\n\n span: lt.span.start as u32..lt.span.end as u32,\n\n }));\n\n }\n\n _ => return make_err_for_token(last_token, lt.span),\n\n },\n\n Token::Plus => match last_token {\n\n Some(Token::SPDX(_)) => match expr_queue.last_mut().unwrap() {\n\n ExprNode::Req(ExpressionReq {\n\n req:\n\n LicenseReq {\n\n license: LicenseItem::SPDX { or_later, .. },\n\n ..\n\n },\n\n ..\n", "file_path": "src/parser.rs", "rank": 51, "score": 6.685843221703347 }, { "content": "}\n\n\n\nimpl PartialEq for Expression {\n\n fn eq(&self, o: &Self) -> bool {\n\n // The expressions can be semantically the same but not\n\n // syntactically the same, if the user wants to compare\n\n // the raw expressions they can just do a string compare\n\n if self.expr.len() != o.expr.len() {\n\n return false;\n\n }\n\n\n\n !self.expr.iter().zip(o.expr.iter()).any(|(a, b)| a != b)\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::Expression;\n\n\n\n #[test]\n", "file_path": "src/expression.rs", "rank": 52, "score": 6.651773977344346 }, { "content": " lic_ref: bl,\n\n },\n\n ) => match ad.cmp(bd) {\n\n cmp::Ordering::Equal => al.partial_cmp(bl),\n\n o => Some(o),\n\n },\n\n (Self::SPDX { .. }, Self::Other { .. }) => Some(cmp::Ordering::Less),\n\n (Self::Other { .. }, Self::SPDX { .. 
}) => Some(cmp::Ordering::Greater),\n\n }\n\n }\n\n}\n\n\n\nimpl PartialEq for LicenseItem {\n\n fn eq(&self, o: &Self) -> bool {\n\n if let Some(cmp::Ordering::Equal) = self.partial_cmp(o) {\n\n true\n\n } else {\n\n false\n\n }\n\n }\n", "file_path": "src/lib.rs", "rank": 53, "score": 6.304453303583661 }, { "content": "use spdx::{exception_id, license_id};\n\n\n\n#[test]\n", "file_path": "tests/spdx_list.rs", "rank": 54, "score": 6.175216804239588 }, { "content": " \"master\".to_owned()\n\n }\n\n Some(ut) => {\n\n if debug {\n\n eprintln!(\"Using tag {:?}\", ut);\n\n }\n\n ut\n\n }\n\n };\n\n\n\n let mut identifiers = std::fs::File::create(\"src/identifiers.rs\")?;\n\n\n\n writeln!(\n\n identifiers,\n\n \"\\\n\n/*\n\n * list fetched from https://github.com/spdx/license-list-data @ {}\n\n *\n\n * AUTO-GENERATED BY ./update\n\n * DO NOT MODIFY\n", "file_path": "update/src/main.rs", "rank": 56, "score": 6.078042205445854 }, { "content": "# Changelog\n\nAll notable changes to this project will be documented in this file.\n\n\n\nThe format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),\n\nand this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).\n\n\n\n## [Unreleased]\n\n\n\n## [0.2.0] - 2019-10-03\n\n### Added\n\n- Added a `Expression` which can parse and validate an SPDX license expression is\n\nboth syntactically and semantically correct, as well as evaluate the expression via\n\na user provided callback\n\n- Added an update exe for pulling new SDPX information, copied from https://github.com/rust-lang-nursery/license-exprs\n\n- Added support for some of the metadata available from the SPDX format, namely \"IsDeprecated\", \"IsFSFLibre\", and \"IsOSIApproved\"\n\n\n\n### Changed\n\n- Uhm...everything. 
I hope no one was using 0.1.0.\n\n- Use a better lexer, mostly copied from https://github.com/rust-lang-nursery/license-exprs/pull/29\n\n\n\n## [0.1.0] - 2019-09-02\n\n### Added\n\n- Initial add of spdx crate, based primarly on [`license-exprs`](https://github.com/rust-lang-nursery/license-exprs)\n\n\n\n[Unreleased]: https://github.com/EmbarkStudios/spdx/compare/0.2.0...HEAD\n\n[0.2.0]: https://github.com/EmbarkStudios/cargo-deny/compare/0.1.0...0.2.0\n\n[0.1.0]: https://github.com/EmbarkStudios/spdx/releases/tag/0.1.0\n", "file_path": "CHANGELOG.md", "rank": 57, "score": 5.598515003514273 }, { "content": " }\n\n }\n\n }\n\n\n\n if let Some(false) = result_stack.pop() {\n\n Err(failures)\n\n } else {\n\n Ok(())\n\n }\n\n }\n\n}\n\n\n\nimpl AsRef<str> for Expression {\n\n fn as_ref(&self) -> &str {\n\n &self.original\n\n }\n\n}\n\n\n\nimpl fmt::Debug for Expression {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n", "file_path": "src/expression.rs", "rank": 58, "score": 5.545294194793862 }, { "content": "/*\n\n * list fetched from https://github.com/spdx/license-list-data @ v3.6\n\n *\n\n * AUTO-GENERATED BY ./update\n\n * DO NOT MODIFY\n\n *\n\n * cargo run --manifest-path update/Cargo.toml -- v<version> > src/identifiers.rs\n\n */\n\n\n\npub const VERSION: &str = \"3.6\";\n\n\n\npub const LICENSES: &[(&str, u8)] = &[\n\n (\"0BSD\", 2),\n\n (\"AAL\", 2),\n\n (\"ADSL\", 0),\n\n (\"AFL-1.1\", 3),\n\n (\"AFL-1.2\", 3),\n\n (\"AFL-2.0\", 3),\n\n (\"AFL-2.1\", 3),\n\n (\"AFL-3.0\", 3),\n", "file_path": "src/identifiers.rs", "rank": 59, "score": 5.251325664707851 }, { "content": " | Some(Token::LicenseRef { .. 
})\n\n | Some(Token::Plus)\n\n | Some(Token::Exception(_))\n\n | Some(Token::CloseParen) => {\n\n while let Some(top) = op_stack.pop() {\n\n match top.op {\n\n Op::And | Op::Or => apply_op(top, &mut expr_queue)?,\n\n Op::Open => {\n\n // This is the only place we go back to the top of the outer loop,\n\n // so make sure we correctly record this token\n\n last_token = Some(Token::CloseParen);\n\n continue 'outer;\n\n }\n\n }\n\n }\n\n\n\n // We didn't have an opening parentheses if we get here\n\n return Err(ParseError {\n\n original,\n\n span: lt.span,\n", "file_path": "src/parser.rs", "rank": 60, "score": 5.2396140639531374 }, { "content": "# 🆔 spdx\n\n\n\n[![Build Status](https://github.com/EmbarkStudios/spdx/workflows/CI/badge.svg)](https://github.com/EmbarkStudios/spdx/actions?workflow=CI)\n\n[![Crates.io](https://img.shields.io/crates/v/spdx.svg)](https://crates.io/crates/spdx)\n\n[![Docs](https://docs.rs/spdx/badge.svg)](https://docs.rs/spdx)\n\n[![Contributor Covenant](https://img.shields.io/badge/contributor%20covenant-v1.4%20adopted-ff69b4.svg)](CODE_OF_CONDUCT.md)\n\n[![Embark](https://img.shields.io/badge/embark-open%20source-blueviolet.svg)](http://embark.rs)\n\n\n\nHelper crate for [SPDX](https://spdx.org/about) [license expressions](https://spdx.org/spdx-specification-21-web-version#h.jxpfx0ykyb60).\n\n\n\n## Usage\n\n\n\n```rust\n\nuse spdx::Expression;\n\n\n\nfn main() {\n\n let this_is_fine = Expression::parse(\"MIT OR Apache-2.0\").unwrap();\n\n\n\n assert!(this_is_fine.evaluate(|req| {\n\n if let spdx::LicenseItem::SPDX { id, .. } = req.license {\n\n // Both MIT and Apache-2.0 are OSI approved, so this expression\n\n // evaluates to true\n\n return id.is_osi_approved();\n\n }\n\n\n\n false\n\n }));\n\n\n\n assert!(!this_is_fine.evaluate(|req| {\n\n if let spdx::LicenseItem::SPDX { id, .. 
} = req.license {\n\n // This is saying we don't accept any licenses that are OSI approved\n\n // so the expression will evaluate to false as both sides of the OR\n\n // are now rejected\n\n return !id.is_osi_approved();\n\n }\n\n\n\n false\n\n }));\n\n\n\n // `NOPE` is not a valid SPDX license identifier, so this expression\n\n // will fail to parse\n\n let _this_is_not = Expression::parse(\"MIT OR NOPE\").unwrap_err();\n\n}\n\n```\n\n\n\n## Updating SPDX list\n\n\n\nYou can update the list of SPDX identifiers for licenses and exceptions by running the update program `cargo run --manifest-path=update/Cargo.toml -- v3.6` where `v3.6` is the tag in the [SPDX data repo](https://github.com/spdx/license-list-data).\n\n\n\n\n", "file_path": "README.md", "rank": 61, "score": 5.224785389997933 }, { "content": " /// Purpose: Identify any external SPDX documents referenced within this SPDX document.\n\n /// https://spdx.org/spdx-specification-21-web-version#h.h430e9ypa0j9\n\n doc_ref: Option<String>,\n\n /// Purpose: Provide a locally unique identifier to refer to licenses that are not found on the SPDX License List.\n\n /// https://spdx.org/spdx-specification-21-web-version#h.4f1mdlm\n\n lic_ref: String,\n\n },\n\n}\n\n\n\nimpl PartialOrd for LicenseItem {\n\n fn partial_cmp(&self, o: &Self) -> Option<cmp::Ordering> {\n\n match (self, o) {\n\n (Self::SPDX { id: a, .. }, Self::SPDX { id: b, .. 
}) => a.partial_cmp(b),\n\n (\n\n Self::Other {\n\n doc_ref: ad,\n\n lic_ref: al,\n\n },\n\n Self::Other {\n\n doc_ref: bd,\n", "file_path": "src/lib.rs", "rank": 62, "score": 5.194574101086977 }, { "content": "}\n\n\n\nimpl fmt::Display for LicenseItem {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {\n\n match self {\n\n LicenseItem::SPDX { id, or_later } => {\n\n id.name.fmt(f)?;\n\n\n\n if *or_later {\n\n f.write_str(\"+\")?;\n\n }\n\n\n\n Ok(())\n\n }\n\n LicenseItem::Other {\n\n doc_ref: Some(d),\n\n lic_ref: l,\n\n } => write!(f, \"DocumentRef-{}:LicenseRef-{}\", d, l),\n\n LicenseItem::Other {\n\n doc_ref: None,\n\n lic_ref: l,\n\n } => write!(f, \"LicenseRef-{}\", l),\n\n }\n\n }\n\n}\n\n\n\n/// Attempts to find a LicenseId for the string\n\n/// Note: any '+' at the end is trimmed\n\n#[inline]\n", "file_path": "src/lib.rs", "rank": 63, "score": 5.185247989751424 }, { "content": " };\n\n eprintln!(\"#licenses == {}\", licenses.len());\n\n\n\n let mut v = vec![];\n\n for lic in licenses.iter() {\n\n let lic = if let Value::Object(ref m) = *lic {\n\n m\n\n } else {\n\n bail!(\"Malformed JSON: {:?}\", lic)\n\n };\n\n if debug {\n\n eprintln!(\"{:?},{:?}\", get(&lic, \"licenseId\"), get(&lic, \"name\"));\n\n }\n\n\n\n let lic_id = get(&lic, \"licenseId\")?;\n\n if let Value::String(ref s) = lic_id {\n\n let mut flags = 0;\n\n\n\n if let Ok(Value::Bool(val)) = get(&lic, \"isDeprecatedLicenseId\") {\n\n if *val {\n", "file_path": "update/src/main.rs", "rank": 64, "score": 5.135024876842906 }, { "content": " }\n\n }\n\n}\n\n\n\nimpl fmt::Display for Reason {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n match self {\n\n Self::UnknownLicense => f.write_str(\"unknown license id\"),\n\n Self::UnknownException => f.write_str(\"unknown exception id\"),\n\n Self::InvalidCharacters => f.write_str(\"invalid character(s)\"),\n\n Self::UnclosedParens => f.write_str(\"unclosed parens\"),\n\n Self::UnopenedParens => 
f.write_str(\"unopened parens\"),\n\n Self::Empty => f.write_str(\"empty expression\"),\n\n Self::Unexpected(expected) => {\n\n if expected.len() > 1 {\n\n f.write_str(\"expected one of \")?;\n\n\n\n for (i, exp) in expected.iter().enumerate() {\n\n f.write_fmt(format_args!(\"{}`{}`\", if i > 0 { \", \" } else { \"\" }, exp))?;\n\n }\n", "file_path": "src/error.rs", "rank": 66, "score": 4.680370318427006 }, { "content": " for exc in exceptions.iter() {\n\n let exc = if let Value::Object(ref m) = *exc {\n\n m\n\n } else {\n\n bail!(\"Malformed JSON: {:?}\", exc)\n\n };\n\n if debug {\n\n eprintln!(\n\n \"{:?},{:?}\",\n\n get(&exc, \"licenseExceptionId\"),\n\n get(&exc, \"name\")\n\n );\n\n }\n\n\n\n let lic_exc_id = get(&exc, \"licenseExceptionId\")?;\n\n if let Value::String(ref s) = lic_exc_id {\n\n let flags = match get(&exc, \"isDeprecatedLicenseId\") {\n\n Ok(Value::Bool(val)) => {\n\n if *val {\n\n IS_DEPRECATED\n", "file_path": "update/src/main.rs", "rank": 67, "score": 4.5051180292622615 }, { "content": " f.write_str(\" here\")\n\n } else if !expected.is_empty() {\n\n f.write_fmt(format_args!(\"expected a `{}` here\", expected[0]))\n\n } else {\n\n f.write_str(\"the term was not expected here\")\n\n }\n\n }\n\n Self::SeparatedPlus => f.write_str(\"`+` must not follow whitespace\"),\n\n Self::UnknownTerm => f.write_str(\"unknown term\"),\n\n }\n\n }\n\n}\n\n\n\nimpl<'a> Error for ParseError<'a> {\n\n fn description(&self) -> &str {\n\n match *self {\n\n //ParseError::UnknownLicenseId(_) => \"unknown license or other term\",\n\n //ParseError::InvalidStructure(_) => \"invalid license expression\",\n\n _ => unimplemented!(),\n\n }\n\n }\n\n}\n", "file_path": "src/error.rs", "rank": 68, "score": 4.385562240396471 }, { "content": " Op::And | Op::Or => apply_op(top, &mut expr_queue)?,\n\n Op::Open => {\n\n return Err(ParseError {\n\n original,\n\n span: top.span,\n\n reason: Reason::UnclosedParens,\n\n });\n\n }\n\n }\n\n }\n\n\n\n // TODO: Investigate using 
https://github.com/oli-obk/quine-mc_cluskey to simplify\n\n // expressions, but not really critical. Just cool.\n\n\n\n Ok(Expression {\n\n original: original.to_owned(),\n\n expr: expr_queue,\n\n })\n\n }\n\n}\n", "file_path": "src/parser.rs", "rank": 69, "score": 4.352036384538618 }, { "content": "\n\n op_stack.push(OpAndSpan {\n\n op: new_op,\n\n span: lt.span,\n\n });\n\n }\n\n _ => return make_err_for_token(last_token, lt.span),\n\n },\n\n Token::OpenParen => match last_token {\n\n None | Some(Token::And) | Some(Token::Or) | Some(Token::OpenParen) => {\n\n op_stack.push(OpAndSpan {\n\n op: Op::Open,\n\n span: lt.span,\n\n });\n\n }\n\n _ => return make_err_for_token(last_token, lt.span),\n\n },\n\n Token::CloseParen => {\n\n match last_token {\n\n Some(Token::SPDX(_))\n", "file_path": "src/parser.rs", "rank": 70, "score": 4.158478371878549 }, { "content": "#![allow(clippy::nonminimal_bool, clippy::eq_op, clippy::cognitive_complexity)]\n\n\n\nuse spdx::LicenseItem;\n\n\n\nmacro_rules! exact {\n\n ($req:expr, $e:expr) => {\n\n spdx::Licensee::parse($e).unwrap().satisfies($req)\n\n };\n\n}\n\n\n\nmacro_rules! 
check {\n\n ($le:expr => [$($logical_expr:expr => $is_allowed:expr),+$(,)?]) => {\n\n let validated = spdx::Expression::parse($le).unwrap();\n\n\n\n $(\n\n // Evaluate the logical expression to determine if we are\n\n // expecting an Ok or Err\n\n let expected = $logical_expr;\n\n\n\n match validated.evaluate_with_failures($is_allowed) {\n\n Ok(_) => assert!(expected, stringify!($logical_expr)),\n\n Err(f) => assert!(!expected, \"{} {:?}\", stringify!($logical_expr), f),\n\n }\n\n )+\n\n };\n\n}\n\n\n\n#[test]\n", "file_path": "tests/check.rs", "rank": 71, "score": 4.040351365066867 }, { "content": "\n\n result_stack.pop().unwrap()\n\n }\n\n\n\n /// Just as with evaluate, the license expression is evaluated to see if\n\n /// enough license requirements in the expresssion are met for the evaluation\n\n /// to succeed, except this method also keeps track of each failed requirement\n\n /// and returns them, allowing for more detailed error reporting about precisely\n\n /// what terms in the expression caused the overall failure\n\n pub fn evaluate_with_failures<AF: FnMut(&LicenseReq) -> bool>(\n\n &self,\n\n mut allow_func: AF,\n\n ) -> Result<(), Vec<&ExpressionReq>> {\n\n let mut result_stack = SmallVec::<[bool; 8]>::new();\n\n let mut failures = Vec::new();\n\n\n\n // We store the expression as postfix, so just evaluate each license\n\n // requirement in the order it comes, and then combining the previous\n\n // results according to each operator as it comes\n\n for node in self.expr.iter() {\n", "file_path": "src/expression.rs", "rank": 72, "score": 3.905938540413595 }, { "content": "use failure::bail;\n\nuse serde_json::{map, Value};\n\nuse std::{\n\n env,\n\n io::{self, Write},\n\n process,\n\n};\n\n\n", "file_path": "update/src/main.rs", "rank": 73, "score": 3.9033814831545897 }, { "content": " UnclosedParens,\n\n /// A closing parens was unmatched with an opening parens\n\n UnopenedParens,\n\n /// The expression does not contain any valid terms\n\n Empty,\n\n 
/// Found an unexpected term, which wasn't one of the\n\n /// expected terms that is listed\n\n Unexpected(&'static [&'static str]),\n\n /// A + was found after whitespace, which is not allowed\n\n /// by the SPDX spec\n\n SeparatedPlus,\n\n /// When lexing, a term was found that was\n\n /// 1. Not a license short-id\n\n /// 2. Not an exception short-id\n\n /// 3. Not a document/license ref\n\n /// 4. Not an AND, OR, or WITH\n\n UnknownTerm,\n\n}\n\n\n\nimpl<'a> fmt::Display for ParseError<'a> {\n", "file_path": "src/error.rs", "rank": 74, "score": 3.8973889360609753 }, { "content": "## Contributing\n\n\n\nWe welcome community contributions to this project.\n\n\n\nPlease read our [Contributor Guide](CONTRIBUTING.md) for more information on how to get started.\n\n\n\n## License\n\n\n\nLicensed under either of\n\n\n\n* Apache License, Version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or http://www.apache.org/licenses/LICENSE-2.0)\n\n* MIT license ([LICENSE-MIT](LICENSE-MIT) or http://opensource.org/licenses/MIT)\n\n\n\nat your option.\n\n\n\n### Contribution\n\n\n\nUnless you explicitly state otherwise, any contribution intentionally submitted for inclusion in the work by you, as defined in the Apache-2.0 license, shall be dual licensed as above, without any additional terms or conditions.\n", "file_path": "README.md", "rank": 75, "score": 3.856410119217993 }, { "content": " *\n\n * cargo run --manifest-path update/Cargo.toml -- v<version> > src/identifiers.rs\n\n */\n\n\",\n\n upstream_tag\n\n )?;\n\n\n\n let licenses_json_uri = format!(\n\n \"https://raw.githubusercontent.com/spdx/license-list-data/{}/json/licenses.json\",\n\n upstream_tag\n\n );\n\n\n\n download(\n\n &licenses_json_uri,\n\n |json| {\n\n let licenses = get(&json, \"licenses\")?;\n\n let licenses = if let Value::Array(ref v) = licenses {\n\n v\n\n } else {\n\n bail!(\"Malformed JSON: {:?}\", licenses)\n", "file_path": "update/src/main.rs", "rank": 76, "score": 3.765837062509775 }, { "content": "\n\n 
let mut op_stack = SmallVec::<[OpAndSpan; 3]>::new();\n\n let mut expr_queue = SmallVec::<[ExprNode; 5]>::new();\n\n\n\n // Keep track of the last token to simplify validation of the token stream\n\n let mut last_token: Option<Token<'_>> = None;\n\n\n\n let apply_op = |op: OpAndSpan, q: &mut SmallVec<[ExprNode; 5]>| {\n\n let op = match op.op {\n\n Op::And => Operator::And,\n\n Op::Or => Operator::Or,\n\n _ => unreachable!(),\n\n };\n\n\n\n q.push(ExprNode::Op(op));\n\n Ok(())\n\n };\n\n\n\n let make_err_for_token = |last_token: Option<Token<'_>>, span: std::ops::Range<usize>| {\n\n let expected: &[&str] = match last_token {\n", "file_path": "src/parser.rs", "rank": 77, "score": 3.679317200551488 }, { "content": " fn eq() {\n\n let normal = Expression::parse(\"MIT OR Apache-2.0\").unwrap();\n\n let extra_parens = Expression::parse(\"(MIT OR (Apache-2.0))\").unwrap();\n\n let llvm_exc = Expression::parse(\"MIT OR Apache-2.0 WITH LLVM-exception\").unwrap();\n\n\n\n assert_eq!(normal, normal);\n\n assert_eq!(extra_parens, extra_parens);\n\n assert_eq!(llvm_exc, llvm_exc);\n\n\n\n assert_eq!(normal, extra_parens);\n\n\n\n assert_ne!(normal, llvm_exc);\n\n }\n\n}\n", "file_path": "src/expression.rs", "rank": 78, "score": 3.551079256184675 }, { "content": " fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n f.write_str(self.original)?;\n\n f.write_str(\"\\n\")?;\n\n\n\n for _ in 0..self.span.start {\n\n f.write_str(\" \")?;\n\n }\n\n\n\n // Mismatched parens have a slightly different output\n\n // than the other errors\n\n match &self.reason {\n\n Reason::UnclosedParens => f.write_fmt(format_args!(\"- {}\", Reason::UnclosedParens)),\n\n Reason::UnopenedParens => f.write_fmt(format_args!(\"^ {}\", Reason::UnopenedParens)),\n\n other => {\n\n for _ in self.span.start..self.span.end {\n\n f.write_str(\"^\")?;\n\n }\n\n\n\n f.write_fmt(format_args!(\" {}\", other))\n\n }\n", "file_path": "src/error.rs", "rank": 79, "score": 3.5076748454736095 }, { "content": " 
difference::Changeset::new(expected_str, &actual_str, \" \")\n\n );\n\n }\n\n )+\n\n\n\n if let Some((_, additional)) = reqs.next() {\n\n assert!(false, \"found additional requirement {}\", additional.req);\n\n }\n\n )+\n\n };\n\n}\n\n\n\nmacro_rules! err {\n\n ($text:expr => $reason:ident @ $range:expr) => {\n\n let act_err = spdx::Expression::parse($text).unwrap_err();\n\n\n\n let expected = ParseError {\n\n original: $text,\n\n span: $range,\n\n reason: spdx::error::Reason::$reason,\n", "file_path": "tests/validation.rs", "rank": 80, "score": 3.3857644220028194 }, { "content": " Ok(())\n\n },\n\n debug,\n\n )?;\n\n\n\n drop(identifiers);\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "update/src/main.rs", "rank": 81, "score": 3.355611369812136 }, { "content": " };\n\n\n\n if act_err != expected {\n\n let act_text = format!(\"{:?}\", act_err);\n\n let exp_text = format!(\"{:?}\", expected);\n\n assert!(\n\n false,\n\n \"{}\",\n\n difference::Changeset::new(&exp_text, &act_text, \"\")\n\n );\n\n }\n\n };\n\n\n\n ($text:expr => $unexpected:expr; $range:expr) => {\n\n let act_err = spdx::Expression::parse($text).unwrap_err();\n\n\n\n let expected = ParseError {\n\n original: $text,\n\n span: $range,\n\n reason: spdx::error::Reason::Unexpected($unexpected),\n", "file_path": "tests/validation.rs", "rank": 84, "score": 2.5480617104554195 }, { "content": " (\"Aladdin\", 0),\n\n (\"Apache-1.0\", 1),\n\n (\"Apache-1.1\", 3),\n\n (\"Apache-2.0\", 3),\n\n (\"Artistic-1.0\", 2),\n\n (\"Artistic-1.0-Perl\", 2),\n\n (\"Artistic-1.0-cl8\", 2),\n\n (\"Artistic-2.0\", 3),\n\n (\"BSD-1-Clause\", 0),\n\n (\"BSD-2-Clause\", 2),\n\n (\"BSD-2-Clause-FreeBSD\", 1),\n\n (\"BSD-2-Clause-NetBSD\", 0),\n\n (\"BSD-2-Clause-Patent\", 2),\n\n (\"BSD-3-Clause\", 3),\n\n (\"BSD-3-Clause-Attribution\", 0),\n\n (\"BSD-3-Clause-Clear\", 1),\n\n (\"BSD-3-Clause-LBNL\", 2),\n\n (\"BSD-3-Clause-No-Nuclear-License\", 0),\n\n (\"BSD-3-Clause-No-Nuclear-License-2014\", 0),\n\n 
(\"BSD-3-Clause-No-Nuclear-Warranty\", 0),\n", "file_path": "src/identifiers.rs", "rank": 85, "score": 2.365268267804897 }, { "content": " } else {\n\n 0\n\n }\n\n }\n\n _ => 0,\n\n };\n\n\n\n v.push((s, flags));\n\n } else {\n\n bail!(\"Malformed JSON: {:?}\", lic_exc_id)\n\n };\n\n }\n\n\n\n writeln!(identifiers, \"pub const EXCEPTIONS: &[(&str, u8)] = &[\")?;\n\n v.sort_by_key(|v| v.0);\n\n for (exc, flags) in v.iter() {\n\n writeln!(identifiers, \" (\\\"{}\\\", {}),\", exc, flags)?;\n\n }\n\n writeln!(identifiers, \"];\")?;\n\n\n", "file_path": "update/src/main.rs", "rank": 86, "score": 2.3161667018807104 }, { "content": "# Contributor Covenant Code of Conduct\n\n\n\n## Our Pledge\n\n\n\nIn the interest of fostering an open and welcoming environment, we as\n\ncontributors and maintainers pledge to making participation in our project and\n\nour community a harassment-free experience for everyone, regardless of age, body\n\nsize, disability, ethnicity, sex characteristics, gender identity and expression,\n\nlevel of experience, education, socio-economic status, nationality, personal\n\nappearance, race, religion, or sexual identity and orientation.\n\n\n\n## Our Standards\n\n\n\nExamples of behavior that contributes to creating a positive environment\n\ninclude:\n\n\n\n* Using welcoming and inclusive language\n\n* Being respectful of differing viewpoints and experiences\n\n* Gracefully accepting constructive criticism\n\n* Focusing on what is best for the community\n\n* Showing empathy towards other community members\n\n\n\nExamples of unacceptable behavior by participants include:\n\n\n\n* The use of sexualized language or imagery and unwelcome sexual attention or\n\n advances\n\n* Trolling, insulting/derogatory comments, and personal or political attacks\n\n* Public or private harassment\n\n* Publishing others' private information, such as a physical or electronic\n\n address, without explicit permission\n\n* Other conduct which could reasonably be considered 
inappropriate in a\n\n professional setting\n\n\n\n## Our Responsibilities\n\n\n\nProject maintainers are responsible for clarifying the standards of acceptable\n\nbehavior and are expected to take appropriate and fair corrective action in\n\nresponse to any instances of unacceptable behavior.\n\n\n\nProject maintainers have the right and responsibility to remove, edit, or\n\nreject comments, commits, code, wiki edits, issues, and other contributions\n\nthat are not aligned to this Code of Conduct, or to ban temporarily or\n\npermanently any contributor for other behaviors that they deem inappropriate,\n\nthreatening, offensive, or harmful.\n\n\n", "file_path": "CODE_OF_CONDUCT.md", "rank": 87, "score": 2.0970676866396447 }, { "content": " (\"libtiff\", 0),\n\n (\"mpich2\", 0),\n\n (\"psfrag\", 0),\n\n (\"psutils\", 0),\n\n (\"wxWindows\", 4),\n\n (\"xinetd\", 1),\n\n (\"xpp\", 0),\n\n (\"zlib-acknowledgement\", 0),\n\n];\n\n\n\npub const EXCEPTIONS: &[(&str, u8)] = &[\n\n (\"389-exception\", 0),\n\n (\"Autoconf-exception-2.0\", 0),\n\n (\"Autoconf-exception-3.0\", 0),\n\n (\"Bison-exception-2.2\", 0),\n\n (\"Bootloader-exception\", 0),\n\n (\"CLISP-exception-2.0\", 0),\n\n (\"Classpath-exception-2.0\", 0),\n\n (\"DigiRule-FOSS-exception\", 0),\n\n (\"FLTK-exception\", 0),\n", "file_path": "src/identifiers.rs", "rank": 88, "score": 2.0647929620968615 }, { "content": "### Making Changes\n\n\n\n1. Fork the repository.\n\n\n\n2. Create a new feature branch.\n\n\n\n3. Make your changes. Ensure that there are no build errors by running the project with your changes locally.\n\n\n\n4. Open a pull request with a name and description of what you did. You can read more about working with pull requests on GitHub [here](https://help.github.com/en/articles/creating-a-pull-request-from-a-fork).\n\n\n\n5. 
A maintainer will review your pull request and may ask you to make changes.\n\n\n\n## Code Guidelines\n\n\n\nYou can read about our standards and recommendations for working with Rust [here](https://github.com/EmbarkStudios/rust-ecosystem/blob/master/guidelines.md).\n\n\n\n## Licensing\n\n\n\nUnless otherwise specified, all Embark open source projects are licensed under a dual MIT OR Apache-2.0 license, allowing licensees to chose either at their option. You can read more in each project's respective README.\n\n\n\n## Code of Conduct\n\n\n\nPlease note that our projects are released with a [Contributor Code of Conduct](CODE_OF_CONDUCT.md) to ensure that they are welcoming places for everyone to contribute. By participating in any Embark open source project, you agree to abide by these terms.\n", "file_path": "CONTRIBUTING.md", "rank": 90, "score": 1.950352133960504 }, { "content": " for (i, node) in self.expr.iter().enumerate() {\n\n if i > 0 {\n\n f.write_str(\" \")?;\n\n }\n\n\n\n match node {\n\n ExprNode::Req(req) => write!(f, \"{}\", req.req)?,\n\n ExprNode::Op(Operator::And) => f.write_str(\"AND\")?,\n\n ExprNode::Op(Operator::Or) => f.write_str(\"OR\")?,\n\n }\n\n }\n\n\n\n Ok(())\n\n }\n\n}\n\n\n\nimpl fmt::Display for Expression {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n f.write_str(&self.original)\n\n }\n", "file_path": "src/expression.rs", "rank": 91, "score": 1.865602319768409 }, { "content": "# Embark Contributor Guidelines\n\n\n\nWelcome! This project is created by the team at [Embark Studios](https://embark.games). We're glad you're interested in contributing! We welcome contributions from people of all backgrounds who are interested in making great software with us.\n\n\n\nAt Embark, we aspire to empower everyone to create interactive experiences. 
To do this, we're exploring and pushing the boundaries of new technologies, and sharing our learnings with the open source community.\n\n\n\nIf you have ideas for collaboration, email us at [email protected].\n\n\n\nWe're also hiring full-time engineers to work with us in Stockholm! Check out our current job postings [here](https://embark.games/careers).\n\n\n\n## Issues\n\n\n\n### Feature Requests\n\n\n\nIf you have ideas or how to improve our projects, you can suggest features by opening a GitHub issue. Make sure to include details about the feature or change, and describe any uses cases it would enable.\n\n\n\nFeature requests will be tagged as `enhancement` and their status will be updated in the comments of the issue.\n\n\n\n### Bugs\n\n\n\nWhen reporting a bug or unexpected behaviour in a project, make sure your issue descibes steps to reproduce the behaviour, including the platform you were using, what steps you took, and any error messages.\n\n\n\nReproducible bugs will be tagged as `bug` and their status will be updated in the comments of the issue.\n\n\n\n### Wontfix\n\n\n\nIssues will be closed and tagged as `wontfix` if we decide that we do not wish to implement it, usually due to being misaligned with the project vision or out of scope. We will comment on the issue with more detailed reasoning.\n\n\n\n## Contribution Workflow\n\n\n\n### Open Issues\n\n\n\nIf you're ready to contribute, start by looking at our open issues tagged as [`help wanted`](../../issues?q=is%3Aopen+is%3Aissue+label%3A\"help+wanted\") or [`good first issue`](../../issues?q=is%3Aopen+is%3Aissue+label%3A\"good+first+issue\").\n\n\n\nYou can comment on the issue to let others know you're interested in working on it or to ask questions.\n\n\n", "file_path": "CONTRIBUTING.md", "rank": 92, "score": 1.8065091107243396 }, { "content": "use spdx::ParseError;\n\n\n\nmacro_rules! 
test_validate {\n\n (ok [$($text:expr => [$($expected:expr),+$(,)?]),+$(,)?]) => {\n\n $(\n\n let val_expr = spdx::Expression::parse($text).unwrap();\n\n let mut reqs = val_expr.requirements().enumerate();\n\n\n\n $(\n\n let actual = reqs.next().unwrap();\n\n println!(\"{:?}\", actual);\n\n\n\n let actual_str = format!(\"{}\", actual.1.req);\n\n let expected_str = $expected;\n\n\n\n if actual_str != expected_str {\n\n assert!(\n\n false,\n\n \"failed @ index {} - {}\",\n\n actual.0,\n", "file_path": "tests/validation.rs", "rank": 93, "score": 1.7019164599572805 }, { "content": "\n\n writeln!(identifiers)?;\n\n\n\n let exceptions_json_uri = format!(\n\n \"https://raw.githubusercontent.com/spdx/license-list-data/{}/json/exceptions.json\",\n\n upstream_tag\n\n );\n\n\n\n download(\n\n &exceptions_json_uri,\n\n |json| {\n\n let exceptions = get(&json, \"exceptions\")?;\n\n let exceptions = if let Value::Array(ref v) = exceptions {\n\n v\n\n } else {\n\n bail!(\"Malformed JSON: {:?}\", exceptions)\n\n };\n\n eprintln!(\"#exceptions == {}\", exceptions.len());\n\n\n\n let mut v = vec![];\n", "file_path": "update/src/main.rs", "rank": 94, "score": 1.695699731219333 }, { "content": "## Scope\n\n\n\nThis Code of Conduct applies both within project spaces and in public spaces\n\nwhen an individual is representing the project or its community. Examples of\n\nrepresenting a project or community include using an official project e-mail\n\naddress, posting via an official social media account, or acting as an appointed\n\nrepresentative at an online or offline event. Representation of a project may be\n\nfurther defined and clarified by project maintainers.\n\n\n\n## Enforcement\n\n\n\nInstances of abusive, harassing, or otherwise unacceptable behavior may be\n\nreported by contacting the project team at [email protected]. All\n\ncomplaints will be reviewed and investigated and will result in a response that\n\nis deemed necessary and appropriate to the circumstances. 
The project team is\n\nobligated to maintain confidentiality with regard to the reporter of an incident.\n\nFurther details of specific enforcement policies may be posted separately.\n\n\n\nProject maintainers who do not follow or enforce the Code of Conduct in good\n\nfaith may face temporary or permanent repercussions as determined by other\n\nmembers of the project's leadership.\n\n\n\n## Attribution\n\n\n\nThis Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4,\n\navailable at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html\n\n\n\n[homepage]: https://www.contributor-covenant.org\n\n\n\nFor answers to common questions about this code of conduct, see\n\nhttps://www.contributor-covenant.org/faq\n", "file_path": "CODE_OF_CONDUCT.md", "rank": 95, "score": 0.8591573588015935 } ]
Rust
src/cmd/export/runit.rs
dan-da/ultraman
f6b491f6f39e693b404b1c35daf49317d476774a
use super::base::{Exportable, Template}; use crate::cmd::export::ExportOpts; use crate::env::read_env; use crate::process::port_for; use crate::procfile::{Procfile, ProcfileEntry}; use handlebars::to_json; use serde_derive::Serialize; use serde_json::value::{Map, Value as Json}; use std::collections::HashMap; use std::env; use std::fs::File; use std::io::Write; use std::path::PathBuf; pub struct Exporter { pub procfile: Procfile, pub opts: ExportOpts, } #[derive(Serialize)] struct RunParams { work_dir: String, user: String, env_dir_path: String, process_command: String, } #[derive(Serialize)] struct LogRunParams { log_path: String, user: String, } impl Default for Exporter { fn default() -> Self { Exporter { procfile: Procfile { data: HashMap::new(), }, opts: ExportOpts { format: String::from(""), location: PathBuf::from("location"), app: None, formation: String::from("all=1"), log_path: None, run_path: None, port: None, template_path: None, user: None, env_path: PathBuf::from(".env"), procfile_path: PathBuf::from("Procfile"), root_path: Some(env::current_dir().unwrap()), timeout: String::from("5"), }, } } } impl Exporter { fn boxed(self) -> Box<Self> { Box::new(self) } pub fn boxed_new() -> Box<Self> { Self::default().boxed() } fn run_tmpl_path(&self) -> PathBuf { let mut path = self.project_root_path(); let tmpl_path = PathBuf::from("src/cmd/export/templates/runit/run.hbs"); path.push(tmpl_path); path } fn log_run_tmpl_path(&self) -> PathBuf { let mut path = self.project_root_path(); let tmpl_path = PathBuf::from("src/cmd/export/templates/runit/log/run.hbs"); path.push(tmpl_path); path } fn make_run_data(&self, pe: &ProcfileEntry, env_dir_path: &PathBuf) -> Map<String, Json> { let mut data = Map::new(); let rp = RunParams { work_dir: self.root_path().into_os_string().into_string().unwrap(), user: self.username(), env_dir_path: env_dir_path.clone().into_os_string().into_string().unwrap(), process_command: pe.command.to_string(), }; data.insert("run".to_string(), 
to_json(&rp)); data } fn make_log_run_data(&self, process_name: &str) -> Map<String, Json> { let mut data = Map::new(); let log_path = format!( "{}/{}", self.log_path().into_os_string().into_string().unwrap(), &process_name ); let lr = LogRunParams { log_path, user: self.username(), }; data.insert("log_run".to_string(), to_json(&lr)); data } fn write_env(&self, output_dir_path: &PathBuf, index: usize, con_index: usize) { let mut env = read_env(self.opts.env_path.clone()).expect("failed read .env"); let port = port_for( self.opts.env_path.clone(), self.opts.port.clone(), index, con_index + 1, ); env.insert("PORT".to_string(), port); for (key, val) in env.iter() { let path = output_dir_path.join(&key); let display = path.clone().into_os_string().into_string().unwrap(); self.clean(&path); let mut file = File::create(path.clone()).expect(&format!("Could not create file: {}", &display)); self.say(&format!("writing: {}", &display)); writeln!(&mut file, "{}", &val).expect(&format!("Could not write file: {}", &display)); } } } struct EnvTemplate { template_path: PathBuf, index: usize, con_index: usize, } impl Exportable for Exporter { fn export(&self) -> Result<(), Box<dyn std::error::Error>> { self.base_export().expect("failed execute base_export"); let mut index = 0; let mut clean_paths: Vec<PathBuf> = vec![]; let mut create_recursive_dir_paths: Vec<PathBuf> = vec![]; let mut tmpl_data: Vec<Template> = vec![]; let mut env_data: Vec<EnvTemplate> = vec![]; for (name, pe) in self.procfile.data.iter() { let con = pe.concurrency.get(); for n in 0..con { index += 1; let process_name = format!("{}-{}", &name, n + 1); let service_name = format!("{}-{}-{}", self.app(), &name, n + 1); let mut path_for_run = self.opts.location.clone(); let mut path_for_env = path_for_run.clone(); let mut path_for_log = path_for_run.clone(); let run_file_path = PathBuf::from(format!("{}/run", &service_name)); let env_dir_path = PathBuf::from(format!("{}/env", &service_name)); let log_dir_path = 
PathBuf::from(format!("{}/log", &service_name)); path_for_run.push(run_file_path); path_for_env.push(env_dir_path); path_for_log.push(log_dir_path); create_recursive_dir_paths.push(path_for_env.clone()); create_recursive_dir_paths.push(path_for_log.clone()); let run_data = self.make_run_data( pe, &PathBuf::from(format!("/etc/service/{}/env", &service_name)), ); let log_run_data = self.make_log_run_data(&process_name); clean_paths.push(path_for_run.clone()); tmpl_data.push(Template { template_path: self.run_tmpl_path(), data: run_data, output_path: path_for_run, }); path_for_log.push("run"); clean_paths.push(path_for_log.clone()); tmpl_data.push(Template { template_path: self.log_run_tmpl_path(), data: log_run_data, output_path: path_for_log, }); env_data.push(EnvTemplate { template_path: path_for_env.clone(), index, con_index: n, }); } } for path in clean_paths { self.clean(&path); } for dir_path in create_recursive_dir_paths { self.create_dir_recursive(&dir_path); } for tmpl in tmpl_data { self.write_template(tmpl); } for e in env_data { self.write_env(&e.template_path, e.index, e.con_index); } Ok(()) } fn ref_opts(&self) -> &ExportOpts { &self.opts } }
use super::base::{Exportable, Template}; use crate::cmd::export::ExportOpts; use crate::env::read_env; use crate::process::port_for; use crate::procfile::{Procfile, ProcfileEntry}; use handlebars::to_json; use serde_derive::Serialize; use serde_json::value::{Map, Value as Json}; use std::collections::HashMap; use std::env; use std::fs::File; use std::io::Write; use std::path::PathBuf; pub struct Exporter { pub procfile: Procfile, pub opts: ExportOpts, } #[derive(Serialize)] struct RunParams { work_dir: String, user: String, env_dir_path: String, process_command: String, } #[derive(Serialize)] struct LogRunParams { log_path: String, user: String, } impl Default for Exporter { fn default() -> Self { Exporter { procfile: Procfile { data: HashMap::new(), }, opts: ExportOpts { format: String::from(""), location: PathBuf::from("location"), app: None, formation: String::from("all=1"), log_path: None, run_path: None, port: None, template_path: None, user: None, env_path: PathBuf::from(".env"), procfile_path: PathBuf::from("Procfile"), root_path: Some(env::current_dir().unwrap()), timeout: String::from("5"), }, } } } impl Exporter { fn boxed(self) -> Box<Self> { Box::new(self) } pub fn boxed_new() -> Box<Self> { Self::default().boxed() } fn run_tmpl_path(&self) -> PathBuf { let mut path = self.project_root_path(); let tmpl_path = PathBuf::from("src/cmd/export/templates/runit/run.hbs"); path.push(tmpl_path); path } fn log_run_tmpl_path(&self) -> PathBuf { let mut path = self.project_root_path(); let tmpl_path = PathBuf::from("src/cmd/export/templates/runit/log/run.hbs"); path.push(tmpl_path); path } fn make_run_data(&self, pe: &ProcfileEntry, env_dir_path: &PathBuf) -> Map<String, Json> { let mut data = Map::new(); let rp = RunParams { work_dir: self.root_path().into_os_string().into_string().unwrap(), user: self.username(), env_dir_path: env_dir_path.clone().into_os_string().into_string().unwrap(), process_command: pe.command.to_string(), }; data.insert("run".to_string(), 
to_json(&rp)); data } fn make_log_run_data(&self, process_name: &str) -> Map<String, Json> { let mut data = Map::new(); let log_path = format!( "{}/{}", self.log_path().into_os_string().into_string().unwrap(), &process_name ); let lr = LogRunParams { log_path, user: self.username(), }; data.insert("log_run".to_string(), to_json(&lr)); data } fn write_env(&self, output_dir_path: &PathBuf, index: usize, con_index: usize) { let mut env = read_env(self.opts.env_path.clone()).expect("failed read .env"); let port =
; env.insert("PORT".to_string(), port); for (key, val) in env.iter() { let path = output_dir_path.join(&key); let display = path.clone().into_os_string().into_string().unwrap(); self.clean(&path); let mut file = File::create(path.clone()).expect(&format!("Could not create file: {}", &display)); self.say(&format!("writing: {}", &display)); writeln!(&mut file, "{}", &val).expect(&format!("Could not write file: {}", &display)); } } } struct EnvTemplate { template_path: PathBuf, index: usize, con_index: usize, } impl Exportable for Exporter { fn export(&self) -> Result<(), Box<dyn std::error::Error>> { self.base_export().expect("failed execute base_export"); let mut index = 0; let mut clean_paths: Vec<PathBuf> = vec![]; let mut create_recursive_dir_paths: Vec<PathBuf> = vec![]; let mut tmpl_data: Vec<Template> = vec![]; let mut env_data: Vec<EnvTemplate> = vec![]; for (name, pe) in self.procfile.data.iter() { let con = pe.concurrency.get(); for n in 0..con { index += 1; let process_name = format!("{}-{}", &name, n + 1); let service_name = format!("{}-{}-{}", self.app(), &name, n + 1); let mut path_for_run = self.opts.location.clone(); let mut path_for_env = path_for_run.clone(); let mut path_for_log = path_for_run.clone(); let run_file_path = PathBuf::from(format!("{}/run", &service_name)); let env_dir_path = PathBuf::from(format!("{}/env", &service_name)); let log_dir_path = PathBuf::from(format!("{}/log", &service_name)); path_for_run.push(run_file_path); path_for_env.push(env_dir_path); path_for_log.push(log_dir_path); create_recursive_dir_paths.push(path_for_env.clone()); create_recursive_dir_paths.push(path_for_log.clone()); let run_data = self.make_run_data( pe, &PathBuf::from(format!("/etc/service/{}/env", &service_name)), ); let log_run_data = self.make_log_run_data(&process_name); clean_paths.push(path_for_run.clone()); tmpl_data.push(Template { template_path: self.run_tmpl_path(), data: run_data, output_path: path_for_run, }); path_for_log.push("run"); 
clean_paths.push(path_for_log.clone()); tmpl_data.push(Template { template_path: self.log_run_tmpl_path(), data: log_run_data, output_path: path_for_log, }); env_data.push(EnvTemplate { template_path: path_for_env.clone(), index, con_index: n, }); } } for path in clean_paths { self.clean(&path); } for dir_path in create_recursive_dir_paths { self.create_dir_recursive(&dir_path); } for tmpl in tmpl_data { self.write_template(tmpl); } for e in env_data { self.write_env(&e.template_path, e.index, e.con_index); } Ok(()) } fn ref_opts(&self) -> &ExportOpts { &self.opts } }
port_for( self.opts.env_path.clone(), self.opts.port.clone(), index, con_index + 1, )
call_expression
[ { "content": "fn base_port(env_path: PathBuf, port: Option<String>) -> String {\n\n let env = read_env(env_path).unwrap();\n\n let default_port = String::from(\"5000\");\n\n\n\n if let Some(p) = port {\n\n p\n\n } else if let Some(p) = env.get(\"PORT\") {\n\n p.clone()\n\n } else if let Ok(p) = os_env::var(\"PORT\") {\n\n p\n\n } else {\n\n default_port\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n", "file_path": "src/process.rs", "rank": 0, "score": 205743.0749319796 }, { "content": "pub fn output(proc_name: &str, content: &str, index: Option<usize>, opt: &LogOpt) {\n\n let index = index.unwrap_or_else(|| 0);\n\n let log = Log::new(index, opt);\n\n log.output(proc_name, content)\n\n}\n\n\n", "file_path": "src/log/mod.rs", "rank": 1, "score": 186582.64001949097 }, { "content": "fn export_format(format: &str) -> ExportFormat {\n\n if format == \"upstart\" {\n\n ExportFormat::Upstart\n\n } else if format == \"systemd\" {\n\n ExportFormat::Systemd\n\n } else if format == \"supervisord\" {\n\n ExportFormat::Supervisord\n\n } else if format == \"runit\" {\n\n ExportFormat::Runit\n\n } else if format == \"launchd\" {\n\n ExportFormat::Launchd\n\n } else if format == \"daemon\" {\n\n ExportFormat::Daemon\n\n } else {\n\n panic!(\"Do not support format {}\", format)\n\n }\n\n}\n", "file_path": "src/cmd/export/mod.rs", "rank": 2, "score": 162201.46876335097 }, { "content": "pub fn read_env(filepath: PathBuf) -> Result<Env, Box<dyn std::error::Error>> {\n\n let mut env: Env = HashMap::new();\n\n\n\n if let Some(iter) = dotenv::from_path_iter(filepath.as_path()).ok() {\n\n for item in iter {\n\n let (key, val) = item.expect(\"Could not convert .env to tuple\");\n\n env.insert(key, val);\n\n }\n\n return Ok(env);\n\n }\n\n Ok(env)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use std::fs::File;\n\n use std::io::Write;\n\n use tempfile::tempdir;\n\n\n", "file_path": "src/env.rs", "rank": 3, "score": 149936.48799035716 }, { 
"content": "pub fn read_procfile(filepath: PathBuf) -> Result<Procfile, Box<dyn std::error::Error>> {\n\n let display = filepath.clone().into_os_string().into_string().unwrap();\n\n\n\n let file = match File::open(filepath) {\n\n Ok(f) => f,\n\n Err(why) => panic!(\"cloud't open {}: {}\", display, why),\n\n };\n\n\n\n parse_procfile(&file)\n\n}\n\n\n", "file_path": "src/procfile.rs", "rank": 4, "score": 138025.74855416876 }, { "content": "pub fn now() -> String {\n\n Local::now().format(\"%H:%M:%S\").to_string()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use anyhow;\n\n use std::error::Error;\n\n use std::fmt;\n\n\n\n #[test]\n\n fn test_output_when_coloring() -> anyhow::Result<()> {\n\n let log = Log::new(\n\n 0,\n\n &LogOpt {\n\n is_color: true,\n\n padding: 10,\n\n is_timestamp: true,\n\n },\n", "file_path": "src/log/mod.rs", "rank": 5, "score": 128545.62324133096 }, { "content": "pub fn port_for(\n\n env_path: PathBuf,\n\n port: Option<String>,\n\n index: usize,\n\n concurrency: usize,\n\n) -> String {\n\n let result =\n\n base_port(env_path, port).parse::<usize>().unwrap() + index * 100 + concurrency - 1;\n\n result.to_string()\n\n}\n\n\n", "file_path": "src/process.rs", "rank": 7, "score": 120076.900662943 }, { "content": "pub fn error(proc_name: &str, err: &dyn std::error::Error, is_padding: bool, opt: &LogOpt) {\n\n let content = &format!(\"error: {:?}\", err);\n\n if is_padding {\n\n output(proc_name, content, None, &opt);\n\n } else {\n\n let remake_opt = LogOpt {\n\n is_color: opt.is_color,\n\n padding: proc_name.len() + 1,\n\n is_timestamp: opt.is_timestamp,\n\n };\n\n output(proc_name, content, None, &remake_opt);\n\n }\n\n}\n\n\n", "file_path": "src/log/mod.rs", "rank": 8, "score": 115381.02638898534 }, { "content": "#[derive(Serialize)]\n\nstruct AppConfDataParams {\n\n user: String,\n\n work_dir: String,\n\n program: String,\n\n process_command: String,\n\n environment: String,\n\n stdout_logfile: String,\n\n stderr_logfile: 
String,\n\n}\n\n\n", "file_path": "src/cmd/export/supervisord.rs", "rank": 9, "score": 114743.9924600654 }, { "content": "fn ps_for(process_name: String, concurrency: usize) -> String {\n\n format!(\"{}.{}\", process_name, concurrency)\n\n}\n\n\n", "file_path": "src/process.rs", "rank": 10, "score": 111766.33012905452 }, { "content": "pub fn run(opts: RunOpts) {\n\n let app_name = opts.app_name;\n\n let procfile_path = opts.procfile_path;\n\n let env_path = opts.env_path;\n\n\n\n let procfile = read_procfile(procfile_path).expect(\"failed read Procfile\");\n\n let pe = procfile.find_by(&app_name);\n\n\n\n let mut read_env = read_env(env_path).expect(\"failed read .env\");\n\n read_env.insert(String::from(\"PORT\"), String::from(\"5000\"));\n\n read_env.insert(String::from(\"PS\"), String::from(&app_name));\n\n\n\n let shell = std_env::var(\"SHELL\").expect(\"$SHELL is not set\");\n\n\n\n unsafe {\n\n match fork() {\n\n Ok(fork_result) => match fork_result {\n\n ForkResult::Child => {\n\n let _ = Command::new(shell)\n\n .arg(\"-c\")\n", "file_path": "src/cmd/run.rs", "rank": 11, "score": 109824.77298001261 }, { "content": "pub fn run(opts: ExportOpts) -> Result<(), Box<dyn std::error::Error>> {\n\n let exporter = new(&opts);\n\n exporter.export().expect(\"failed ultraman export\");\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/cmd/export/mod.rs", "rank": 12, "score": 109691.1221451564 }, { "content": "type ProcfileData = HashMap<String, ProcfileEntry>;\n\n\n\n#[derive(Default)]\n\npub struct Procfile {\n\n pub data: ProcfileData,\n\n}\n\n\n\nimpl Procfile {\n\n pub fn padding(&self) -> usize {\n\n // e.g) <name>.<concurrency> |\n\n self.data\n\n .keys()\n\n .map(|name| name.len())\n\n .max()\n\n .expect(\"failed calculate padding\")\n\n + 3\n\n }\n\n\n\n pub fn process_len(&self) -> usize {\n\n self.data\n", "file_path": "src/procfile.rs", "rank": 13, "score": 100435.96400477519 }, { "content": "fn new(opts: &ExportOpts) -> Box<dyn Exportable> {\n\n let 
procfile_path = opts.procfile_path.clone();\n\n let display = procfile_path\n\n .clone()\n\n .into_os_string()\n\n .into_string()\n\n .unwrap();\n\n let procfile =\n\n read_procfile(procfile_path).expect(&format!(\"Could not read Procfile: {}\", display));\n\n let format = opts.format.as_str();\n\n\n\n match export_format(format) {\n\n ExportFormat::Upstart => {\n\n let mut expo = upstart::Exporter::boxed_new();\n\n // Read the formation from the command line option and always call it before process_len for the convenience of setting concurrency\n\n procfile.set_concurrency(&opts.formation);\n\n expo.procfile = procfile;\n\n expo.opts = opts.clone();\n\n expo\n\n }\n", "file_path": "src/cmd/export/mod.rs", "rank": 14, "score": 95400.74680240049 }, { "content": "#[derive(Serialize)]\n\nstruct AppConfParams {\n\n app: String,\n\n service_names: String,\n\n data: Vec<AppConfDataParams>,\n\n}\n\n\n\nimpl Default for Exporter {\n\n fn default() -> Self {\n\n Exporter {\n\n procfile: Procfile {\n\n data: HashMap::new(),\n\n },\n\n opts: ExportOpts {\n\n format: String::from(\"\"),\n\n location: PathBuf::from(\"location\"),\n\n app: None,\n\n formation: String::from(\"all=1\"),\n\n log_path: None,\n\n run_path: None,\n\n port: None,\n", "file_path": "src/cmd/export/supervisord.rs", "rank": 15, "score": 93435.21736460729 }, { "content": "pub fn kill_children(\n\n procs: Arc<Mutex<Vec<Arc<Mutex<Process>>>>>,\n\n signal: Signal,\n\n _code: i32,\n\n opts: DisplayOpts,\n\n) {\n\n for proc in procs.lock().unwrap().iter() {\n\n let proc = proc.lock().unwrap();\n\n let child = &proc.child;\n\n\n\n log::output(\n\n \"system\",\n\n &format!(\n\n \"sending {3} for {0:1$} at pid {2}\",\n\n &proc.name,\n\n opts.padding,\n\n &child.id(),\n\n Signal::as_str(signal),\n\n ),\n\n None,\n", "file_path": "src/signal.rs", "rank": 16, "score": 88675.46859893396 }, { "content": "pub fn terminate_gracefully(\n\n procs: Arc<Mutex<Vec<Arc<Mutex<Process>>>>>,\n\n signal: Signal,\n\n code: i32,\n\n 
timeout: u64,\n\n opts: DisplayOpts,\n\n) {\n\n let procs2 = Arc::clone(&procs);\n\n kill_children(procs, signal, code, opts.clone());\n\n\n\n // Wait for all children to stop or until the time comes to kill them all\n\n let start_time = Instant::now();\n\n while start_time.elapsed() < Duration::from_secs(timeout) {\n\n if procs2.lock().unwrap().len() == 0 {\n\n return;\n\n }\n\n\n\n let procs3 = Arc::clone(&procs2);\n\n process::check_for_child_termination(procs3, opts.clone());\n\n\n", "file_path": "src/signal.rs", "rank": 17, "score": 88675.46859893396 }, { "content": "pub fn run(opts: StartOpts) -> Result<(), Box<dyn std::error::Error>> {\n\n let mut proc_handles = vec![];\n\n let procs: Arc<Mutex<Vec<Arc<Mutex<process::Process>>>>> = Arc::new(Mutex::new(vec![]));\n\n\n\n let procfile = read_procfile(opts.procfile_path).expect(\"failed read Procfile\");\n\n // Read the formation from the command line option and always call it before process_len for the convenience of setting concurrency\n\n procfile.set_concurrency(&opts.formation);\n\n\n\n let process_len = procfile.process_len();\n\n let padding = procfile.padding();\n\n\n\n let barrier = Arc::new(Barrier::new(process_len + 1));\n\n let mut total = 0;\n\n let is_timestamp = !opts.is_no_timestamp;\n\n let display_opts = DisplayOpts {\n\n padding,\n\n is_timestamp,\n\n };\n\n\n\n for (name, pe) in procfile.data.iter() {\n", "file_path": "src/cmd/start.rs", "rank": 18, "score": 86776.99397493537 }, { "content": "pub fn handle_signal_thread(\n\n procs: Arc<Mutex<Vec<Arc<Mutex<Process>>>>>,\n\n timeout: u64,\n\n opts: DisplayOpts,\n\n) -> JoinHandle<()> {\n\n let result = thread::Builder::new()\n\n .name(String::from(\"handling signal\"))\n\n .spawn(move || {\n\n trap_signal_at_multithred(procs, timeout, opts).expect(\"failed trap signals\")\n\n })\n\n .expect(\"failed handle signals\");\n\n\n\n result\n\n}\n\n\n", "file_path": "src/signal.rs", "rank": 19, "score": 86327.2987543723 }, { "content": "pub fn 
check_for_child_termination(\n\n procs: Arc<Mutex<Vec<Arc<Mutex<Process>>>>>,\n\n opts: DisplayOpts,\n\n) -> Option<(Pid, i32)> {\n\n let child_termination_fn = Box::new(move |pid: Pid, message: &str| {\n\n procs.lock().unwrap().retain(|p| {\n\n let child_id = p.lock().unwrap().child.id() as i32;\n\n if Pid::from_raw(child_id) == pid {\n\n let proc = p.lock().unwrap();\n\n let proc_name = &proc.name;\n\n let proc_index = proc.index;\n\n log::output(\n\n &proc_name,\n\n &message,\n\n Some(proc_index),\n\n &LogOpt {\n\n is_color: true,\n\n padding: opts.padding,\n\n is_timestamp: opts.is_timestamp,\n\n },\n", "file_path": "src/process.rs", "rank": 20, "score": 86327.2987543723 }, { "content": "pub fn build_check_for_child_termination_thread(\n\n procs: Arc<Mutex<Vec<Arc<Mutex<Process>>>>>,\n\n opts: DisplayOpts,\n\n) -> JoinHandle<()> {\n\n thread::Builder::new()\n\n .name(String::from(format!(\"check child terminated\")))\n\n .spawn(move || {\n\n loop {\n\n // Waiting for the end of any one child process\n\n let procs2 = Arc::clone(&procs);\n\n let procs3 = Arc::clone(&procs);\n\n if let Some((_, code)) = check_for_child_termination(procs2, opts.clone()) {\n\n signal::kill_children(procs3, Signal::SIGTERM, code, opts.clone())\n\n }\n\n // check_for_child_termination returns immediately, so let's sleep\n\n // a little to avoid pegging CPU.\n\n std::thread::sleep(std::time::Duration::from_millis(10));\n\n }\n\n })\n\n .expect(\"failed check child terminated\")\n\n}\n\n\n", "file_path": "src/process.rs", "rank": 21, "score": 82194.76909823173 }, { "content": "pub trait Exportable {\n\n fn export(&self) -> Result<(), Box<dyn std::error::Error>>;\n\n //https://yajamon.hatenablog.com/entry/2018/01/30/202849\n\n fn ref_opts(&self) -> &ExportOpts;\n\n\n\n fn base_export(&self) -> Result<(), Box<dyn std::error::Error>> {\n\n let opts = self.ref_opts();\n\n let location = &opts.location;\n\n let display = location.clone().into_os_string().into_string().unwrap();\n\n 
create_dir_all(&location).expect(&format!(\"Could not create: {}\", display));\n\n\n\n // self.chown(&username, &self.log_path());\n\n // self.chown(&username, &self.run_path());\n\n Ok(())\n\n }\n\n\n\n fn app(&self) -> String {\n\n self.ref_opts()\n\n .app\n\n .clone()\n", "file_path": "src/cmd/export/base.rs", "rank": 22, "score": 75240.02605502398 }, { "content": "#[derive(Serialize)]\n\nstruct LaunchdParams {\n\n label: String,\n\n env: Vec<EnvParameter>,\n\n command_args: Vec<String>,\n\n stdout_path: String,\n\n stderr_path: String,\n\n user: String,\n\n work_dir: String,\n\n}\n\n\n\nimpl Default for Exporter {\n\n fn default() -> Self {\n\n Exporter {\n\n procfile: Procfile {\n\n data: HashMap::new(),\n\n },\n\n opts: ExportOpts {\n\n format: String::from(\"\"),\n\n location: PathBuf::from(\"location\"),\n\n app: None,\n", "file_path": "src/cmd/export/launchd.rs", "rank": 23, "score": 70272.6144241625 }, { "content": "#[derive(Serialize)]\n\nstruct ProcessParams {\n\n service_name: String,\n\n env: Vec<EnvParameter>,\n\n user: String,\n\n work_dir: String,\n\n pid_path: String,\n\n command: String,\n\n command_args: String,\n\n log_path: String,\n\n}\n\n\n\nimpl Default for Exporter {\n\n fn default() -> Self {\n\n Exporter {\n\n procfile: Procfile {\n\n data: HashMap::new(),\n\n },\n\n opts: ExportOpts {\n\n format: String::from(\"\"),\n\n location: PathBuf::from(\"location\"),\n", "file_path": "src/cmd/export/daemon.rs", "rank": 24, "score": 70272.6144241625 }, { "content": "#[derive(Serialize)]\n\nstruct ProcessParams {\n\n app: String,\n\n name: String,\n\n port: String,\n\n env_without_port: Vec<EnvParameter>,\n\n setuid: String,\n\n chdir: String,\n\n exec: String,\n\n}\n\n\n\n// http://takoyaking.hatenablog.com/entry/anonymous_lifetime\n\nimpl Exporter {\n\n fn boxed(self) -> Box<Self> {\n\n Box::new(self)\n\n }\n\n\n\n pub fn boxed_new() -> Box<Self> {\n\n Self::default().boxed()\n\n }\n\n\n", "file_path": "src/cmd/export/upstart.rs", "rank": 25, 
"score": 70272.6144241625 }, { "content": "#[derive(Serialize)]\n\nstruct MasterParams {\n\n user: String,\n\n log_dir_path: String,\n\n run_dir_path: String,\n\n}\n\n\n", "file_path": "src/cmd/export/daemon.rs", "rank": 26, "score": 70272.6144241625 }, { "content": "#[derive(Serialize)]\n\nstruct ProcessMasterParams {\n\n app: String,\n\n}\n\n\n", "file_path": "src/cmd/export/daemon.rs", "rank": 28, "score": 68211.05719407943 }, { "content": "#[derive(Serialize)]\n\nstruct ProcessMasterParams {\n\n app: String,\n\n}\n\n\n", "file_path": "src/cmd/export/upstart.rs", "rank": 29, "score": 68211.05719407943 }, { "content": "#[derive(Serialize)]\n\nstruct ProcessServiceParams {\n\n app: String,\n\n user: String,\n\n work_dir: String,\n\n port: String,\n\n process_name: String,\n\n process_command: String,\n\n env_without_port: Vec<EnvParameter>,\n\n timeout: String,\n\n}\n\n\n\nimpl Default for Exporter {\n\n fn default() -> Self {\n\n Exporter {\n\n procfile: Procfile {\n\n data: HashMap::new(),\n\n },\n\n opts: ExportOpts {\n\n format: String::from(\"\"),\n\n location: PathBuf::from(\"location\"),\n", "file_path": "src/cmd/export/systemd.rs", "rank": 30, "score": 68211.05719407943 }, { "content": "#[derive(Serialize)]\n\nstruct MasterTargetParams {\n\n service_names: String,\n\n}\n\n\n", "file_path": "src/cmd/export/systemd.rs", "rank": 31, "score": 68211.05719407943 }, { "content": "// https://stackoverflow.com/questions/34439977/lifetime-of-variables-passed-to-a-new-thread\n\npub fn build_exec_and_output_thread<F>(yielder: F) -> JoinHandle<()>\n\nwhere\n\n F: FnOnce() + Sync + Send + 'static,\n\n{\n\n thread::Builder::new()\n\n .name(String::from(\"handle exec and output\"))\n\n .spawn(move || {\n\n yielder();\n\n })\n\n .expect(\"failed exec and output\")\n\n}\n\n\n", "file_path": "src/process.rs", "rank": 33, "score": 66494.52119850478 }, { "content": "fn parse_procfile(file: &File) -> Result<Procfile, Box<dyn std::error::Error>> {\n\n let procfile_re = 
Regex::new(PROCFILE_REGEXP).unwrap();\n\n let mut pf = Procfile {\n\n data: HashMap::<String, ProcfileEntry>::new(),\n\n };\n\n let buf_reader = BufReader::new(file);\n\n\n\n for line in buf_reader.lines() {\n\n for cap in procfile_re.captures_iter(&line.unwrap()) {\n\n let name = (&cap[1]).to_string();\n\n pf.data.insert(\n\n name,\n\n ProcfileEntry {\n\n command: (&cap[2]).to_string(),\n\n concurrency: Cell::new(1),\n\n },\n\n );\n\n }\n\n }\n\n\n", "file_path": "src/procfile.rs", "rank": 34, "score": 63170.98693581867 }, { "content": "enum ExportFormat {\n\n Upstart,\n\n Systemd,\n\n Supervisord,\n\n Runit,\n\n Launchd,\n\n Daemon,\n\n}\n\n\n", "file_path": "src/cmd/export/mod.rs", "rank": 35, "score": 57537.99687156777 }, { "content": "fn main() {\n\n let page = Roff::new(\"ultraman\", 1, \"December 2020\", \"Ultraman 0.1.0\", \"Ultraman Manual\")\n\n .section(\"name\", &[\"ultraman - modify files by randomly changing bits\"])\n\n .section(\"synopsis\", &[\n\n lf(&[bold(\"ultraman\"), \" \".into(), bold(\"start\"), \" \".into(), \"[process]\".into()]),\n\n lf(&[bold(\"ultraman\"), \" \".into(), bold(\"run\"), \" \".into(), \"<command>\".into()]),\n\n lf(&[bold(\"ultraman\"), \" \".into(), bold(\"export\"),\" \".into(), \"<format>\".into(), \" \".into(), \"[location]\".into()]),\n\n ])\n\n .section(\"description\", &[\n\n bold(\"ultraman\"), \" \".into(),\n\n \"is a manager for Procfile-based applications. 
Its aim is to abstract away the details of the Procfile format, and allow you to either run your application directly or export it to some other process management format.\".into(),\n\n ])\n\n .section(\"start\", &[\n\n p(&[\"If no additional parameters are passed\".into(), \", \".into(), bold(\"ultraman\"), \" \".into(), \"will run one instance of each type of process defined in your Procfile.\".into()]),\n\n p(&[\"The following options control how the application is run:\"]),\n\n p(&[\n\n list(\n\n &[bold(\"-m\"), \", \".into(), bold(\"--formation\"), \" \".into(), \"[default: all=1]\".into()],\n\n &[\"Specify the number of each process type to run. The value passed in should be in the format process=num,process=num\"]\n\n )\n", "file_path": "man/main.rs", "rank": 36, "score": 47794.60989005606 }, { "content": "pub trait Printable {\n\n fn output(&self, proc_name: &str, content: &str);\n\n fn error(&self, proc_name: &str, err: &dyn std::error::Error);\n\n}\n\n\n\npub struct Log;\n\n\n\n#[derive(Clone)]\n\npub struct LogOpt {\n\n pub is_color: bool,\n\n pub padding: usize,\n\n pub is_timestamp: bool,\n\n}\n\n\n\nimpl Log {\n\n pub fn new(index: usize, opt: &LogOpt) -> Box<dyn Printable + Sync + Send> {\n\n if opt.is_color {\n\n let mut color = color::Log::boxed_new();\n\n color.index = index;\n\n color.opts = Self::display_opts(opt);\n", "file_path": "src/log/mod.rs", "rank": 37, "score": 45201.4871402249 }, { "content": "fn trap_signal_at_multithred(\n\n procs: Arc<Mutex<Vec<Arc<Mutex<Process>>>>>,\n\n timeout: u64,\n\n opts: DisplayOpts,\n\n) -> Result<(), Box<dyn std::error::Error>> {\n\n let signals = Signals::new(&[SIGALRM, SIGHUP, SIGINT, SIGTERM])?;\n\n\n\n for sig in signals.forever() {\n\n match sig {\n\n SIGINT => {\n\n // 2 is 「^C」 of 「^Csystem | SIGINT received, starting shutdown」\n\n log::output(\n\n \"system\",\n\n \"SIGINT received, starting shutdown\",\n\n None,\n\n &LogOpt {\n\n is_color: false,\n\n padding: opts.padding - 2,\n\n is_timestamp: 
opts.is_timestamp,\n\n },\n", "file_path": "src/signal.rs", "rank": 38, "score": 45103.181274249735 }, { "content": "fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n let opt = Opt::from_args();\n\n\n\n if let Some(subcommand) = opt.subcommands {\n\n match subcommand {\n\n Ultraman::Start(opts) => cmd::start::run(opts).expect(\"failed ultraman start\"),\n\n Ultraman::Run(opts) => cmd::run::run(opts),\n\n Ultraman::Export(opts) => cmd::export::run(opts).expect(\"failed ultraman export\"),\n\n }\n\n }\n\n\n\n Ok(())\n\n}\n", "file_path": "src/main.rs", "rank": 39, "score": 34209.84221580197 }, { "content": " #[test]\n\n fn test_read_env() -> anyhow::Result<()> {\n\n let dir = tempdir()?;\n\n let file_path = dir.path().join(\".env\");\n\n let mut file = File::create(file_path.clone())?;\n\n writeln!(\n\n file,\n\n r#\"\n\nPORT=5000\n\nPS=1\n\n \"#\n\n )\n\n .unwrap();\n\n\n\n let result = read_env(file_path).expect(\"failed read .env\");\n\n\n\n assert_eq!(result.get(\"PORT\").unwrap(), \"5000\");\n\n assert_eq!(result.get(\"PS\").unwrap(), \"1\");\n\n assert_eq!(result.get(\"CARGO_PKG_VERSION\"), None);\n\n\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/env.rs", "rank": 40, "score": 31782.315040857284 }, { "content": "use dotenv;\n\nuse std::collections::HashMap;\n\nuse std::path::PathBuf;\n\n\n\npub type Env = HashMap<String, String>;\n\n\n", "file_path": "src/env.rs", "rank": 41, "score": 31782.18228142726 }, { "content": "use crate::cmd::export::ExportOpts;\n\nuse crate::cmd::run::RunOpts;\n\nuse crate::cmd::start::StartOpts;\n\nuse structopt::{clap, StructOpt};\n\n\n\n#[derive(StructOpt, Debug)]\n\n#[structopt(long_version(option_env!(\"LONG_VERSION\").unwrap_or(env!(\"CARGO_PKG_VERSION\"))))]\n\n#[structopt(setting(clap::AppSettings::ColoredHelp))]\n\npub struct Opt {\n\n #[structopt(subcommand)]\n\n pub subcommands: Option<Ultraman>,\n\n}\n\n\n\n#[derive(StructOpt, Debug)]\n\n#[structopt(\n\n name = \"ultraman\",\n\n about = \"Ultraman is a manager for 
Procfile-based applications. Its aim is to abstract away the details of the Procfile format, and allow you to either run your application directly or export it to some other process management format.\"\n\n)]\n\npub enum Ultraman {\n\n #[structopt(name = \"start\", about = \"Start the application\")]\n", "file_path": "src/opt.rs", "rank": 42, "score": 31653.30631494417 }, { "content": " pub is_timestamp: bool,\n\n}\n\n\n\nimpl Default for DisplayOpts {\n\n fn default() -> Self {\n\n DisplayOpts {\n\n padding: 0,\n\n is_timestamp: true,\n\n }\n\n }\n\n}\n", "file_path": "src/opt.rs", "rank": 43, "score": 31649.102973041987 }, { "content": " Start(StartOpts),\n\n\n\n #[structopt(\n\n name = \"run\",\n\n about = \"Run a command using your application's environment\"\n\n )]\n\n Run(RunOpts),\n\n\n\n #[structopt(\n\n name = \"export\",\n\n about = \"Export the application to another process management format\"\n\n )]\n\n Export(ExportOpts),\n\n}\n\n\n\n///// Options not related to commands /////\n\n\n\n#[derive(Clone)]\n\npub struct DisplayOpts {\n\n pub padding: usize,\n", "file_path": "src/opt.rs", "rank": 44, "score": 31648.440456016615 }, { "content": "use regex::Regex;\n\nuse std::cell::Cell;\n\nuse std::collections::HashMap;\n\nuse std::fs::File;\n\nuse std::io::{BufRead, BufReader};\n\nuse std::path::PathBuf;\n\n\n\nconst PROCFILE_REGEXP: &'static str = r\"\\A([A-Za-z0-9_-]+):\\s*(.+)$\";\n\nconst DEFAULT_FORMATION: &'static str = \"all=1\";\n\n\n\npub struct ProcfileEntry {\n\n pub command: String,\n\n pub concurrency: Cell<usize>,\n\n}\n\n\n", "file_path": "src/procfile.rs", "rank": 45, "score": 31490.44663662934 }, { "content": " .values()\n\n .map(|pe| pe.concurrency.get())\n\n .fold(0, |sum, a| sum + a)\n\n }\n\n\n\n pub fn find_by(&self, name: &str) -> &ProcfileEntry {\n\n let pe = self\n\n .data\n\n .get(name)\n\n .expect(&format!(\"Can't find process called: {}\", name));\n\n pe\n\n }\n\n\n\n pub fn set_concurrency(&self, formation: &str) {\n\n // e.g.) 
all=1\n\n if formation == DEFAULT_FORMATION {\n\n return ();\n\n }\n\n\n\n // e.g.) all=2\n", "file_path": "src/procfile.rs", "rank": 46, "score": 31486.45220011864 }, { "content": " }\n\n\n\n for (name, pe) in self.data.iter() {\n\n let pe_name = name;\n\n let concurrency = formation_data\n\n .get(&pe_name.to_string())\n\n .unwrap_or_else(|| &0)\n\n .clone();\n\n pe.concurrency.set(concurrency);\n\n }\n\n }\n\n\n\n fn parse_formation(&self, formation: &str) -> HashMap<String, usize> {\n\n let mut fm = formation.to_string();\n\n self.remove_whitespace(&mut fm);\n\n\n\n let pairs: Vec<&str> = fm.split(\",\").collect();\n\n let mut result = HashMap::<String, usize>::new();\n\n\n\n for pair in pairs {\n", "file_path": "src/procfile.rs", "rank": 47, "score": 31485.339972308542 }, { "content": " let data: Vec<&str> = formation.split(\"=\").collect();\n\n let name = data[0];\n\n if name == \"all\" {\n\n let concurrency = data[1].parse::<usize>().unwrap();\n\n for (_, pe) in self.data.iter() {\n\n pe.concurrency.set(concurrency);\n\n }\n\n return ();\n\n }\n\n\n\n let formation_data = self.parse_formation(formation);\n\n\n\n // https://doc.rust-lang.org/std/collections/hash_map/struct.HashMap.html#examples-14\n\n let formation_apps = formation_data.keys().clone().collect::<Vec<_>>();\n\n let valid_formation = formation_apps\n\n .iter()\n\n .all(|key| self.data.contains_key(key.clone()));\n\n\n\n if valid_formation == false {\n\n panic!(\"Do not support formation: {}\", formation);\n", "file_path": "src/procfile.rs", "rank": 48, "score": 31480.39128257008 }, { "content": " let data: Vec<&str> = pair.split(\"=\").collect();\n\n let name = data[0];\n\n let concurrency = data[1];\n\n result.insert(String::from(name), concurrency.parse::<usize>().unwrap());\n\n }\n\n\n\n result\n\n }\n\n\n\n fn remove_whitespace(&self, s: &mut String) {\n\n s.retain(|c| !c.is_whitespace());\n\n }\n\n}\n\n\n", "file_path": "src/procfile.rs", "rank": 49, "score": 31479.511723445354 }, { 
"content": " r#\"\n\napp: ./app.sh\n\nweb: ./web.sh\n\n \"#\n\n )\n\n .expect(\"failed write temp Procfile\");\n\n\n\n let read_file = File::open(procfile_path)?;\n\n let result = parse_procfile(&read_file).expect(\"failed parse_procfile\");\n\n\n\n assert!(result.data.contains_key(\"app\"));\n\n assert!(result.data.contains_key(\"web\"));\n\n assert_eq!(result.data.get(\"app\").unwrap().command, \"./app.sh\");\n\n assert_eq!(result.data.get(\"app\").unwrap().concurrency.get(), 1);\n\n assert_eq!(result.data.get(\"web\").unwrap().command, \"./web.sh\");\n\n assert_eq!(result.data.get(\"web\").unwrap().concurrency.get(), 1);\n\n\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/procfile.rs", "rank": 50, "score": 31478.795670522522 }, { "content": " data: hashmap! [\n\n String::from(\"app\") => ProcfileEntry {\n\n command: String::from(\"./app.sh\"),\n\n concurrency: Cell::new(1),\n\n },\n\n String::from(\"web\") => ProcfileEntry {\n\n command: String::from(\"./app.sh\"),\n\n concurrency: Cell::new(1),\n\n }\n\n ],\n\n }\n\n }\n\n\n\n #[test]\n\n fn test_padding() -> anyhow::Result<()> {\n\n let pf = create_procfile();\n\n let result = pf.padding();\n\n assert_eq!(result, 6);\n\n\n\n Ok(())\n", "file_path": "src/procfile.rs", "rank": 51, "score": 31477.232797412304 }, { "content": " assert_eq!(pf.data.get(\"web\").unwrap().concurrency.get(), 10);\n\n\n\n Ok(())\n\n }\n\n\n\n #[test]\n\n #[should_panic(expected = \"Do not support formation: hoge=1,fuga=2\")]\n\n fn test_set_concurrency_when_panic() {\n\n let formation = \"hoge=1,fuga=2\";\n\n let pf = create_procfile();\n\n pf.set_concurrency(formation);\n\n }\n\n\n\n #[test]\n\n fn test_parse_procfile() -> anyhow::Result<()> {\n\n let dir = tempdir()?;\n\n let procfile_path = dir.path().join(\"Procfile\");\n\n let mut file = File::create(procfile_path.clone())?;\n\n writeln!(\n\n file,\n", "file_path": "src/procfile.rs", "rank": 52, "score": 31475.01284453442 }, { "content": "\n\n #[test]\n\n fn test_set_concurrency() -> 
anyhow::Result<()> {\n\n let formation = \"app=2, web=3\";\n\n let pf = create_procfile();\n\n\n\n pf.set_concurrency(formation);\n\n assert_eq!(pf.data.get(\"app\").unwrap().concurrency.get(), 2);\n\n assert_eq!(pf.data.get(\"web\").unwrap().concurrency.get(), 3);\n\n\n\n Ok(())\n\n }\n\n\n\n #[test]\n\n fn test_set_concurrency_all() -> anyhow::Result<()> {\n\n let formation = \"all=10\";\n\n let pf = create_procfile();\n\n\n\n pf.set_concurrency(formation);\n\n assert_eq!(pf.data.get(\"app\").unwrap().concurrency.get(), 10);\n", "file_path": "src/procfile.rs", "rank": 53, "score": 31473.438057151783 }, { "content": " Ok(pf)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use std::io::Write;\n\n use tempfile::tempdir;\n\n\n\n // https://www.366service.com/jp/qa/3b42bd30380c728939a2e80b42d430a6\n\n macro_rules! hashmap {\n\n ($( $key: expr => $val: expr), *) =>{{\n\n let mut map = ::std::collections::HashMap::new();\n\n $( map.insert($key, $val); )*\n\n map\n\n }}\n\n }\n\n\n\n fn create_procfile() -> Procfile {\n\n Procfile {\n", "file_path": "src/procfile.rs", "rank": 54, "score": 31473.257010275353 }, { "content": " }\n\n\n\n #[test]\n\n fn test_find_by() -> anyhow::Result<()> {\n\n let pf = create_procfile();\n\n let result = pf.find_by(\"web\");\n\n assert_eq!(result.command, String::from(\"./app.sh\"));\n\n assert_eq!(result.concurrency.get(), 1);\n\n\n\n Ok(())\n\n }\n\n\n\n #[test]\n\n fn test_process_len() -> anyhow::Result<()> {\n\n let pf = create_procfile();\n\n let result = pf.process_len();\n\n assert_eq!(result, 2);\n\n\n\n Ok(())\n\n }\n", "file_path": "src/procfile.rs", "rank": 55, "score": 31471.69530677709 }, { "content": "}\n\n\n\nimpl PipeStreamReader {\n\n pub fn new(mut stream: Box<dyn io::Read + Send>) -> PipeStreamReader {\n\n PipeStreamReader {\n\n lines: {\n\n let (tx, rx) = unbounded();\n\n\n\n spawn(move || {\n\n let mut buf = Vec::new();\n\n let mut byte = [0u8];\n\n loop {\n\n match stream.read(&mut byte) {\n\n Ok(0) => 
{\n\n let _ = tx.send(Ok(PipedLine::EOF));\n\n break;\n\n }\n\n Ok(_) => {\n\n if byte[0] == 0x0A {\n\n tx.send(match String::from_utf8(buf.clone()) {\n", "file_path": "src/stream_read.rs", "rank": 56, "score": 30125.82848174376 }, { "content": "use crossbeam_channel::{unbounded, Receiver};\n\nuse std::io;\n\nuse std::string::FromUtf8Error;\n\nuse std::thread::spawn;\n\n\n\n#[derive(Debug)]\n\npub enum PipeError {\n\n IO(io::Error),\n\n NotUtf8(FromUtf8Error),\n\n}\n\n\n\n#[derive(Debug)]\n\npub enum PipedLine {\n\n Line(String),\n\n EOF,\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct PipeStreamReader {\n\n pub lines: Receiver<Result<PipedLine, PipeError>>,\n", "file_path": "src/stream_read.rs", "rank": 57, "score": 30123.786817540884 }, { "content": "}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use anyhow;\n\n use std::process::{Command, Stdio};\n\n\n\n #[test]\n\n fn test_new() -> anyhow::Result<()> {\n\n let mut child = Command::new(\"echo\")\n\n .arg(\"Test\")\n\n .stdout(Stdio::piped())\n\n .spawn()\n\n .expect(\"failed execute command\");\n\n let stream = Box::new(child.stdout.take().unwrap());\n\n let result = PipeStreamReader::new(stream);\n\n\n\n match result.lines.recv().unwrap() {\n\n Ok(piped_line) => match piped_line {\n", "file_path": "src/stream_read.rs", "rank": 58, "score": 30118.953602186102 }, { "content": " Ok(line) => Ok(PipedLine::Line(line)),\n\n Err(err) => Err(PipeError::NotUtf8(err)),\n\n })\n\n .unwrap();\n\n buf.clear()\n\n } else {\n\n buf.push(byte[0])\n\n }\n\n }\n\n Err(error) => {\n\n tx.send(Err(PipeError::IO(error))).unwrap();\n\n }\n\n }\n\n }\n\n });\n\n\n\n rx\n\n },\n\n }\n\n }\n", "file_path": "src/stream_read.rs", "rank": 59, "score": 30113.964526292977 }, { "content": " PipedLine::Line(line) => assert_eq!(line, \"Test\"),\n\n PipedLine::EOF => println!(\"EOF\"),\n\n },\n\n Err(error) => match error {\n\n PipeError::IO(err) => println!(\"{}\", err),\n\n PipeError::NotUtf8(err) => println!(\"{}\", err),\n\n },\n\n 
}\n\n\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/stream_read.rs", "rank": 60, "score": 30113.964526292977 }, { "content": " port: port_for(\n\n self.opts.env_path.clone(),\n\n self.opts.port.clone(),\n\n index,\n\n con_index + 1,\n\n ),\n\n env_without_port: self.env_without_port(),\n\n setuid: self.username(),\n\n chdir: self.root_path().into_os_string().into_string().unwrap(),\n\n exec: pe.command.to_string(),\n\n };\n\n data.insert(\"process\".to_string(), to_json(&p));\n\n data\n\n }\n\n}\n\n\n\nimpl Default for Exporter {\n\n fn default() -> Self {\n\n Exporter {\n\n procfile: Procfile {\n", "file_path": "src/cmd/export/upstart.rs", "rank": 61, "score": 25708.12011446454 }, { "content": " data: HashMap::new(),\n\n },\n\n opts: ExportOpts {\n\n format: String::from(\"\"),\n\n location: PathBuf::from(\"location\"),\n\n app: None,\n\n formation: String::from(\"all=1\"),\n\n log_path: None,\n\n run_path: None,\n\n port: None,\n\n template_path: None,\n\n user: None,\n\n env_path: PathBuf::from(\".env\"),\n\n procfile_path: PathBuf::from(\"Procfile\"),\n\n root_path: Some(env::current_dir().unwrap()),\n\n timeout: String::from(\"5\"),\n\n },\n\n }\n\n }\n\n}\n", "file_path": "src/cmd/export/upstart.rs", "rank": 62, "score": 25706.818632845156 }, { "content": " app: None,\n\n formation: String::from(\"all=1\"),\n\n log_path: None,\n\n run_path: None,\n\n port: None,\n\n template_path: None,\n\n user: None,\n\n env_path: PathBuf::from(\".env\"),\n\n procfile_path: PathBuf::from(\"Procfile\"),\n\n root_path: Some(env::current_dir().unwrap()),\n\n timeout: String::from(\"5\"),\n\n },\n\n }\n\n }\n\n}\n\n\n\nimpl Exporter {\n\n fn boxed(self) -> Box<Self> {\n\n Box::new(self)\n\n }\n", "file_path": "src/cmd/export/systemd.rs", "rank": 63, "score": 25704.323389254543 }, { "content": " app: None,\n\n formation: String::from(\"all=1\"),\n\n log_path: None,\n\n run_path: None,\n\n port: None,\n\n template_path: None,\n\n user: None,\n\n env_path: 
PathBuf::from(\".env\"),\n\n procfile_path: PathBuf::from(\"Procfile\"),\n\n root_path: Some(env::current_dir().unwrap()),\n\n timeout: String::from(\"5\"),\n\n },\n\n }\n\n }\n\n}\n\n\n\nimpl Exporter {\n\n fn boxed(self) -> Box<Self> {\n\n Box::new(self)\n\n }\n", "file_path": "src/cmd/export/daemon.rs", "rank": 64, "score": 25704.323389254543 }, { "content": "use super::base::{EnvParameter, Exportable, Template};\n\nuse crate::cmd::export::ExportOpts;\n\nuse crate::env::read_env;\n\nuse crate::process::port_for;\n\nuse crate::procfile::{Procfile, ProcfileEntry};\n\nuse handlebars::to_json;\n\nuse serde_derive::Serialize;\n\nuse serde_json::value::{Map, Value as Json};\n\nuse std::collections::HashMap;\n\nuse std::env;\n\nuse std::path::PathBuf;\n\n\n\npub struct Exporter {\n\n pub procfile: Procfile,\n\n pub opts: ExportOpts,\n\n}\n\n\n\n#[derive(Serialize)]\n", "file_path": "src/cmd/export/launchd.rs", "rank": 65, "score": 25703.238574619856 }, { "content": "use super::base::{EnvParameter, Exportable, Template};\n\nuse crate::cmd::export::ExportOpts;\n\nuse crate::env::read_env;\n\nuse crate::process::port_for;\n\nuse crate::procfile::{Procfile, ProcfileEntry};\n\nuse handlebars::to_json;\n\nuse serde_derive::Serialize;\n\nuse serde_json::value::{Map, Value as Json};\n\nuse std::collections::HashMap;\n\nuse std::env;\n\nuse std::path::PathBuf;\n\n\n\npub struct Exporter {\n\n pub procfile: Procfile,\n\n pub opts: ExportOpts,\n\n}\n\n\n\n#[derive(Serialize)]\n", "file_path": "src/cmd/export/daemon.rs", "rank": 66, "score": 25703.238574619856 }, { "content": "use super::base::{Exportable, Template};\n\nuse crate::cmd::export::ExportOpts;\n\nuse crate::env::read_env;\n\nuse crate::process::port_for;\n\nuse crate::procfile::Procfile;\n\nuse handlebars::to_json;\n\nuse regex::Regex;\n\nuse serde_derive::Serialize;\n\nuse serde_json::value::{Map, Value as Json};\n\nuse shellwords::escape;\n\nuse std::collections::HashMap;\n\nuse std::env;\n\nuse 
std::path::PathBuf;\n\n\n\nconst ENV_REGEXP: &'static str = \"\\\\$\\\\{*(?P<envname>[A-Za-z0-9_-]+)\\\\}*\";\n\n\n\npub struct Exporter {\n\n pub procfile: Procfile,\n\n pub opts: ExportOpts,\n\n}\n\n\n\n#[derive(Serialize)]\n", "file_path": "src/cmd/export/supervisord.rs", "rank": 67, "score": 25703.21924192324 }, { "content": " formation: String::from(\"all=1\"),\n\n log_path: None,\n\n run_path: None,\n\n port: None,\n\n template_path: None,\n\n user: None,\n\n env_path: PathBuf::from(\".env\"),\n\n procfile_path: PathBuf::from(\"Procfile\"),\n\n root_path: Some(env::current_dir().unwrap()),\n\n timeout: String::from(\"5\"),\n\n },\n\n }\n\n }\n\n}\n\n\n\nimpl Exporter {\n\n fn boxed(self) -> Box<Self> {\n\n Box::new(self)\n\n }\n\n\n", "file_path": "src/cmd/export/launchd.rs", "rank": 68, "score": 25702.53250495317 }, { "content": " template_path: None,\n\n user: None,\n\n env_path: PathBuf::from(\".env\"),\n\n procfile_path: PathBuf::from(\"Procfile\"),\n\n root_path: Some(env::current_dir().unwrap()),\n\n timeout: String::from(\"5\"),\n\n },\n\n }\n\n }\n\n}\n\n\n\nimpl Exporter {\n\n fn boxed(self) -> Box<Self> {\n\n Box::new(self)\n\n }\n\n\n\n pub fn boxed_new() -> Box<Self> {\n\n Self::default().boxed()\n\n }\n\n\n", "file_path": "src/cmd/export/supervisord.rs", "rank": 69, "score": 25702.439529671025 }, { "content": "use crate::cmd::export::ExportOpts;\n\nuse crate::env::read_env;\n\n\n\nuse handlebars::Handlebars;\n\nuse nix::unistd::{chown, User};\n\nuse serde_derive::Serialize;\n\nuse serde_json::value::{Map, Value as Json};\n\nuse std::env;\n\nuse std::fs::File;\n\nuse std::fs::{create_dir_all, remove_file};\n\nuse std::path::PathBuf;\n\n\n\n#[derive(Serialize)]\n\npub struct EnvParameter {\n\n pub(crate) key: String,\n\n pub(crate) value: String,\n\n}\n\n\n\npub struct Template {\n\n pub(crate) template_path: PathBuf,\n\n pub(crate) data: Map<String, Json>,\n\n pub(crate) output_path: PathBuf,\n\n}\n\n\n", "file_path": "src/cmd/export/base.rs", 
"rank": 70, "score": 25702.342091342543 }, { "content": " port: port_for(\n\n self.opts.env_path.clone(),\n\n self.opts.port.clone(),\n\n index,\n\n con_index + 1,\n\n ),\n\n process_name: process_name.to_string(),\n\n process_command: pe.command.to_string(),\n\n env_without_port: self.env_without_port(),\n\n timeout: self.opts.timeout.clone(),\n\n };\n\n data.insert(\"process_service\".to_string(), to_json(&ps));\n\n data\n\n }\n\n}\n\n\n\nimpl Exportable for Exporter {\n\n fn export(&self) -> Result<(), Box<dyn std::error::Error>> {\n\n self.base_export().expect(\"failed execute base_export\");\n\n\n", "file_path": "src/cmd/export/systemd.rs", "rank": 72, "score": 25701.819950025045 }, { "content": " let data = pe.command.split(\" \").collect::<Vec<_>>();\n\n let mut result = vec![];\n\n for item in data {\n\n result.push(item.to_string())\n\n }\n\n result\n\n }\n\n\n\n fn environment(&self, index: usize, con_index: usize) -> Vec<EnvParameter> {\n\n let port = port_for(\n\n self.opts.env_path.clone(),\n\n self.opts.port.clone(),\n\n index,\n\n con_index + 1,\n\n );\n\n let mut env = read_env(self.opts.env_path.clone()).expect(\"failed read .env\");\n\n env.insert(\"PORT\".to_string(), port);\n\n\n\n let mut result = vec![];\n\n for (key, val) in env.iter() {\n", "file_path": "src/cmd/export/daemon.rs", "rank": 73, "score": 25701.530928342792 }, { "content": "use super::base::{EnvParameter, Exportable, Template};\n\nuse crate::cmd::export::ExportOpts;\n\nuse crate::process::port_for;\n\nuse crate::procfile::{Procfile, ProcfileEntry};\n\n\n\nuse handlebars::to_json;\n\nuse serde_derive::Serialize;\n\nuse serde_json::value::{Map, Value as Json};\n\nuse std::collections::HashMap;\n\nuse std::env;\n\nuse std::path::PathBuf;\n\n\n\npub struct Exporter {\n\n pub procfile: Procfile,\n\n pub opts: ExportOpts,\n\n}\n\n\n\n#[derive(Serialize)]\n", "file_path": "src/cmd/export/upstart.rs", "rank": 74, "score": 25701.41062660374 }, { "content": "use 
super::base::{EnvParameter, Exportable, Template};\n\nuse crate::cmd::export::ExportOpts;\n\nuse crate::process::port_for;\n\nuse crate::procfile::{Procfile, ProcfileEntry};\n\nuse handlebars::to_json;\n\nuse serde_derive::Serialize;\n\nuse serde_json::value::{Map, Value as Json};\n\nuse std::collections::HashMap;\n\nuse std::env;\n\nuse std::path::PathBuf;\n\n\n\npub struct Exporter {\n\n pub procfile: Procfile,\n\n pub opts: ExportOpts,\n\n}\n\n\n\n#[derive(Serialize)]\n", "file_path": "src/cmd/export/systemd.rs", "rank": 75, "score": 25701.41062660374 }, { "content": " pub fn boxed_new() -> Box<Self> {\n\n Self::default().boxed()\n\n }\n\n\n\n fn launchd_tmpl_path(&self) -> PathBuf {\n\n let mut path = self.project_root_path();\n\n let tmpl_path = PathBuf::from(\"src/cmd/export/templates/launchd/launchd.plist.hbs\");\n\n path.push(tmpl_path);\n\n path\n\n }\n\n\n\n fn make_launchd_data(\n\n &self,\n\n pe: &ProcfileEntry,\n\n service_name: &str,\n\n index: usize,\n\n con_index: usize,\n\n ) -> Map<String, Json> {\n\n let mut data = Map::new();\n\n let log_display = self.log_path().into_os_string().into_string().unwrap();\n", "file_path": "src/cmd/export/launchd.rs", "rank": 76, "score": 25700.58309246499 }, { "content": " }\n\n\n\n fn environment(&self, index: usize, con_index: usize) -> Vec<EnvParameter> {\n\n let port = port_for(\n\n self.opts.env_path.clone(),\n\n self.opts.port.clone(),\n\n index,\n\n con_index + 1,\n\n );\n\n let mut env = read_env(self.opts.env_path.clone()).expect(\"failed read .env\");\n\n env.insert(\"PORT\".to_string(), port);\n\n\n\n let mut result = vec![];\n\n for (key, val) in env.iter() {\n\n result.push(EnvParameter {\n\n key: key.to_string(),\n\n value: val.to_string(),\n\n });\n\n }\n\n\n", "file_path": "src/cmd/export/launchd.rs", "rank": 77, "score": 25699.891758362963 }, { "content": "\n\n /// Specify the pid file directory, defaults to /var/run/<application>\n\n #[structopt(name = \"RUN\", short = \"r\", long = \"run\", 
parse(from_os_str))]\n\n pub run_path: Option<PathBuf>,\n\n\n\n /// Specify which port to use as the base for this application. Should be a multiple of 1000\n\n #[structopt(name = \"PORT\", short = \"p\", long = \"port\")]\n\n pub port: Option<String>,\n\n\n\n /// Specify an template to use for creating export files\n\n #[structopt(name = \"TEMPLATE\", short = \"T\", long = \"template\")]\n\n pub template_path: Option<PathBuf>,\n\n\n\n /// Specify the user the application should be run as. Defaults to the app name\n\n #[structopt(name = \"USER\", short = \"u\", long = \"user\")]\n\n pub user: Option<String>,\n\n\n\n /// Specify an environment file to load\n\n #[structopt(\n\n name = \"ENV\",\n", "file_path": "src/cmd/export/mod.rs", "rank": 78, "score": 25699.513022000836 }, { "content": " let mut data = Map::new();\n\n let mt = MasterTargetParams {\n\n service_names: service_names.join(\" \"),\n\n };\n\n data.insert(\"master_target\".to_string(), to_json(&mt));\n\n data\n\n }\n\n\n\n fn make_process_service_data(\n\n &self,\n\n pe: &ProcfileEntry,\n\n process_name: &str,\n\n index: usize,\n\n con_index: usize,\n\n ) -> Map<String, Json> {\n\n let mut data = Map::new();\n\n let ps = ProcessServiceParams {\n\n app: self.app(),\n\n user: self.username(),\n\n work_dir: self.root_path().into_os_string().into_string().unwrap(),\n", "file_path": "src/cmd/export/systemd.rs", "rank": 79, "score": 25699.414438858235 }, { "content": "use crate::cmd::export::base::Exportable;\n\nuse crate::procfile::read_procfile;\n\nuse std::path::PathBuf;\n\nuse structopt::{clap, StructOpt};\n\n\n\npub mod base;\n\npub mod daemon;\n\npub mod launchd;\n\npub mod runit;\n\npub mod supervisord;\n\npub mod systemd;\n\npub mod upstart;\n\n\n\n#[derive(StructOpt, Debug, Default, Clone)]\n\n#[structopt(setting(clap::AppSettings::ColoredHelp))]\n\npub struct ExportOpts {\n\n /// Specify process management format\n\n #[structopt(name = \"FORMAT\")]\n\n pub format: String,\n\n\n", "file_path": 
"src/cmd/export/mod.rs", "rank": 80, "score": 25699.20851020771 }, { "content": "\n\n fn make_process_master_data(&self) -> Map<String, Json> {\n\n let mut data = Map::new();\n\n let pm = ProcessMasterParams { app: self.app() };\n\n data.insert(\"process_master\".to_string(), to_json(&pm));\n\n data\n\n }\n\n\n\n fn make_process_data(\n\n &self,\n\n pe: &ProcfileEntry,\n\n app_name: &str,\n\n index: usize,\n\n con_index: usize,\n\n ) -> Map<String, Json> {\n\n let mut data = Map::new();\n\n\n\n let p = ProcessParams {\n\n app: self.app(),\n\n name: app_name.to_string(),\n", "file_path": "src/cmd/export/upstart.rs", "rank": 82, "score": 25698.5084522225 }, { "content": " result\n\n }\n\n}\n\n\n\nimpl Exportable for Exporter {\n\n fn export(&self) -> Result<(), Box<dyn std::error::Error>> {\n\n self.base_export().expect(\"failed execute base_export\");\n\n\n\n let mut index = 0;\n\n let mut clean_paths: Vec<PathBuf> = vec![];\n\n let mut tmpl_data: Vec<Template> = vec![];\n\n\n\n for (name, pe) in self.procfile.data.iter() {\n\n let con = pe.concurrency.get();\n\n for n in 0..con {\n\n index += 1;\n\n let service_name = format!(\"{}-{}-{}\", self.app(), &name, n + 1);\n\n let output_path = self.opts.location.join(&service_name);\n\n\n\n clean_paths.push(output_path.clone());\n", "file_path": "src/cmd/export/launchd.rs", "rank": 83, "score": 25698.290051662272 }, { "content": " pe: &ProcfileEntry,\n\n service_name: &str,\n\n index: usize,\n\n con_index: usize,\n\n ) -> Map<String, Json> {\n\n let mut data = Map::new();\n\n let pp = ProcessParams {\n\n service_name: service_name.to_string(),\n\n env: self.environment(index, con_index),\n\n user: self.username(),\n\n work_dir: self.root_path().into_os_string().into_string().unwrap(),\n\n pid_path: self\n\n .run_path()\n\n .join(format!(\"{}.pid\", &service_name))\n\n .into_os_string()\n\n .into_string()\n\n .unwrap(),\n\n command: self.command_args(pe).get(0).unwrap().to_string(),\n\n command_args: 
self.command_args_str(pe),\n\n log_path: self\n", "file_path": "src/cmd/export/daemon.rs", "rank": 84, "score": 25698.10579572884 }, { "content": " }\n\n\n\n fn environment(&self, index: usize, con_index: usize) -> String {\n\n let port = port_for(\n\n self.opts.env_path.clone(),\n\n self.opts.port.clone(),\n\n index,\n\n con_index + 1,\n\n );\n\n let mut env = read_env(self.opts.env_path.clone()).expect(\"failed read .env\");\n\n env.insert(\"PORT\".to_string(), port);\n\n\n\n let mut result = vec![];\n\n for (key, val) in env.iter() {\n\n result.push(format!(\"{}=\\\"{}\\\"\", &key, escape(&val)))\n\n }\n\n\n\n result.join(\",\")\n\n }\n\n\n", "file_path": "src/cmd/export/supervisord.rs", "rank": 85, "score": 25697.90883162956 }, { "content": " result.push(EnvParameter {\n\n key: key.to_string(),\n\n value: val.to_string(),\n\n });\n\n }\n\n\n\n result\n\n }\n\n}\n\n\n\nimpl Exportable for Exporter {\n\n fn export(&self) -> Result<(), Box<dyn std::error::Error>> {\n\n self.base_export().expect(\"failed execute base_export\");\n\n\n\n let mut clean_paths: Vec<PathBuf> = vec![];\n\n let mut tmpl_data: Vec<Template> = vec![];\n\n\n\n let output_path = self.opts.location.join(format!(\"{}.conf\", self.app()));\n\n\n\n clean_paths.push(output_path.clone());\n", "file_path": "src/cmd/export/daemon.rs", "rank": 86, "score": 25697.424572211174 }, { "content": " // http://supervisord.org/configuration.html?highlight=environment#environment-variables\n\n fn replace_env_for_supervisord(&self, command: &str) -> String {\n\n let re_env = Regex::new(ENV_REGEXP).unwrap();\n\n let result = re_env.replace_all(command, \"%(ENV_$envname)s\");\n\n result.to_string()\n\n }\n\n}\n\n\n\nimpl Exportable for Exporter {\n\n fn export(&self) -> Result<(), Box<dyn std::error::Error>> {\n\n self.base_export().expect(\"failed execute base_export\");\n\n\n\n let mut index = 0;\n\n let mut service_names = vec![];\n\n let mut data: Vec<AppConfDataParams> = vec![];\n\n for (name, pe) in 
self.procfile.data.iter() {\n\n index += 1;\n\n let con = pe.concurrency.get();\n\n for n in 0..con {\n\n let program = format!(\"{}-{}-{}\", self.app(), &name, n + 1);\n", "file_path": "src/cmd/export/supervisord.rs", "rank": 87, "score": 25696.44249374685 }, { "content": " tmpl_data.push(Template {\n\n template_path: self.master_tmpl_path(),\n\n data: self.make_master_data(),\n\n output_path,\n\n });\n\n\n\n let mut index = 0;\n\n for (name, pe) in self.procfile.data.iter() {\n\n let con = pe.concurrency.get();\n\n let service_name = format!(\"{}-{}\", self.app(), &name);\n\n let output_path = self\n\n .opts\n\n .location\n\n .join(format!(\"{}-{}.conf\", self.app(), &name));\n\n\n\n clean_paths.push(output_path.clone());\n\n tmpl_data.push(Template {\n\n template_path: self.process_master_tmpl_path(),\n\n data: self.make_process_master_data(),\n\n output_path,\n", "file_path": "src/cmd/export/daemon.rs", "rank": 90, "score": 25695.04191158154 }, { "content": "\n\nimpl Exportable for Exporter {\n\n fn export(&self) -> Result<(), Box<dyn std::error::Error>> {\n\n self.base_export().expect(\"failed execute base_export\");\n\n\n\n let mut clean_paths: Vec<PathBuf> = vec![];\n\n let mut tmpl_data: Vec<Template> = vec![];\n\n\n\n let master_file = format!(\"{}.conf\", self.app());\n\n let output_path = self.output_path(master_file);\n\n\n\n clean_paths.push(output_path.clone());\n\n tmpl_data.push(Template {\n\n template_path: self.master_tmpl_path(),\n\n data: Map::new(),\n\n output_path,\n\n });\n\n\n\n let mut index = 0;\n\n for (name, pe) in self.procfile.data.iter() {\n", "file_path": "src/cmd/export/upstart.rs", "rank": 92, "score": 25694.769842454036 }, { "content": " location.join(filename)\n\n }\n\n\n\n fn env_without_port(&self) -> Vec<EnvParameter> {\n\n let mut env = read_env(self.ref_opts().env_path.clone()).expect(\"failed read .env\");\n\n env.remove(\"PORT\");\n\n let mut env_without_port: Vec<EnvParameter> = vec![];\n\n for (key, value) in env {\n\n 
env_without_port.push(EnvParameter { key, value });\n\n }\n\n env_without_port\n\n }\n\n\n\n fn create_dir_recursive(&self, dir_path: &PathBuf) {\n\n let display = dir_path.clone().into_os_string().into_string().unwrap();\n\n create_dir_all(dir_path).expect(&format!(\"Could not create: {}\", display))\n\n }\n\n}\n", "file_path": "src/cmd/export/base.rs", "rank": 93, "score": 25693.999068009194 }, { "content": " /// Specift the path to export\n\n #[structopt(name = \"LOCATION\")]\n\n pub location: PathBuf,\n\n\n\n /// Use this name rather than the application's root directory name as the name of the application when exporting\n\n #[structopt(name = \"APP\", short = \"a\", long = \"app\")]\n\n pub app: Option<String>,\n\n\n\n /// Specify the number of each process type to run. The value passed in should be in the format process=num,process=num\n\n #[structopt(\n\n name = \"APP=NUMBER\",\n\n short = \"m\",\n\n long = \"formation\",\n\n default_value = \"all=1\"\n\n )]\n\n pub formation: String,\n\n\n\n /// Specify the directory to place process logs in\n\n #[structopt(name = \"LOG\", short = \"l\", long = \"log\", parse(from_os_str))]\n\n pub log_path: Option<PathBuf>,\n", "file_path": "src/cmd/export/mod.rs", "rank": 94, "score": 25693.138775878742 }, { "content": " let lp = LaunchdParams {\n\n label: service_name.to_string(),\n\n env: self.environment(index, con_index),\n\n command_args: self.command_args(pe),\n\n stdout_path: format!(\"{}/{}.log\", &log_display, &service_name),\n\n stderr_path: format!(\"{}/{}.error.log\", &log_display, &service_name),\n\n user: self.username(),\n\n work_dir: self.root_path().into_os_string().into_string().unwrap(),\n\n };\n\n data.insert(\"launchd\".to_string(), to_json(&lp));\n\n data\n\n }\n\n\n\n fn command_args(&self, pe: &ProcfileEntry) -> Vec<String> {\n\n let data = pe.command.split(\" \").collect::<Vec<_>>();\n\n let mut result = vec![];\n\n for item in data {\n\n result.push(item.to_string())\n\n }\n\n result\n", 
"file_path": "src/cmd/export/launchd.rs", "rank": 95, "score": 25692.72458497008 }, { "content": " short = \"e\",\n\n long = \"env\",\n\n parse(from_os_str),\n\n default_value = \".env\"\n\n )]\n\n pub env_path: PathBuf,\n\n\n\n /// Specify an Procfile to load\n\n #[structopt(\n\n name = \"PROCFILE\",\n\n short = \"f\",\n\n long = \"procfile\",\n\n parse(from_os_str),\n\n default_value = \"Procfile\"\n\n )]\n\n pub procfile_path: PathBuf,\n\n\n\n /// Specify an alternate application root. This defaults to the directory containing the Procfile.\n\n #[structopt(name = \"ROOT\", short = \"d\", long = \"root\", parse(from_os_str))]\n\n pub root_path: Option<PathBuf>,\n", "file_path": "src/cmd/export/mod.rs", "rank": 96, "score": 25691.762326105385 }, { "content": " });\n\n\n\n for n in 0..con {\n\n index += 1;\n\n let process_name = format!(\"{}-{}-{}.conf\", self.app(), &name, n + 1);\n\n let output_path = self.opts.location.join(&process_name);\n\n\n\n clean_paths.push(output_path.clone());\n\n tmpl_data.push(Template {\n\n template_path: self.process_tmpl_path(),\n\n data: self.make_process_data(pe, &service_name, index, n),\n\n output_path,\n\n });\n\n }\n\n }\n\n\n\n for path in clean_paths {\n\n self.clean(&path);\n\n }\n\n\n", "file_path": "src/cmd/export/daemon.rs", "rank": 98, "score": 25691.342220603787 }, { "content": " tmpl_data.push(Template {\n\n template_path: self.launchd_tmpl_path(),\n\n data: self.make_launchd_data(pe, &service_name, index, n),\n\n output_path,\n\n });\n\n }\n\n }\n\n\n\n for path in clean_paths {\n\n self.clean(&path);\n\n }\n\n\n\n for tmpl in tmpl_data {\n\n self.write_template(tmpl);\n\n }\n\n\n\n Ok(())\n\n }\n\n\n\n fn ref_opts(&self) -> &ExportOpts {\n\n &self.opts\n\n }\n\n}\n", "file_path": "src/cmd/export/launchd.rs", "rank": 99, "score": 25691.22219706606 } ]
Rust
src/config.rs
selvakn/wg-port-forward
1ecdcc8a1af1df3f4e906d991592a7ef693587ec
use std::fmt::{Display, Formatter}; use std::fs::read_to_string; use std::net::{IpAddr, SocketAddr, ToSocketAddrs}; use std::sync::Arc; use anyhow::Context; use boringtun::crypto::{X25519PublicKey, X25519SecretKey}; use clap::{App, Arg}; #[derive(Clone, Debug)] pub struct Config { pub private_key: Arc<X25519SecretKey>, pub endpoint_public_key: Arc<X25519PublicKey>, pub endpoint_addr: SocketAddr, pub source_peer_ip: IpAddr, pub keepalive_seconds: Option<u16>, pub max_transmission_unit: usize, pub ports_to_forward: Vec<u16>, } impl Config { pub fn from( private_key: &str, endpoint_public_key: &str, endpoint_addr: &str, source_peer_ip: &str, keepalive_seconds: u16, max_transmission_unit: usize, ports_to_forward: Vec<u16>, ) -> anyhow::Result<Self> { let config = Config { private_key: Arc::new(parse_private_key(private_key)?), endpoint_public_key: Arc::new(parse_public_key(Some(endpoint_public_key))?), endpoint_addr: parse_addr(Some(endpoint_addr))?, source_peer_ip: parse_ip(Some(source_peer_ip))?, keepalive_seconds: Some(keepalive_seconds), max_transmission_unit: max_transmission_unit, ports_to_forward: ports_to_forward, }; Ok(config) } pub fn from_args() -> anyhow::Result<Self> { let matches = App::new("p2p-port-forward") .version(env!("CARGO_PKG_VERSION")) .args(&[ Arg::with_name("private-key") .required_unless("private-key-file") .takes_value(true) .long("private-key") .help("The private key of this peer. The corresponding public key should be registered in the WireGuard endpoint. \ You can also use '--private-key-file' to specify a file containing the key instead."), Arg::with_name("private-key-file") .takes_value(true) .long("private-key-file") .help("The path to a file containing the private key of this peer. 
The corresponding public key should be registered in the WireGuard endpoint."), Arg::with_name("endpoint-public-key") .required(true) .takes_value(true) .long("endpoint-public-key") .help("The public key of the WireGuard endpoint (remote)."), Arg::with_name("endpoint-addr") .required(true) .takes_value(true) .long("endpoint-addr") .help("The address (IP + port) of the WireGuard endpoint (remote). Example: 1.2.3.4:51820"), Arg::with_name("source-peer-ip") .required(true) .takes_value(true) .long("source-peer-ip") .help("The source IP to identify this peer as (local). Example: 192.168.4.3"), Arg::with_name("keep-alive") .required(false) .takes_value(true) .long("keep-alive") .help("Configures a persistent keep-alive for the WireGuard tunnel, in seconds."), Arg::with_name("max-transmission-unit") .required(false) .takes_value(true) .long("max-transmission-unit") .default_value("1420") .help("Configures the max-transmission-unit (MTU) of the WireGuard tunnel."), Arg::with_name("ports-to-forward") .required(true) .multiple(true) .takes_value(true) .long("ports-to-forward") .help("Configures the ports to forward. 
Example: --ports-to-forward 22,80,443"), ]).get_matches(); let private_key = if let Some(private_key_file) = matches.value_of("private-key-file") { read_to_string(private_key_file) .map(|s| s.trim().to_string()) .with_context(|| "Failed to read private key file") } else { matches .value_of("private-key") .map(String::from) .with_context(|| "Missing private key") }?; Ok(Self { private_key: Arc::new( parse_private_key(&private_key).with_context(|| "Invalid private key")?, ), endpoint_public_key: Arc::new( parse_public_key(matches.value_of("endpoint-public-key")) .with_context(|| "Invalid endpoint public key")?, ), endpoint_addr: parse_addr(matches.value_of("endpoint-addr")) .with_context(|| "Invalid endpoint address")?, source_peer_ip: parse_ip(matches.value_of("source-peer-ip")) .with_context(|| "Invalid source peer IP")?, keepalive_seconds: parse_keep_alive(matches.value_of("keep-alive")) .with_context(|| "Invalid keep-alive value")?, max_transmission_unit: parse_mtu(matches.value_of("max-transmission-unit")) .with_context(|| "Invalid max-transmission-unit value")?, ports_to_forward: matches .values_of("ports-to-forward") .unwrap() .map(|s| s.parse::<u16>().unwrap()) .collect(), }) } } fn parse_addr(s: Option<&str>) -> anyhow::Result<SocketAddr> { s.with_context(|| "Missing address")? .to_socket_addrs() .with_context(|| "Invalid address")? .next() .with_context(|| "Could not lookup address") } fn parse_ip(s: Option<&str>) -> anyhow::Result<IpAddr> { s.with_context(|| "Missing IP")? .parse::<IpAddr>() .with_context(|| "Invalid IP address") } fn parse_private_key(s: &str) -> anyhow::Result<X25519SecretKey> { s.parse::<X25519SecretKey>() .map_err(|e| anyhow::anyhow!("{}", e)) } fn parse_public_key(s: Option<&str>) -> anyhow::Result<X25519PublicKey> { s.with_context(|| "Missing public key")? 
.parse::<X25519PublicKey>() .map_err(|e| anyhow::anyhow!("{}", e)) .with_context(|| "Invalid public key") } fn parse_keep_alive(s: Option<&str>) -> anyhow::Result<Option<u16>> { if let Some(s) = s { let parsed: u16 = s.parse().with_context(|| { format!( "Keep-alive must be a number between 0 and {} seconds", u16::MAX ) })?; Ok(Some(parsed)) } else { Ok(None) } } fn parse_mtu(s: Option<&str>) -> anyhow::Result<usize> { s.with_context(|| "Missing MTU")? .parse() .with_context(|| "Invalid MTU") } #[derive(Debug, Clone, Copy, Eq, PartialEq, Hash, Ord, PartialOrd)] pub enum PortProtocol { Tcp, Icmp, } impl Display for PortProtocol { fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { write!( f, "{}", match self { Self::Tcp => "TCP", Self::Icmp => "Icmp", } ) } }
use std::fmt::{Display, Formatter}; use std::fs::read_to_string; use std::net::{IpAddr, SocketAddr, ToSocketAddrs}; use std::sync::Arc; use anyhow::Context; use boringtun::crypto::{X25519PublicKey, X25519SecretKey}; use clap::{App, Arg}; #[derive(Clone, Debug)] pub struct Config { pub private_key: Arc<X25519SecretKey>, pub endpoint_public_key: Arc<X25519PublicKey>, pub endpoint_addr: SocketAddr, pub source_peer_ip: IpAddr, pub keepalive_seconds: Option<u16>, pub max_transmission_unit: usize, pub ports_to_forward: Vec<u16>, } impl Config { pub fn from( private_key: &str, endpoint_public_key: &str, endpoint_addr: &str, source_peer_ip: &str, keepalive_seconds: u16, max_transmission_unit: usize, ports_to_forward: Vec<u16>, ) -> anyhow::Result<Self> { let config = Config { private_key: Arc::new(parse_private_key(private_key)?), endpoint_public_key: Arc::new(parse_public_key(Some(endpoint_public_key))?), endpoint_addr: parse_addr(Some(endpoint_addr))?, source_peer_ip: parse_ip(Some(source_peer_ip))?, keepalive_seconds: Some(keepalive_seconds), max_transmission_unit: max_transmission_unit, ports_to_forward: ports_to_forward, }; Ok(config) } pub fn from_args() -> anyhow::Result<Self> { let matches = App::new("p2p-port-forward") .version(env!("CARGO_PKG_VERSION")) .args(&[ Arg::with_name("private-key") .required_unless("private-key-file") .takes_value(true) .long("private-key") .help("The private key of this peer. The corresponding public key should be registered in the WireGuard endpoint. \ You can also use '--private-key-file' to specify a file containing the key instead."), Arg::with_name("private-key-file") .takes_value(true) .long("private-key-file") .help("The path to a file containing the private key of this peer. 
The corresponding public key should be registered in the WireGuard endpoint."), Arg::with_name("endpoint-public-key") .required(true) .takes_value(true) .long("endpoint-public-key") .help("The public key of the WireGuard endpoint (remote)."), Arg::with_name("endpoint-addr") .required(true) .takes_value(true) .long("endpoint-addr") .help("The address (IP + port) of the WireGuard endpoint (remote). Example: 1.2.3.4:51820"), Arg::with_name("source-peer-ip") .required(true) .takes_value(true) .long("source-peer-ip") .help("The source IP to identify this peer as (local). Example: 192.168.4.3"), Arg::with_name("keep-alive") .required(false) .takes_value(true) .long("keep-alive") .help("Configures a persistent keep-alive for the WireGuard tunnel, in seconds."), Arg::with_name("max-transmission-unit") .required(false) .takes_value(true) .long("max-transmission-unit") .default_value("1420") .help("Configures the max-transmission-unit (MTU) of the WireGuard tunnel."), Arg::with_name("ports-to-forward") .required(true) .multiple(true) .takes_value(true) .long("ports-to-forward") .help("Configures the ports to forward. 
Example: --ports-to-forward 22,80,443"), ]).get_matches(); let private_key = if let Some(private_key_file) = matches.value_of("private-key-file") { read_to_string(private_key_file) .map(|s| s.trim().to_string()) .with_context(|| "Failed to read private key file") } else { matches .value_of("private-key") .map(String::from) .with_context(|| "Missing private key") }?; Ok(Self { private_key: Arc::new( parse_private_key(&private_key).with_context(|| "Invalid private key")?, ), endpoint_public_key: Arc::new( parse_public_key(matches.value_of("endpoint-public-key")) .with_context(|| "Invalid endpoint public key")?, ), endpoint_addr: parse_addr(matches.value_of("endpoint-addr")) .with_context(|| "Invalid endpoint address")?, source_peer_ip: parse_ip(matches.value_of("source-peer-ip")) .with_context(|| "Invalid source peer IP")?, keepalive_seconds: parse_keep_alive(matches.value_of("keep-alive")) .with_context(|| "Invalid keep-alive value")?, max_transmission_unit: parse_mtu(matches.value_of("max-transmission-unit")) .with_context(|| "Invalid max-transmission-unit value")?, ports_to_forward: matches .values_of("ports-to-forward") .unwrap() .map(|s| s.parse::<u16>().unwrap()) .collect(), }) } } fn parse_addr(s: Option<&str>) -> anyhow::Result<SocketAddr> { s.with_context(|| "Missing address")? .to_socket_addrs() .with_context(|| "Invalid address")? .next() .with_context(|| "Could not lookup address") } fn parse_ip(s: Option<&str>) -> anyhow::Result<IpAddr> { s.with_context(|| "Missing IP")? .parse::<IpAddr>() .with_context(|| "Invalid IP address") } fn parse_private_key(s: &str) -> anyhow::Result<X25519SecretKey> { s.parse::<X25519SecretKey>() .map_err(|e| anyhow::anyhow!("{}", e)) } fn parse_public_key(s: Option<&str>) -> anyhow::Resul
fn parse_keep_alive(s: Option<&str>) -> anyhow::Result<Option<u16>> { if let Some(s) = s { let parsed: u16 = s.parse().with_context(|| { format!( "Keep-alive must be a number between 0 and {} seconds", u16::MAX ) })?; Ok(Some(parsed)) } else { Ok(None) } } fn parse_mtu(s: Option<&str>) -> anyhow::Result<usize> { s.with_context(|| "Missing MTU")? .parse() .with_context(|| "Invalid MTU") } #[derive(Debug, Clone, Copy, Eq, PartialEq, Hash, Ord, PartialOrd)] pub enum PortProtocol { Tcp, Icmp, } impl Display for PortProtocol { fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { write!( f, "{}", match self { Self::Tcp => "TCP", Self::Icmp => "Icmp", } ) } }
t<X25519PublicKey> { s.with_context(|| "Missing public key")? .parse::<X25519PublicKey>() .map_err(|e| anyhow::anyhow!("{}", e)) .with_context(|| "Invalid public key") }
function_block-function_prefixed
[ { "content": "pub fn new_listener_socket<'a>(port: u16) -> anyhow::Result<TcpSocket<'a>> {\n\n let rx_data = vec![0u8; MAX_PACKET];\n\n let tx_data = vec![0u8; MAX_PACKET];\n\n let tcp_rx_buffer = TcpSocketBuffer::new(rx_data);\n\n let tcp_tx_buffer = TcpSocketBuffer::new(tx_data);\n\n let mut socket = TcpSocket::new(tcp_rx_buffer, tcp_tx_buffer);\n\n socket.listen(port)?;\n\n Ok(socket)\n\n}\n\n\n\n#[async_trait]\n\nimpl VirtualInterfacePoll for TcpVirtualInterface {\n\n async fn poll_loop(self) -> anyhow::Result<()> {\n\n debug!(\"VirtualInterfacePoll::poll_loop\");\n\n let mut readiness_notifier = Some(self.readiness_notifier);\n\n let mut receiver_rx = self.receiver_rx;\n\n let wg = self.wg.clone();\n\n\n\n let device =\n\n VirtualIpDevice::new_direct(VirtualPort(self.port, PortProtocol::Tcp), true, self.wg)\n", "file_path": "src/virtual_iface/tcp.rs", "rank": 5, "score": 70975.07228315382 }, { "content": "fn trace_ip_packet(message: &str, packet: &[u8]) {\n\n if log_enabled!(Level::Trace) {\n\n use smoltcp::wire::*;\n\n\n\n match IpVersion::of_packet(packet) {\n\n Ok(IpVersion::Ipv4) => trace!(\n\n \"{}: {}\",\n\n message,\n\n PrettyPrinter::<Ipv4Packet<&mut [u8]>>::new(\"\", &packet)\n\n ),\n\n Ok(IpVersion::Ipv6) => trace!(\n\n \"{}: {}\",\n\n message,\n\n PrettyPrinter::<Ipv6Packet<&mut [u8]>>::new(\"\", &packet)\n\n ),\n\n _ => {}\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/wg.rs", "rank": 7, "score": 60869.0470671299 }, { "content": "#[async_trait]\n\npub trait VirtualInterfacePoll {\n\n async fn poll_loop(mut self) -> anyhow::Result<()>;\n\n}\n\n\n\n#[derive(Copy, Clone, Debug, Hash, Eq, PartialEq, Ord, PartialOrd)]\n\npub struct VirtualPort(pub u16, pub PortProtocol);\n\n\n\nimpl Display for VirtualPort {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {\n\n write!(f, \"[{}:{}]\", self.0, self.1)\n\n }\n\n}\n", "file_path": "src/virtual_iface/mod.rs", "rank": 8, "score": 28741.528682593384 }, { "content": "# wg-port-forward\n\n\n\nA 
cross-platform, user-space WireGuard port-forwarder (to expose local services to a wireguard network) that requires no system network configurations.\n\n\n\n## Use-case\n\n\n\n- You have an existing WireGuard endpoint (router), accessible using its UDP endpoint (typically port 51820); and\n\n- You have a service (only TCP for now) on a port accessible locally and\n\n- You want to expose this service to the wireguard peer (or other peers connected to it) without installing wireguard systemwide (without tun/tap or kernel module)\n\n\n\nFor example, this can be useful for exposing local service during development of the service\n\n\n\n## Usage\n\n\n\n```\n\n./wg-port-forward --ports-to-forward <port> [<ports>] \\\n\n --endpoint-addr <public WireGuard endpoint address> \\\n\n --endpoint-public-key <the public key of the peer on the endpoint> \\\n\n --private-key <private key assigned to wg-port-forward> \\\n\n --source-peer-ip <IP assigned to wg-port-forward> \\\n\n --keep-alive <optional persistent keep-alive in seconds>\n\n```\n\n\n\n### Example\n\n\n\nSuppose your WireGuard endpoint has the following configuration, and is accessible from `a.b.c.d:51820`:\n\n\n\n```\n\n# /etc/wireguard/wg0.conf\n\n\n\n[Interface]\n\nPrivateKey = ********************************************\n\nListenPort = 51820\n\nAddress = 192.168.4.1\n\n\n\n# A friendly peer that wants to reach the TCP service on your local\n\n[Peer]\n\nPublicKey = AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA\n\nAllowedIPs = 192.168.4.2/32\n\n\n", "file_path": "README.md", "rank": 9, "score": 24754.814178974608 }, { "content": "# Peer assigned to wg-port-forward (local)\n\n[Peer]\n\nPublicKey = BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB\n\nAllowedIPs = 192.168.4.3/32\n\n```\n\n\n\nWe can use **wg-port-forward** to expose the local ports , say `127.0.0.1:8080`, that will tunnel through WireGuard and made available to other peers:\n\n\n\n```shell\n\n./wg-port-forward --ports-to-forward 8080 [2222] \\\n\n 
--endpoint-addr a.b.c.d:51820 \\\n\n --endpoint-public-key 'PUB_****************************************' \\\n\n --private-key 'PRIV_BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB' \\\n\n --source-peer-ip 192.168.4.3 \\\n\n --keep-alive 10\n\n```\n\n\n\n\n\n\n\n## Architecture\n\n\n\nwg-port-forward uses [boringtun](https://github.com/cloudflare/boringtun), [tokio](https://github.com/tokio-rs/tokio), [smoltcp](https://github.com/smoltcp-rs/smoltcp) and heavily inspired from [onetun](https://github.com/aramperes/onetun).\n\nSpecial thanks to the developers of those libraries.\n\n\n\n### UDP\n\n\n\nUDP is not supported at the moment. Might come in the future.\n\n\n\n## License\n\n\n\nMIT. See `LICENSE` for details.\n", "file_path": "README.md", "rank": 10, "score": 24746.27644850983 }, { "content": "use crate::virtual_device::VirtualIpDevice;\n\nuse crate::wg::WireGuardTunnel;\n\nuse smoltcp::iface::InterfaceBuilder;\n\nuse std::sync::Arc;\n\nuse tokio::time::Duration;\n\n\n\npub async fn run_ip_sink_interface(wg: Arc<WireGuardTunnel>) -> ! 
{\n\n let device = VirtualIpDevice::new_sink(wg)\n\n .await\n\n .expect(\"Failed to initialize VirtualIpDevice for sink interface\");\n\n\n\n let mut virtual_interface = InterfaceBuilder::new(device, vec![])\n\n .ip_addrs([])\n\n .finalize();\n\n\n\n loop {\n\n let loop_start = smoltcp::time::Instant::now();\n\n match virtual_interface.poll(loop_start) {\n\n Ok(processed) if processed => {\n\n trace!(\"[SINK] Virtual interface polled some packets to be processed\",);\n", "file_path": "src/ip_sink.rs", "rank": 17, "score": 21403.848252568187 }, { "content": " tokio::time::sleep(Duration::from_millis(1)).await;\n\n }\n\n Err(e) => {\n\n error!(\"[SINK] Virtual interface poll error: {:?}\", e);\n\n }\n\n _ => {\n\n tokio::time::sleep(Duration::from_millis(5)).await;\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/ip_sink.rs", "rank": 18, "score": 21392.713980132554 }, { "content": "pub struct WireGuardTunnel {\n\n pub(crate) source_peer_ip: IpAddr,\n\n /// `boringtun` peer/tunnel implementation, used for crypto & WG protocol.\n\n peer: Box<Tunn>,\n\n /// The UDP socket for the public WireGuard endpoint to connect to.\n\n udp: UdpSocket,\n\n /// The address of the public WireGuard endpoint (UDP).\n\n pub(crate) endpoint: SocketAddr,\n\n /// Maps virtual ports to the corresponding IP packet dispatcher.\n\n virtual_port_ip_tx: dashmap::DashMap<VirtualPort, tokio::sync::mpsc::Sender<Vec<u8>>>,\n\n /// IP packet dispatcher for unroutable packets. 
`None` if not initialized.\n\n sink_ip_tx: RwLock<Option<tokio::sync::mpsc::Sender<Vec<u8>>>>,\n\n /// The max transmission unit for WireGuard.\n\n pub(crate) max_transmission_unit: usize,\n\n}\n\n\n\nimpl WireGuardTunnel {\n\n /// Initialize a new WireGuard tunnel.\n\n pub async fn new(config: &Config) -> anyhow::Result<Self> {\n\n let source_peer_ip = config.source_peer_ip;\n", "file_path": "src/wg.rs", "rank": 19, "score": 23.536985073050385 }, { "content": "pub mod wg;\n\n\n\nconst MAX_PACKET: usize = 65536;\n\n\n\n#[tokio::main]\n\nasync fn main() -> anyhow::Result<()> {\n\n env_logger::init();\n\n\n\n let config = Config::from_args().with_context(|| \"Failed to read config\")?;\n\n\n\n loop {\n\n start_wireguard(&config)\n\n .await\n\n .unwrap_or_else(|e| error!(\"recovering from: {}\", e));\n\n }\n\n}\n\n\n\npub async fn start_wireguard(config: &Config) -> anyhow::Result<()> {\n\n let wg = WireGuardTunnel::new(config)\n\n .await\n", "file_path": "src/main.rs", "rank": 20, "score": 19.13653346942762 }, { "content": "use std::net::{IpAddr, Ipv4Addr, Ipv6Addr, SocketAddr};\n\nuse std::time::Duration;\n\n\n\nuse anyhow::Context;\n\nuse boringtun::noise::{Tunn, TunnResult};\n\nuse log::Level;\n\nuse smoltcp::wire::{IpProtocol, IpVersion, Ipv4Packet, Ipv6Packet, TcpPacket, Icmpv4Packet};\n\nuse tokio::net::UdpSocket;\n\nuse tokio::sync::RwLock;\n\n\n\nuse crate::config::{Config, PortProtocol};\n\nuse crate::virtual_iface::VirtualPort;\n\n\n\n/// The capacity of the channel for received IP packets.\n\npub const DISPATCH_CAPACITY: usize = 1_000;\n\nconst MAX_PACKET: usize = 65536;\n\n\n\n/// A WireGuard tunnel. 
Encapsulates and decapsulates IP packets\n\n/// to be sent to and received from a remote UDP endpoint.\n\n/// This tunnel supports at most 1 peer IP at a time, but supports simultaneous ports.\n", "file_path": "src/wg.rs", "rank": 21, "score": 17.135586561370395 }, { "content": " });\n\n }\n\n RouteResult::Drop => {\n\n trace!(\"Dropped unroutable IP packet received from WireGuard endpoint\");\n\n }\n\n }\n\n }\n\n _ => {}\n\n }\n\n }\n\n }\n\n\n\n fn create_tunnel(config: &Config) -> anyhow::Result<Box<Tunn>> {\n\n Tunn::new(\n\n config.private_key.clone(),\n\n config.endpoint_public_key.clone(),\n\n None,\n\n config.keepalive_seconds,\n\n 0,\n\n None,\n", "file_path": "src/wg.rs", "rank": 22, "score": 15.853736655749273 }, { "content": " let peer = Self::create_tunnel(config)?;\n\n let endpoint = config.endpoint_addr;\n\n let udp = UdpSocket::bind(match endpoint {\n\n SocketAddr::V4(_) => \"0.0.0.0:0\",\n\n SocketAddr::V6(_) => \"[::]:0\",\n\n })\n\n .await\n\n .with_context(|| \"Failed to create UDP socket for WireGuard connection\")?;\n\n let virtual_port_ip_tx = Default::default();\n\n\n\n Ok(Self {\n\n source_peer_ip,\n\n peer,\n\n udp,\n\n endpoint,\n\n virtual_port_ip_tx,\n\n sink_ip_tx: RwLock::new(None),\n\n max_transmission_unit: config.max_transmission_unit,\n\n })\n\n }\n", "file_path": "src/wg.rs", "rank": 23, "score": 15.727246190791396 }, { "content": " .with_context(|| \"Failed to create virtual IP device\")?;\n\n\n\n let mut virtual_interface = InterfaceBuilder::new(device, vec![])\n\n .ip_addrs([IpCidr::new(IpAddress::from(wg.source_peer_ip), 32)])\n\n .finalize();\n\n\n\n let mut socket_listeners = std::collections::HashMap::new();\n\n\n\n let socket = new_listener_socket(self.port)?;\n\n let client_handle = virtual_interface.add_socket(socket);\n\n let docket_listener_handle = SocketListenerHandle::new(client_handle);\n\n socket_listeners.insert(docket_listener_handle.identifier, docket_listener_handle);\n\n\n\n let mut listen_next = 
false;\n\n\n\n let mut tx_extra_identifier = None;\n\n let mut tx_extra = Vec::new();\n\n loop {\n\n let loop_start = smoltcp::time::Instant::now();\n\n\n", "file_path": "src/virtual_iface/tcp.rs", "rank": 24, "score": 15.12993751507878 }, { "content": "use crate::config::PortProtocol;\n\nuse crate::virtual_device::VirtualIpDevice;\n\nuse crate::virtual_iface::{VirtualInterfacePoll, VirtualPort};\n\nuse crate::wg::WireGuardTunnel;\n\nuse anyhow::Context;\n\nuse async_trait::async_trait;\n\nuse smoltcp::iface::{InterfaceBuilder, SocketHandle};\n\nuse smoltcp::socket::{TcpSocket, TcpSocketBuffer, TcpState};\n\nuse smoltcp::wire::{IpAddress, IpCidr};\n\nuse std::sync::Arc;\n\nuse std::time::Duration;\n\nuse uuid::Uuid;\n\n\n\nconst MAX_PACKET: usize = 65536;\n\n\n\npub struct TcpVirtualInterface {\n\n port: u16,\n\n wg: Arc<WireGuardTunnel>,\n\n sender_tx: tokio::sync::mpsc::Sender<(Uuid, Vec<u8>)>,\n\n receiver_rx: tokio::sync::mpsc::Receiver<(Uuid, Vec<u8>)>,\n", "file_path": "src/virtual_iface/tcp.rs", "rank": 25, "score": 15.03990800846404 }, { "content": "use crate::config::PortProtocol;\n\nuse crate::virtual_iface::VirtualPort;\n\nuse crate::wg::{WireGuardTunnel, DISPATCH_CAPACITY};\n\nuse anyhow::Context;\n\nuse smoltcp::phy::{Device, DeviceCapabilities, Medium};\n\nuse smoltcp::time::Instant;\n\nuse std::sync::Arc;\n\n\n\npub struct VirtualIpDevice {\n\n pub wg: Arc<WireGuardTunnel>,\n\n ip_dispatch_rx: tokio::sync::mpsc::Receiver<Vec<u8>>,\n\n}\n\n\n\nimpl VirtualIpDevice {\n\n pub fn new(\n\n wg: Arc<WireGuardTunnel>,\n\n ip_dispatch_rx: tokio::sync::mpsc::Receiver<Vec<u8>>,\n\n ) -> Self {\n\n Self { wg, ip_dispatch_rx }\n\n }\n", "file_path": "src/virtual_device.rs", "rank": 26, "score": 14.721410112822031 }, { "content": " .unwrap_or(RouteResult::Drop)\n\n }\n\n Ok(IpVersion::Ipv6) => {\n\n Ipv6Packet::new_checked(&packet)\n\n .ok()\n\n // Only care if the packet is destined for this tunnel\n\n .filter(|packet| Ipv6Addr::from(packet.dst_addr()) == 
self.source_peer_ip)\n\n .map(|packet| match packet.next_header() {\n\n IpProtocol::Tcp => Some(self.route_tcp_segment(packet.payload())),\n\n // Unrecognized protocol, so we cannot determine where to route\n\n _ => Some(RouteResult::Drop),\n\n })\n\n .flatten()\n\n .unwrap_or(RouteResult::Drop)\n\n }\n\n _ => RouteResult::Drop,\n\n }\n\n }\n\n\n\n fn route_icmp_packet(&self, segment: &[u8]) -> RouteResult {\n", "file_path": "src/wg.rs", "rank": 27, "score": 13.901648878618097 }, { "content": "\n\n pub fn new_direct(\n\n virtual_port: VirtualPort,\n\n enable_icmp: bool,\n\n wg: Arc<WireGuardTunnel>,\n\n ) -> anyhow::Result<Self> {\n\n let (ip_dispatch_tx, ip_dispatch_rx) = tokio::sync::mpsc::channel(DISPATCH_CAPACITY);\n\n\n\n if enable_icmp {\n\n wg.register_virtual_interface(VirtualPort(0, PortProtocol::Icmp), ip_dispatch_tx.clone())\n\n .with_context(|| \"Failed to register IP dispatch for virtual interface\")?;\n\n }\n\n\n\n wg.register_virtual_interface(virtual_port, ip_dispatch_tx)\n\n .with_context(|| \"Failed to register IP dispatch for virtual interface\")?;\n\n\n\n Ok(Self { wg, ip_dispatch_rx })\n\n }\n\n\n\n pub async fn new_sink(wg: Arc<WireGuardTunnel>) -> anyhow::Result<Self> {\n", "file_path": "src/virtual_device.rs", "rank": 28, "score": 13.563385829646919 }, { "content": "#[macro_use]\n\nextern crate log;\n\n\n\nuse std::sync::Arc;\n\n\n\nuse crate::config::Config;\n\nuse crate::config::PortProtocol;\n\nuse crate::virtual_iface::tcp::TcpVirtualInterface;\n\nuse crate::virtual_iface::VirtualInterfacePoll;\n\nuse crate::virtual_iface::VirtualPort;\n\nuse crate::wg::WireGuardTunnel;\n\nuse anyhow::Context;\n\nuse rand::{thread_rng, Rng};\n\nuse tokio::net::TcpStream;\n\nuse uuid::Uuid;\n\n\n\npub mod config;\n\npub mod ip_sink;\n\npub mod virtual_device;\n\npub mod virtual_iface;\n", "file_path": "src/main.rs", "rank": 29, "score": 13.102798051192858 }, { "content": " }\n\n }\n\n }\n\n }\n\n TunnResult::WriteToTunnelV4(packet, _) | 
TunnResult::WriteToTunnelV6(packet, _) => {\n\n debug!(\n\n \"WireGuard endpoint sent IP packet of {} bytes\",\n\n packet.len()\n\n );\n\n\n\n // For debugging purposes: parse packet\n\n trace_ip_packet(\"Received IP packet\", packet);\n\n\n\n match self.route_ip_packet(packet) {\n\n RouteResult::Dispatch(port) => {\n\n let sender = self.virtual_port_ip_tx.get(&port);\n\n if let Some(sender_guard) = sender {\n\n let sender = sender_guard.value();\n\n match sender.send(packet.to_vec()).await {\n\n Ok(_) => {\n", "file_path": "src/wg.rs", "rank": 30, "score": 13.054242641784251 }, { "content": " )\n\n .map_err(|s| anyhow::anyhow!(\"{}\", s))\n\n .with_context(|| \"Failed to initialize boringtun Tunn\")\n\n }\n\n\n\n /// Makes a decision on the handling of an incoming IP packet.\n\n fn route_ip_packet(&self, packet: &[u8]) -> RouteResult {\n\n match IpVersion::of_packet(packet) {\n\n Ok(IpVersion::Ipv4) => {\n\n Ipv4Packet::new_checked(&packet)\n\n .ok()\n\n // Only care if the packet is destined for this tunnel\n\n .filter(|packet| Ipv4Addr::from(packet.dst_addr()) == self.source_peer_ip)\n\n .map(|packet| match packet.protocol() {\n\n IpProtocol::Tcp => Some(self.route_tcp_segment(packet.payload())),\n\n IpProtocol::Icmp => Some(self.route_icmp_packet(packet.payload())),\n\n // Unrecognized protocol, so we cannot determine where to route\n\n _ => Some(RouteResult::Drop),\n\n })\n\n .flatten()\n", "file_path": "src/wg.rs", "rank": 31, "score": 12.262675111238355 }, { "content": " upstreams: &mut std::collections::HashMap<Uuid, Arc<TcpStream>>,\n\n id: Uuid,\n\n port: u16,\n\n) -> Arc<TcpStream> {\n\n let upstream_fd = upstreams.get(&id);\n\n if upstream_fd.is_none() {\n\n debug!(\"[{}] Creating upstream connection\", port);\n\n let s = Arc::new(TcpStream::connect(format!(\"127.0.0.1:{}\", port)).await.unwrap());\n\n upstreams.insert(id, s.clone());\n\n return s;\n\n }\n\n upstream_fd.unwrap().clone()\n\n // let b = unsafe { 
TcpStream::from_std(std::net::TcpStream::from_raw_fd(upstream_fd.unwrap())) };\n\n // debug!(\"[{}] Upstream fd is {} and convertion successfule\", port, upstream_fd.unwrap());\n\n // b.unwrap()\n\n}\n\n\n\nasync fn pipe_to_port(\n\n port: u16,\n\n data_to_virtual_server_tx: tokio::sync::mpsc::Sender<(Uuid, Vec<u8>)>,\n", "file_path": "src/main.rs", "rank": 32, "score": 12.111420360030161 }, { "content": "pub mod tcp;\n\n\n\nuse crate::config::PortProtocol;\n\nuse async_trait::async_trait;\n\nuse std::fmt::{Display, Formatter};\n\n\n\n#[async_trait]\n", "file_path": "src/virtual_iface/mod.rs", "rank": 33, "score": 12.09585253021919 }, { "content": " .with_context(|| \"Failed to initialize WireGuard tunnel\")?;\n\n let wg = Arc::new(wg);\n\n\n\n for port in config.ports_to_forward.clone() {\n\n {\n\n let wg = wg.clone();\n\n tokio::spawn(async move { forward_port(wg, port).await });\n\n }\n\n }\n\n\n\n {\n\n let wg = wg.clone();\n\n tokio::spawn(async move { ip_sink::run_ip_sink_interface(wg).await });\n\n }\n\n\n\n {\n\n let wg = wg.clone();\n\n tokio::spawn(async move { wg.consume_task().await });\n\n }\n\n\n", "file_path": "src/main.rs", "rank": 34, "score": 11.196324155631414 }, { "content": " // Ignored\n\n }\n\n other => {\n\n error!(\n\n \"Unexpected WireGuard state during encapsulation: {:?}\",\n\n other\n\n );\n\n }\n\n };\n\n Ok(())\n\n }\n\n\n\n /// Register a virtual interface (using its assigned virtual port) with the given IP packet `Sender`.\n\n pub fn register_virtual_interface(\n\n &self,\n\n virtual_port: VirtualPort,\n\n sender: tokio::sync::mpsc::Sender<Vec<u8>>,\n\n ) -> anyhow::Result<()> {\n\n self.virtual_port_ip_tx.insert(virtual_port, sender);\n\n Ok(())\n", "file_path": "src/wg.rs", "rank": 35, "score": 11.014532121274158 }, { "content": " }\n\n\n\n /// Register a virtual interface (using its assigned virtual port) with the given IP packet `Sender`.\n\n pub async fn register_sink_interface(\n\n &self,\n\n ) -> 
anyhow::Result<tokio::sync::mpsc::Receiver<Vec<u8>>> {\n\n let (sender, receiver) = tokio::sync::mpsc::channel(DISPATCH_CAPACITY);\n\n\n\n let mut sink_ip_tx = self.sink_ip_tx.write().await;\n\n *sink_ip_tx = Some(sender);\n\n\n\n Ok(receiver)\n\n }\n\n\n\n /// Releases the virtual interface from IP dispatch.\n\n pub fn release_virtual_interface(&self, virtual_port: VirtualPort) {\n\n self.virtual_port_ip_tx.remove(&virtual_port);\n\n }\n\n\n\n /// WireGuard Routine task. Handles Handshake, keep-alive, etc.\n", "file_path": "src/wg.rs", "rank": 36, "score": 10.771517536417074 }, { "content": " .ok()\n\n .map(|tcp| {\n\n if self\n\n .virtual_port_ip_tx\n\n .get(&VirtualPort(tcp.dst_port(), PortProtocol::Tcp))\n\n .is_some()\n\n {\n\n RouteResult::Dispatch(VirtualPort(tcp.dst_port(), PortProtocol::Tcp))\n\n } else if tcp.rst() {\n\n RouteResult::Drop\n\n } else {\n\n RouteResult::Sink\n\n }\n\n })\n\n .unwrap_or(RouteResult::Drop)\n\n }\n\n\n\n /// Route a packet to the IP sink interface.\n\n async fn route_ip_sink(&self, packet: &[u8]) -> anyhow::Result<()> {\n\n let ip_sink_tx = self.sink_ip_tx.read().await;\n", "file_path": "src/wg.rs", "rank": 37, "score": 10.327533415654447 }, { "content": "\n\n /// Encapsulates and sends an IP packet through to the WireGuard endpoint.\n\n pub async fn send_ip_packet(&self, packet: &[u8]) -> anyhow::Result<()> {\n\n trace_ip_packet(\"Sending IP packet\", packet);\n\n let mut send_buf = [0u8; MAX_PACKET];\n\n match self.peer.encapsulate(packet, &mut send_buf) {\n\n TunnResult::WriteToNetwork(packet) => {\n\n self.udp\n\n .send_to(packet, self.endpoint)\n\n .await\n\n .with_context(|| \"Failed to send encrypted IP packet to WireGuard endpoint.\")?;\n\n debug!(\n\n \"Sent {} bytes to WireGuard endpoint (encrypted IP packet)\",\n\n packet.len()\n\n );\n\n }\n\n TunnResult::Err(e) => {\n\n error!(\"Failed to encapsulate IP packet: {:?}\", e);\n\n }\n\n TunnResult::Done => {\n", "file_path": "src/wg.rs", "rank": 38, "score": 
10.321793719602308 }, { "content": " let ip_dispatch_rx = wg\n\n .register_sink_interface()\n\n .await\n\n .with_context(|| \"Failed to register IP dispatch for sink virtual interface\")?;\n\n Ok(Self { wg, ip_dispatch_rx })\n\n }\n\n}\n\n\n\nimpl<'a> Device<'a> for VirtualIpDevice {\n\n type RxToken = RxToken;\n\n type TxToken = TxToken;\n\n\n\n fn receive(&'a mut self) -> Option<(Self::RxToken, Self::TxToken)> {\n\n match self.ip_dispatch_rx.try_recv() {\n\n Ok(buffer) => Some((\n\n Self::RxToken { buffer },\n\n Self::TxToken {\n\n wg: self.wg.clone(),\n\n },\n\n )),\n", "file_path": "src/virtual_device.rs", "rank": 39, "score": 9.80789831514335 }, { "content": " listen_next = true;\n\n if let Some(readiness_notifier) = readiness_notifier.take() {\n\n debug!(\"sending READY\");\n\n readiness_notifier\n\n .send(socket_listener_handle.identifier.clone())\n\n .expect(\"Failed to notify real client that virtual client is ready\");\n\n }\n\n }\n\n if client_socket.can_recv() {\n\n match client_socket.recv(|buffer| (buffer.len(), buffer.to_vec())) {\n\n Ok(data) => {\n\n trace!(\n\n \"[{}] Virtual client received {} bytes of data\",\n\n self.port,\n\n data.len()\n\n );\n\n // Send it to the real client\n\n if let Err(e) = self\n\n .sender_tx\n\n .send((socket_listener_handle.identifier.clone(), data))\n", "file_path": "src/virtual_iface/tcp.rs", "rank": 40, "score": 9.742789863608104 }, { "content": " trace!(\n\n \"Dispatched received IP packet to virtual port {}\",\n\n port\n\n );\n\n }\n\n Err(e) => {\n\n error!(\n\n \"Failed to dispatch received IP packet to virtual port {}: {}\",\n\n port, e\n\n );\n\n }\n\n }\n\n } else {\n\n warn!(\"[{}] Race condition: failed to get virtual port sender after it was dispatched\", port);\n\n }\n\n }\n\n RouteResult::Sink => {\n\n trace!(\"Sending unroutable IP packet received from WireGuard endpoint to sink interface\");\n\n self.route_ip_sink(packet).await.unwrap_or_else(|e| {\n\n error!(\"Failed to send unroutable IP packet to 
sink: {:?}\", e)\n", "file_path": "src/wg.rs", "rank": 41, "score": 9.59114106260276 }, { "content": "\n\n // for id in ids \n\n {\n\n let id = ids[thread_rng().gen_range(0..ids.len())];\n\n debug!(\"[{}] cheking upstream {}\", port, id);\n\n\n\n let upstream = tcp_stream(&mut upstreams, id, port).await;\n\n upstream.readable().await?;\n\n\n\n let mut buffer = Vec::with_capacity(MAX_PACKET);\n\n match upstream.try_read_buf(&mut buffer) {\n\n Ok(size) if size > 0 => {\n\n let data = &buffer[..size];\n\n debug!(\n\n \"[{}] Read {} bytes of TCP data from real client\",\n\n port, size\n\n );\n\n if let Err(e) = data_to_virtual_server_tx.send((id, data.to_vec())).await {\n\n error!(\n\n \"[{}] Failed to dispatch data to virtual interface: {:?}\",\n", "file_path": "src/main.rs", "rank": 42, "score": 9.434258058200978 }, { "content": " buffer: Vec<u8>,\n\n}\n\n\n\nimpl smoltcp::phy::RxToken for RxToken {\n\n fn consume<R, F>(mut self, _timestamp: Instant, f: F) -> smoltcp::Result<R>\n\n where\n\n F: FnOnce(&mut [u8]) -> smoltcp::Result<R>,\n\n {\n\n f(&mut self.buffer)\n\n }\n\n}\n\n\n\n#[doc(hidden)]\n\npub struct TxToken {\n\n wg: Arc<WireGuardTunnel>,\n\n}\n\n\n\nimpl smoltcp::phy::TxToken for TxToken {\n\n fn consume<R, F>(self, _timestamp: Instant, len: usize, f: F) -> smoltcp::Result<R>\n\n where\n", "file_path": "src/virtual_device.rs", "rank": 43, "score": 9.030477771607519 }, { "content": " socket_listener_handle.connected = false;\n\n client_socket.abort();\n\n socket_listeners.remove(socket_identifier);\n\n continue;\n\n }\n\n }\n\n\n\n let mut to_transfer = None;\n\n\n\n if tx_extra.is_empty() {\n\n if let Ok((id, data)) = receiver_rx.try_recv() {\n\n tx_extra_identifier = Some(id);\n\n to_transfer = Some(data);\n\n }\n\n }\n\n let to_transfer_slice = to_transfer.as_ref().unwrap_or(&tx_extra).as_slice();\n\n\n\n if !to_transfer_slice.is_empty() {\n\n\n\n if !socket_listeners.contains_key(&tx_extra_identifier.unwrap()) {\n", "file_path": 
"src/virtual_iface/tcp.rs", "rank": 44, "score": 8.812380508260583 }, { "content": "}\n\n\n\nasync fn listen_and_forward(port: u16, wg: Arc<WireGuardTunnel>) -> anyhow::Result<()> {\n\n let (client_rediness_tx, client_rediness_rx) = tokio::sync::oneshot::channel();\n\n\n\n let (client_socket_tx, data_to_real_client_rx) = tokio::sync::mpsc::channel(1_000);\n\n let (data_to_virtual_server_tx, listener_socket_rx) = tokio::sync::mpsc::channel(1_000);\n\n\n\n {\n\n let virtual_interface = TcpVirtualInterface::new(\n\n port,\n\n wg,\n\n client_socket_tx,\n\n listener_socket_rx,\n\n client_rediness_tx,\n\n );\n\n\n\n tokio::spawn(async move {\n\n virtual_interface.poll_loop().await.unwrap_or_else(|e| {\n\n error!(\"Virtual interface poll loop failed unexpectedly: {}\", e);\n", "file_path": "src/main.rs", "rank": 45, "score": 8.691518647347298 }, { "content": " readiness_notifier: tokio::sync::oneshot::Sender<Uuid>,\n\n}\n\n\n\nimpl TcpVirtualInterface {\n\n pub fn new(\n\n port: u16,\n\n wg: Arc<WireGuardTunnel>,\n\n sender_tx: tokio::sync::mpsc::Sender<(Uuid, Vec<u8>)>,\n\n receiver_rx: tokio::sync::mpsc::Receiver<(Uuid, Vec<u8>)>,\n\n readiness_notifier: tokio::sync::oneshot::Sender<Uuid>,\n\n ) -> Self {\n\n Self {\n\n port,\n\n wg,\n\n sender_tx,\n\n receiver_rx,\n\n readiness_notifier,\n\n }\n\n }\n\n}\n", "file_path": "src/virtual_iface/tcp.rs", "rank": 46, "score": 8.543365681909354 }, { "content": " pub async fn routine_task(&self) -> anyhow::Result<()> {\n\n let mut send_buf = [0u8; MAX_PACKET];\n\n match self.peer.update_timers(&mut send_buf) {\n\n TunnResult::WriteToNetwork(packet) => {\n\n debug!(\n\n \"Sending routine packet of {} bytes to WireGuard endpoint\",\n\n packet.len()\n\n );\n\n self.udp\n\n .send_to(packet, self.endpoint)\n\n .await\n\n .with_context(|| \"Failed to send routine packet to WireGuard endpoint.\")\n\n .and_then(|_| Ok(()))\n\n }\n\n TunnResult::Err(e) => {\n\n // todo: recover from this\n\n error!(\n\n \"Failed to prepare routine 
packet for WireGuard endpoint: {:?}\",\n\n e\n\n );\n", "file_path": "src/wg.rs", "rank": 47, "score": 8.378263384026011 }, { "content": " {\n\n let wg = wg.clone();\n\n loop {\n\n wg.routine_task().await?\n\n }\n\n }\n\n}\n\n\n\nasync fn forward_port(wg: Arc<WireGuardTunnel>, port: u16) {\n\n loop {\n\n let result = listen_and_forward(port, wg.clone()).await;\n\n\n\n if let Err(e) = result {\n\n error!(\"[{}] Connection dropped un-gracefully: {:?}\", port, e);\n\n } else {\n\n info!(\"[{}] Connection closed by client\", port);\n\n }\n\n\n\n wg.release_virtual_interface(VirtualPort(port, PortProtocol::Tcp));\n\n }\n", "file_path": "src/main.rs", "rank": 48, "score": 8.050587118799584 }, { "content": " F: FnOnce(&mut [u8]) -> smoltcp::Result<R>,\n\n {\n\n let mut buffer = Vec::new();\n\n buffer.resize(len, 0);\n\n let result = f(&mut buffer);\n\n tokio::spawn(async move {\n\n match self.wg.send_ip_packet(&buffer).await {\n\n Ok(_) => {}\n\n Err(e) => {\n\n error!(\"Failed to send IP packet to WireGuard endpoint: {:?}\", e);\n\n }\n\n }\n\n });\n\n result\n\n }\n\n}\n", "file_path": "src/virtual_device.rs", "rank": 49, "score": 7.982402563479683 }, { "content": " listen_next = false;\n\n }\n\n\n\n for socket_identifier in socket_listeners.clone().keys() {\n\n let socket_listener_handle = socket_listeners.get_mut(socket_identifier).unwrap();\n\n let client_socket =\n\n virtual_interface.get_socket::<TcpSocket>(socket_listener_handle.socket_handle);\n\n\n\n if socket_listener_handle.connected && client_socket.state() == TcpState::Closed {\n\n trace!(\"[{}] Client socket closed TcpState::Closed\", self.port);\n\n socket_listener_handle.connected = false;\n\n client_socket.abort();\n\n socket_listeners.remove(socket_identifier);\n\n continue;\n\n }\n\n\n\n if !socket_listener_handle.connected\n\n && client_socket.state() == TcpState::Established\n\n {\n\n socket_listener_handle.mark_as_connected();\n", "file_path": "src/virtual_iface/tcp.rs", "rank": 50, "score": 
7.944722673614582 }, { "content": " match virtual_interface.poll(loop_start) {\n\n Ok(processed) if processed => {\n\n trace!(\n\n \"[{}] Virtual interface polled some packets to be processed\",\n\n self.port\n\n );\n\n }\n\n Err(e) => {\n\n error!(\"[{}] Virtual interface poll error: {:?}\", self.port, e);\n\n }\n\n _ => {}\n\n }\n\n\n\n \n\n if listen_next {\n\n info!(\"accepting next connection\");\n\n let socket = new_listener_socket(self.port)?;\n\n let client_handle = virtual_interface.add_socket(socket);\n\n let docket_listener_handle = SocketListenerHandle::new(client_handle);\n\n socket_listeners.insert(docket_listener_handle.identifier, docket_listener_handle);\n", "file_path": "src/virtual_iface/tcp.rs", "rank": 51, "score": 7.819569610416654 }, { "content": " debug!(\n\n \"route_icmp_packet called with segment of {} bytes\",\n\n segment.len()\n\n );\n\n\n\n Icmpv4Packet::new_checked(segment)\n\n .ok()\n\n .map(|_icmp| {\n\n RouteResult::Dispatch(VirtualPort(0, PortProtocol::Icmp))\n\n })\n\n .unwrap_or(RouteResult::Drop)\n\n }\n\n /// Makes a decision on the handling of an incoming TCP segment.\n\n fn route_tcp_segment(&self, segment: &[u8]) -> RouteResult {\n\n // debug!(\n\n // \"route_tcp_segment called with segment of {} bytes\",\n\n // segment.len()\n\n // );\n\n\n\n TcpPacket::new_checked(segment)\n", "file_path": "src/wg.rs", "rank": 52, "score": 7.6446206809886155 }, { "content": "\n\n#[derive(Clone, Copy)]\n\npub struct SocketListenerHandle {\n\n identifier: Uuid,\n\n socket_handle: SocketHandle,\n\n connected: bool,\n\n}\n\n\n\nimpl SocketListenerHandle {\n\n pub fn mark_as_connected(&mut self) {\n\n self.connected = true;\n\n }\n\n\n\n pub fn new(handle: SocketHandle) -> SocketListenerHandle {\n\n Self {\n\n identifier: Uuid::new_v4(),\n\n socket_handle: handle,\n\n connected: false,\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/virtual_iface/tcp.rs", "rank": 53, "score": 7.20152375791805 }, { "content": " }\n\n }\n\n\n\n Ok::<_, 
anyhow::Error>(())\n\n } => {}\n\n\n\n data_recv_result = data_to_real_client_rx.recv() => {\n\n match data_recv_result {\n\n Some((id, data)) => {\n\n debug!(\n\n \"[{}] Received {} bytes of TCP data from virtual server\",\n\n port, data.len()\n\n );\n\n let upstream = tcp_stream(&mut upstreams, id, port).await;\n\n\n\n match upstream.try_write(&data) {\n\n Ok(size) => {\n\n debug!(\n\n \"[{}] Wrote {} bytes of TCP data to real client\",\n\n port, size\n", "file_path": "src/main.rs", "rank": 54, "score": 7.034593230267718 }, { "content": " trace!(\"Starting WireGuard consumption task\");\n\n\n\n loop {\n\n let mut recv_buf = [0u8; MAX_PACKET];\n\n let mut send_buf = [0u8; MAX_PACKET];\n\n\n\n let size = match self.udp.recv(&mut recv_buf).await {\n\n Ok(size) => size,\n\n Err(e) => {\n\n error!(\"Failed to read from WireGuard endpoint: {:?}\", e);\n\n // Sleep a little bit and try again\n\n tokio::time::sleep(Duration::from_millis(1)).await;\n\n continue;\n\n }\n\n };\n\n\n\n let data = &recv_buf[..size];\n\n match self.peer.decapsulate(None, data, &mut send_buf) {\n\n TunnResult::WriteToNetwork(packet) => {\n\n match self.udp.send_to(packet, self.endpoint).await {\n", "file_path": "src/wg.rs", "rank": 55, "score": 6.842412289768938 }, { "content": " tx_extra = Vec::new();\n\n } else {\n\n let socket_listener_handle = socket_listeners\n\n .get_mut(&tx_extra_identifier.unwrap())\n\n .unwrap();\n\n let client_handle = socket_listener_handle.socket_handle;\n\n let (client_socket, _context) =\n\n virtual_interface.get_socket_and_context::<TcpSocket>(client_handle);\n\n\n\n let total = to_transfer_slice.len();\n\n match client_socket.send_slice(to_transfer_slice) {\n\n Ok(sent) => {\n\n trace!(\n\n \"[{}] Sent {}/{} bytes via virtual client socket\",\n\n self.port,\n\n sent,\n\n total,\n\n );\n\n tx_extra = Vec::from(&to_transfer_slice[sent..total]);\n\n }\n", "file_path": "src/virtual_iface/tcp.rs", "rank": 56, "score": 6.586409600055966 }, { "content": " mut 
data_to_real_client_rx: tokio::sync::mpsc::Receiver<(Uuid, Vec<u8>)>,\n\n) -> anyhow::Result<()> {\n\n let mut upstreams: std::collections::HashMap<Uuid, Arc<TcpStream>> =\n\n std::collections::HashMap::new();\n\n\n\n loop {\n\n tokio::select! {\n\n _ = async {\n\n loop {\n\n info!(\"tokio::select lopop\");\n\n if upstreams.len() == 0 {\n\n info!(\"empty upstreams\");\n\n return std::future::pending().await;\n\n }\n\n let ids = upstreams.clone().into_keys().collect::<Vec<Uuid>>();\n\n\n\n // let upstreams_clone = upstreams.clone();\n\n // let rs: Vec<Box<dyn std::future::Future<Output=std::io::Result<()>>>> = upstreams_clone.iter().map(|(k,v)| -> Box<dyn std::future::Future<Output=std::io::Result<()>>> {\n\n // Box::new(v.readable())\n\n // }).collect();\n", "file_path": "src/main.rs", "rank": 57, "score": 6.529857918495712 }, { "content": " .await\n\n {\n\n error!(\"[{}] Failed to dispatch data from virtual client to real client: {:?}\", self.port, e);\n\n }\n\n }\n\n Err(e) => {\n\n error!(\n\n \"[{}] Failed to read from virtual client socket: {:?}\",\n\n self.port, e\n\n );\n\n }\n\n }\n\n }\n\n if client_socket.state() == TcpState::CloseWait {\n\n // No data to be sent in this loop. 
If the client state is CLOSE-WAIT (because of a server FIN),\n\n // the interface is shutdown.\n\n trace!(\n\n \"[{}] client sent no more data, and server sent FIN (CLOSE-WAIT)\",\n\n self.port\n\n );\n", "file_path": "src/virtual_iface/tcp.rs", "rank": 58, "score": 6.497760050682737 }, { "content": " port, e\n\n );\n\n }\n\n }\n\n Err(ref e) if e.kind() == std::io::ErrorKind::WouldBlock => {\n\n // ignore\n\n }\n\n Err(e) => {\n\n error!(\n\n \"[{}] Failed to read from client TCP socket: {:?}\",\n\n port, e\n\n );\n\n trace!(\"break1\");\n\n upstreams.remove(&id);\n\n }\n\n _ => {\n\n trace!(\"break2\");\n\n upstreams.remove(&id);\n\n }\n\n }\n", "file_path": "src/main.rs", "rank": 59, "score": 6.450590317211312 }, { "content": " Ok(_) => {}\n\n Err(e) => {\n\n error!(\"Failed to send decapsulation-instructed packet to WireGuard endpoint: {:?}\", e);\n\n continue;\n\n }\n\n };\n\n loop {\n\n let mut send_buf = [0u8; MAX_PACKET];\n\n match self.peer.decapsulate(None, &[], &mut send_buf) {\n\n TunnResult::WriteToNetwork(packet) => {\n\n match self.udp.send_to(packet, self.endpoint).await {\n\n Ok(_) => {}\n\n Err(e) => {\n\n error!(\"Failed to send decapsulation-instructed packet to WireGuard endpoint: {:?}\", e);\n\n break;\n\n }\n\n };\n\n }\n\n _ => {\n\n break;\n", "file_path": "src/wg.rs", "rank": 60, "score": 6.293982271981161 }, { "content": " Err(e) => {\n\n error!(\n\n \"[{}] Failed to send slice via virtual client socket: {:?}\",\n\n self.port, e\n\n );\n\n }\n\n }\n\n }\n\n }\n\n\n\n match virtual_interface.poll_delay(loop_start) {\n\n Some(smoltcp::time::Duration::ZERO) => {\n\n continue;\n\n }\n\n _ => {\n\n tokio::time::sleep(Duration::from_millis(1)).await;\n\n }\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/virtual_iface/tcp.rs", "rank": 61, "score": 5.806709913672869 }, { "content": "\n\n if let Some(ip_sink_tx) = &*ip_sink_tx {\n\n ip_sink_tx\n\n .send(packet.to_vec())\n\n .await\n\n .with_context(|| \"Failed to dispatch IP packet to sink 
interface\")\n\n } else {\n\n warn!(\n\n \"Could not dispatch unroutable IP packet to sink because interface is not active.\"\n\n );\n\n Ok(())\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/wg.rs", "rank": 62, "score": 5.038014271981552 }, { "content": " Err(anyhow::anyhow!(\n\n \"Failed to prepare routine packet for WireGuard endpoint: {:?}\",\n\n e\n\n ))\n\n }\n\n TunnResult::Done => {\n\n // Sleep for a bit\n\n tokio::time::sleep(Duration::from_millis(1)).await;\n\n Ok(())\n\n }\n\n other => {\n\n warn!(\"Unexpected WireGuard routine task state: {:?}\", other);\n\n Ok(())\n\n }\n\n }\n\n }\n\n\n\n /// WireGuard consumption task. Receives encrypted packets from the WireGuard endpoint,\n\n /// decapsulates them, and dispatches newly received IP packets.\n\n pub async fn consume_task(&self) -> ! {\n", "file_path": "src/wg.rs", "rank": 63, "score": 4.8556292011471704 }, { "content": " );\n\n }\n\n Err(ref e) if e.kind() == std::io::ErrorKind::WouldBlock => {\n\n }\n\n Err(e) => {\n\n error!(\n\n \"[{}] Failed to write to client TCP socket: {:?}\",\n\n port, e\n\n );\n\n }\n\n }\n\n },\n\n None => {\n\n },\n\n }\n\n }\n\n }\n\n }\n\n\n\n trace!(\"[{}] TCP socket handler task terminated\", port);\n\n\n\n Ok(())\n\n}\n", "file_path": "src/main.rs", "rank": 64, "score": 4.365398221897878 }, { "content": " Err(_) => None,\n\n }\n\n }\n\n\n\n fn transmit(&'a mut self) -> Option<Self::TxToken> {\n\n Some(TxToken {\n\n wg: self.wg.clone(),\n\n })\n\n }\n\n\n\n fn capabilities(&self) -> DeviceCapabilities {\n\n let mut cap = DeviceCapabilities::default();\n\n cap.medium = Medium::Ip;\n\n cap.max_transmission_unit = self.wg.max_transmission_unit;\n\n cap\n\n }\n\n}\n\n\n\n#[doc(hidden)]\n\npub struct RxToken {\n", "file_path": "src/virtual_device.rs", "rank": 65, "score": 3.5801758734848406 }, { "content": " })\n\n });\n\n }\n\n\n\n client_rediness_rx\n\n .await\n\n .with_context(|| \"Virtual client dropped before being ready.\")?;\n\n trace!(\"[{}] first client connected\", 
port);\n\n\n\n pipe_to_port(\n\n port,\n\n data_to_virtual_server_tx,\n\n data_to_real_client_rx,\n\n )\n\n .await?;\n\n\n\n Ok(())\n\n}\n\n\n\nasync fn tcp_stream(\n", "file_path": "src/main.rs", "rank": 66, "score": 2.4679534866922435 } ]
Rust
prelude/src/lib.rs
ferristseng/cryogen
b0bee906d2c2ea8f6f328edc71cf2f509b06dbf0
extern crate clap; extern crate serde; #[cfg(feature = "markdown")] extern crate serde_yaml; #[cfg(feature = "markdown")] #[macro_use] extern crate serde_derive; use clap::{Arg, ArgMatches}; use serde::Serialize; use std::{cmp, borrow::Cow, io::{self, Read}}; #[cfg(feature = "markdown")] pub mod markdown; #[macro_export] macro_rules! args { ( $($name: ident [$help: expr]);*; ) => { vec![ $( Arg::with_name($name).long($name).help($help), )* ] }; } #[derive(Debug)] pub struct VarMapping<'a> { var_name: &'a str, arg_value: &'a str, } impl<'a> VarMapping<'a> { pub fn from_str(s: &'a str) -> Result<VarMapping<'a>, String> { let mut splits = s.splitn(2, ":"); let var_name = if let Some(var_name) = splits.next() { var_name } else { return Err(format!("Expected a variable name to bind to in ({})", s)); }; let arg_value = if let Some(arg_value) = splits.next() { arg_value } else { return Err(format!( "Expected a value to bind to ({}) in ({})", var_name, s )); }; Ok(VarMapping { var_name, arg_value, }) } #[inline] pub fn arg_value(&self) -> &'a str { self.arg_value } #[inline] pub fn var_name(&self) -> &'a str { self.var_name } } pub enum Interpretation { Raw, Path, } pub enum Source<'a, R> where R: Read, { Raw(&'a str, usize), File(R), } impl<'a, R> Source<'a, R> where R: Read, { pub fn consume(self) -> Result<Cow<'a, str>, String> { match self { Source::Raw(raw, _) => Ok(Cow::Borrowed(raw)), Source::File(mut reader) => { let mut buf = String::new(); reader.read_to_string(&mut buf).map_err(|e| e.to_string())?; Ok(Cow::Owned(buf)) } } } } impl<'a, R> Read for Source<'a, R> where R: Read, { fn read(&mut self, buf: &mut [u8]) -> Result<usize, io::Error> { match self { &mut Source::Raw(raw, ref mut index) => { let current = *index; let slice = &raw.as_bytes()[current..]; let copy_num = cmp::min(buf.len(), slice.len()); &buf[..copy_num].copy_from_slice(&slice[..copy_num]); *index = current + copy_num; Ok(copy_num) } &mut Source::File(ref mut reader) => reader.read(buf), } } } 
pub trait CompileVariablePlugin { type RenderValue: Serialize; const PLUGIN_NAME: &'static str; const ARG_NAME: &'static str; const ARG_INTERPRETATION: Interpretation; const HELP: &'static str; fn additional_args() -> Vec<Arg<'static, 'static>>; fn from_args<'a>(args: &'a ArgMatches<'a>) -> Self; fn read<'a, R>(&self, src: Source<'a, R>) -> Result<Self::RenderValue, String> where R: Read; }
extern crate clap; extern crate serde; #[cfg(feature = "markdown")] extern crate serde_yaml; #[cfg(feature = "markdown")] #[macro_use] extern crate serde_derive; use clap::{Arg, ArgMatches}; use serde::Serialize; use std::{cmp, borrow::Cow, io::{self, Read}}; #[cfg(feature = "markdown")] pub mod markdown; #[macro_export] macro_rules! args { ( $($name: ident [$help: expr]);*; ) => { vec![ $( Arg::with_name($name).long($name).help($help), )* ] }; } #[derive(Debug)] pub struct VarMapping<'a> { var_name: &'a str, arg_value: &'a str, } impl<'a> VarMapping<'a> { pub fn from_str(s: &'a str) -> Result<VarMapping<'a>, String> { let mut splits = s.splitn(2, ":"); let var_name = if let Some(var_name) = splits.next() { var_name } else { return Err(format!("Expected a variable name to bind
arg_value, }) } #[inline] pub fn arg_value(&self) -> &'a str { self.arg_value } #[inline] pub fn var_name(&self) -> &'a str { self.var_name } } pub enum Interpretation { Raw, Path, } pub enum Source<'a, R> where R: Read, { Raw(&'a str, usize), File(R), } impl<'a, R> Source<'a, R> where R: Read, { pub fn consume(self) -> Result<Cow<'a, str>, String> { match self { Source::Raw(raw, _) => Ok(Cow::Borrowed(raw)), Source::File(mut reader) => { let mut buf = String::new(); reader.read_to_string(&mut buf).map_err(|e| e.to_string())?; Ok(Cow::Owned(buf)) } } } } impl<'a, R> Read for Source<'a, R> where R: Read, { fn read(&mut self, buf: &mut [u8]) -> Result<usize, io::Error> { match self { &mut Source::Raw(raw, ref mut index) => { let current = *index; let slice = &raw.as_bytes()[current..]; let copy_num = cmp::min(buf.len(), slice.len()); &buf[..copy_num].copy_from_slice(&slice[..copy_num]); *index = current + copy_num; Ok(copy_num) } &mut Source::File(ref mut reader) => reader.read(buf), } } } pub trait CompileVariablePlugin { type RenderValue: Serialize; const PLUGIN_NAME: &'static str; const ARG_NAME: &'static str; const ARG_INTERPRETATION: Interpretation; const HELP: &'static str; fn additional_args() -> Vec<Arg<'static, 'static>>; fn from_args<'a>(args: &'a ArgMatches<'a>) -> Self; fn read<'a, R>(&self, src: Source<'a, R>) -> Result<Self::RenderValue, String> where R: Read; }
to in ({})", s)); }; let arg_value = if let Some(arg_value) = splits.next() { arg_value } else { return Err(format!( "Expected a value to bind to ({}) in ({})", var_name, s )); }; Ok(VarMapping { var_name,
function_block-random_span
[]
Rust
src/memory/mmu/mod.rs
FelixMcFelix/rs2
194a5b6c87d025cf7fa40700b7750d7fbbaf9e58
pub mod tlb; use crate::core::{cop0::*, exceptions::L1Exception}; use tlb::Tlb; pub struct Mmu { pub tlb: Tlb, pub page_mask: u32, pub wired: u8, pub index: u8, pub context: u32, pub asid: u8, } const VPN_ALWAYS_ACTIVE_BITS: u32 = 0b1111_1110_0000_0000_0000_0000_0000_0000; const OFFSET_ALWAYS_ACTIVE_BITS: u32 = 0b0000_0000_0000_0000_0000_1111_1111_1111; const RAW_MASK_4KB: u32 = 0b0000_0000_0000; const RAW_MASK_16KB: u32 = 0b0000_0000_0011; const RAW_MASK_64KB: u32 = 0b0000_0000_1111; const RAW_MASK_256KB: u32 = 0b0000_0011_1111; const RAW_MASK_1MB: u32 = 0b0000_1111_1111; const RAW_MASK_4MB: u32 = 0b0011_1111_1111; const RAW_MASK_16MB: u32 = 0b1111_1111_1111; const PAGE_MASK_4KB: u32 = RAW_MASK_4KB << 13; const PAGE_MASK_16KB: u32 = RAW_MASK_16KB << 13; const PAGE_MASK_64KB: u32 = RAW_MASK_64KB << 13; const PAGE_MASK_256KB: u32 = RAW_MASK_256KB << 13; const PAGE_MASK_1MB: u32 = RAW_MASK_1MB << 13; const PAGE_MASK_4MB: u32 = RAW_MASK_4MB << 13; const PAGE_MASK_16MB: u32 = RAW_MASK_16MB << 13; #[inline] pub fn page_mask_shift_amount(p_mask: u32) -> u32 { 12 + match p_mask { PAGE_MASK_4KB => 0, PAGE_MASK_16KB => 2, PAGE_MASK_64KB => 4, PAGE_MASK_256KB => 6, PAGE_MASK_1MB => 8, PAGE_MASK_4MB => 10, PAGE_MASK_16MB => 12, _ => unreachable!(), } } pub fn page_mask_size(p_mask: u32) -> &'static str { match p_mask { PAGE_MASK_4KB => "4KB", PAGE_MASK_16KB => "16KB", PAGE_MASK_64KB => "64KB", PAGE_MASK_256KB => "256KB", PAGE_MASK_1MB => "1MB", PAGE_MASK_4MB => "4MB", PAGE_MASK_16MB => "16MB", _ => unreachable!(), } } const SPR_SHIFT_AMOUNT: u32 = 12 + 2 + 1; impl Mmu { pub fn translate_address(&self, v_addr: u32, load: bool) -> MmuAddress { let vpn_shift_amount = page_mask_shift_amount(self.page_mask); let vpn = v_addr >> vpn_shift_amount; let vpn2 = (vpn >> 1) << (vpn_shift_amount - 12); let spr_vpn2 = (v_addr >> SPR_SHIFT_AMOUNT) << (SPR_SHIFT_AMOUNT - 12 - 1); let even_page = (vpn & 1) == 0; trace!("Translating {} -- VPN: {}", v_addr, vpn2); let line = 
self.tlb.find_match(vpn2, spr_vpn2); let out = line .and_then(|line| { if !line.global && line.asid != self.asid { return None; } let indiv_page = if even_page { &line.even } else { &line.odd }; if !indiv_page.valid { if load { return Some(MmuAddress::Exception(L1Exception::TlbFetchLoadInvalid( v_addr, ))); } else { return Some(MmuAddress::Exception(L1Exception::TlbStoreInvalid(v_addr))); } } else if !indiv_page.dirty && !load { return Some(MmuAddress::Exception(L1Exception::TlbModified(v_addr))); } Some(if line.scratchpad { let offset = v_addr & (OFFSET_ALWAYS_ACTIVE_BITS | (PAGE_MASK_16KB >> 1)); MmuAddress::Scratchpad(offset) } else { let offset = v_addr & (OFFSET_ALWAYS_ACTIVE_BITS | (self.page_mask >> 1)); MmuAddress::Address((indiv_page.page_frame_number << vpn_shift_amount) | offset) }) }) .unwrap_or_else(|| { MmuAddress::Exception(if load { L1Exception::TlbFetchLoadRefill(v_addr) } else { L1Exception::TlbStoreRefill(v_addr) }) }); trace!("Result: {:?}", out); out } pub fn write_index(&mut self, entry_hi: u32, entry_lo0: u32, entry_lo1: u32) { self.tlb.lines[self.index as usize].update(self.page_mask, entry_hi, entry_lo0, entry_lo1); trace!( "Put into line {}: {:?}", self.index, self.tlb.lines[self.index as usize] ); } pub fn write_random( &mut self, random_index: u32, entry_hi: u32, entry_lo0: u32, entry_lo1: u32, ) { self.tlb.lines[random_index as usize].update( self.page_mask, entry_hi, entry_lo0, entry_lo1, ); trace!( "Put into line {}: {:?}", self.index, self.tlb.lines[self.index as usize] ); } } #[derive(Debug, PartialEq)] pub enum MmuAddress { Address(u32), Scratchpad(u32), Exception(L1Exception), } impl Default for Mmu { fn default() -> Self { Self { tlb: Default::default(), page_mask: 0, wired: WIRED_DEFAULT as u8, index: 0, context: 0, asid: 0, } } }
pub mod tlb; use crate::core::{cop0::*, exceptions::L1Exception}; use tlb::Tlb; pub struct Mmu { pub tlb: Tlb, pub page_mask: u32, pub wired: u8, pub index: u8, pub context: u32, pub asid: u8, } const VPN_ALWAYS_ACTIVE_BITS: u32 = 0b1111_1110_0000_0000_0000_0000_0000_0000; const OFFSET_ALWAYS_ACTIVE_BITS: u32 = 0b0000_0000_0000_0000_0000_1111_1111_1111; const RAW_MASK_4KB: u32 = 0b0000_0000_0000; const RAW_MASK_16KB: u32 = 0b0000_0000_0011; const RAW_MASK_64KB: u32 = 0b0000_0000_1111; const RAW_MASK_256KB: u32 = 0b0000_0011_1111; const RAW_MASK_1MB: u32 = 0b0000_1111_1111; const RAW_MASK_4MB: u32 = 0b0011_1111_1111; const RAW_MASK_16MB: u32 = 0b1111_1111_1111; const PAGE_MASK_4KB: u32 = RAW_MASK_4KB << 13; const PAGE_MASK_16KB: u32 = RAW_MASK_16KB << 13; const PAGE_MASK_64KB: u32 = RAW_MASK_64KB << 13; const PAGE_MASK_256KB: u32 = RAW_MASK_256KB << 13; const PAGE_MASK_1MB: u32 = RAW_MASK_1MB << 13; const PAGE_MASK_4MB: u32 = RAW_MASK_4MB << 13; const PAGE_MASK_16MB: u32 = RAW_MASK_16MB << 13; #[inline] pub fn page_mask_shift_amount(p_mask: u32) -> u32 { 12 + match p_mask { PAGE_MASK_4KB => 0, PAGE_MASK_16KB => 2, PAGE_MASK_64KB => 4, PAGE_MASK_256KB => 6, PAGE_MASK_1MB => 8, PAGE_MASK_4MB => 10, PAGE_MASK_16MB => 12, _ => unreachable!(), } } pub fn page_mask_size(p_mask: u32) -> &'static str { match p_mask { PAGE_MASK_4KB => "4KB", PAGE_MASK_16KB => "16KB", PAGE_MASK_64KB => "64KB", PAGE_MASK_256KB => "256KB", PAGE_MASK_1MB => "1MB", PAGE_MASK_4MB => "4MB", PAGE_MASK_16MB => "16MB", _ => unreachable!(), } } const SPR_SHIFT_AMOUNT: u32 = 12 + 2 + 1; impl Mmu { pub fn translate_address(&self, v_addr: u32, load: bool) -> MmuAddress { let vpn_shift_amount = page_mask_shift_amount(self.page_mask); let vpn = v_addr >> vpn_shift_amount; let vpn2 = (vpn >> 1) << (vpn_shift_amount - 12); let spr_vpn2 = (v_addr >> SPR_SHIFT_AMOUNT) << (SPR_SHIFT_AMOUNT - 12 - 1); let even_page = (vpn & 1) == 0; trace!("Translating {} -- VPN: {}", v_addr, vpn2); let line = 
self.tlb.find_match(vpn2, spr_vpn2); let out = line .and_then(|line| { if !line.global && line.asid != self.asid { return None; } let indiv_page = if even_page { &line.even } else { &line.odd }; if !indiv_page.valid { if load { return Some(MmuAddress::Exception(L1Exception::TlbFetchLoadInvalid( v_addr, ))); } else { return Some(MmuAddress::Exception(L1Exception::TlbStoreInvalid(v_addr))); } } else if !indiv_page.dirty && !load { return Some(MmuAddress::Exception(L1Exception::TlbModified(v_addr))); } Some(if line.scratchpad { let offset = v_addr & (OFFSET_ALWAYS_ACTIVE_BITS | (PAGE_MASK_16KB >> 1)); MmuAddress::Scratchpad(offset) } else { let offset = v_addr & (OFFSET_ALWAYS_ACTIVE_BITS | (self.page_mask >> 1)); MmuAddress::Address((indiv_page.page_frame_number << vpn_shift_amount) | offset) }) }) .unwrap_or_else(|| { MmuAddress::Exception(if load { L1Exception::TlbFetchLoadRefill(v_addr) } else { L1Exception::TlbStoreRefill(v_addr) }) }); trace!("Result: {:?}", out); out } pub fn write_index(&mut self, entry_hi: u32, entry_lo0: u32, entry_lo1: u32) { self.tlb.lines[self.index as usize].update(self.page_mask, entry_hi, entry_lo0, entry_lo1); trace!( "Put into line {}: {:?}", self.index, self.tlb.lines[self.index as usize] ); } pub fn write_random( &mut self, random_index: u32, entry_hi: u32, entry_lo0: u32, entry_lo1: u32, ) { self.tlb.lines[random_index as usize].update( self.page_mask, entry_hi, entry_lo0, entry_lo1, ); trace!( "Put into line {}: {:?}", self.index, self.tlb.lines[self.index as usize] ); } } #[derive(Debug, PartialEq)] pub enum MmuAddress { Address(u32), Scratchpad(u32), Exception(L1Exception), } impl Default for Mmu {
}
fn default() -> Self { Self { tlb: Default::default(), page_mask: 0, wired: WIRED_DEFAULT as u8, index: 0, context: 0, asid: 0, } }
function_block-function_prefixed
[]
Rust
ref-farming/tests/common/actions.rs
ParasHQ/paras-nft-farming-contract
1fd7065c05706fb47d0e5b1ead9e194c64ab78ca
use near_sdk::json_types::{U128}; use near_sdk::{Balance}; use near_sdk_sim::{call, to_yocto, ContractAccount, UserAccount, DEFAULT_GAS}; use test_token::ContractContract as TestToken; use ref_farming::{ContractContract as Farming}; use ref_farming::{HRSimpleFarmTerms}; use near_sdk::serde_json::Value; use near_sdk::serde_json::json; use super::init::*; use super::utils::*; #[allow(dead_code)] pub(crate) fn prepair_pool_and_liquidity( root: &UserAccount, owner: &UserAccount, farming_id: String, lps: Vec<&UserAccount>, ) -> (UserAccount, ContractAccount<TestToken>, ContractAccount<TestToken>) { let pool = deploy_pool(&root, swap(), owner.account_id()); let token1 = deploy_token(&root, dai(), vec![swap()]); let token2 = deploy_token(&root, eth(), vec![swap()]); owner.call( pool.account_id(), "extend_whitelisted_tokens", &json!({ "tokens": vec![to_va(dai()), to_va(eth())] }).to_string().into_bytes(), DEFAULT_GAS, 0 ).assert_success(); root.call( pool.account_id(), "add_simple_pool", &json!({ "tokens": vec![to_va(dai()), to_va(eth())], "fee": 25 }).to_string().into_bytes(), DEFAULT_GAS, to_yocto("1") ).assert_success(); root.call( pool.account_id(), "mft_register", &json!({ "token_id": ":0".to_string(), "account_id": farming_id }).to_string().into_bytes(), DEFAULT_GAS, to_yocto("1") ).assert_success(); for lp in lps { add_liqudity(lp, &pool, &token1, &token2, 0); } (pool,token1, token2) } #[allow(dead_code)] pub(crate) fn prepair_pool( root: &UserAccount, owner: &UserAccount, ) -> (UserAccount, ContractAccount<TestToken>, ContractAccount<TestToken>) { let pool = deploy_pool(&root, swap(), owner.account_id()); let token1 = deploy_token(&root, dai(), vec![swap()]); let token2 = deploy_token(&root, eth(), vec![swap()]); owner.call( pool.account_id(), "extend_whitelisted_tokens", &json!({ "tokens": vec![to_va(dai()), to_va(eth())] }).to_string().into_bytes(), DEFAULT_GAS, 0 ); root.call( pool.account_id(), "add_simple_pool", &json!({ "tokens": vec![to_va(dai()), 
to_va(eth())], "fee": 25 }).to_string().into_bytes(), DEFAULT_GAS, to_yocto("1") ).assert_success(); (pool, token1, token2) } #[allow(dead_code)] pub(crate) fn prepair_farm( root: &UserAccount, owner: &UserAccount, token: &ContractAccount<TestToken>, total_reward: Balance, ) -> (ContractAccount<Farming>, String) { let farming = deploy_farming(&root, farming_id(), owner.account_id()); let out_come = call!( owner, farming.create_simple_farm(HRSimpleFarmTerms{ seed_id: format!("{}@0", swap()), reward_token: to_va(token.account_id()), start_at: 0, reward_per_session: to_yocto("1").into(), session_interval: 60, }, Some(U128(1000000000000000000)), None, None), deposit = to_yocto("1") ); out_come.assert_success(); let farm_id: String; if let Value::String(farmid) = out_come.unwrap_json_value() { farm_id = farmid.clone(); } else { farm_id = String::from("N/A"); } call!( root, token.storage_deposit(Some(to_va(farming_id())), None), deposit = to_yocto("1") ) .assert_success(); mint_token(&token, &root, total_reward.into()); call!( root, token.ft_transfer_call(to_va(farming_id()), total_reward.into(), None, farm_id.clone()), deposit = 1 ) .assert_success(); (farming, farm_id) } #[allow(dead_code)] pub(crate) fn prepair_multi_farms( root: &UserAccount, owner: &UserAccount, token: &ContractAccount<TestToken>, total_reward: Balance, farm_count: u32, ) -> (ContractAccount<Farming>, Vec<String>) { let farming = deploy_farming(&root, farming_id(), owner.account_id()); let mut farm_ids: Vec<String> = vec![]; call!( root, token.storage_deposit(Some(to_va(farming_id())), None), deposit = to_yocto("1") ) .assert_success(); mint_token(&token, &root, to_yocto("100000")); for _ in 0..farm_count { let out_come = call!( owner, farming.create_simple_farm(HRSimpleFarmTerms{ seed_id: format!("{}@0", swap()), reward_token: to_va(token.account_id()), start_at: 0, reward_per_session: to_yocto("1").into(), session_interval: 60, }, Some(U128(1000000000000000000)), None, None), deposit = 
to_yocto("1") ); out_come.assert_success(); let farm_id: String; if let Value::String(farmid) = out_come.unwrap_json_value() { farm_id = farmid.clone(); } else { farm_id = String::from("N/A"); } call!( root, token.ft_transfer_call(to_va(farming_id()), total_reward.into(), None, farm_id.clone()), deposit = 1 ) .assert_success(); farm_ids.push(farm_id.clone()); println!(" Farm {} created and running at Height#{}", farm_id.clone(), root.borrow_runtime().current_block().block_height); } (farming, farm_ids) } pub(crate) fn add_liqudity( user: &UserAccount, pool: &UserAccount, token1: &ContractAccount<TestToken>, token2: &ContractAccount<TestToken>, pool_id: u64, ) { mint_token(&token1, user, to_yocto("105")); mint_token(&token2, user, to_yocto("105")); user.call( pool.account_id(), "storage_deposit", &json!({}).to_string().into_bytes(), DEFAULT_GAS, to_yocto("1") ).assert_success(); call!( user, token1.ft_transfer_call(to_va(swap()), to_yocto("100").into(), None, "".to_string()), deposit = 1 ) .assert_success(); call!( user, token2.ft_transfer_call(to_va(swap()), to_yocto("100").into(), None, "".to_string()), deposit = 1 ) .assert_success(); user.call( pool.account_id(), "add_liquidity", &json!({ "pool_id": pool_id, "amounts": vec![U128(to_yocto("100")), U128(to_yocto("100"))] }).to_string().into_bytes(), DEFAULT_GAS, to_yocto("0.01") ).assert_success(); } pub(crate) fn mint_token(token: &ContractAccount<TestToken>, user: &UserAccount, amount: Balance) { call!( user, token.mint(to_va(user.account_id.clone()), amount.into()) ).assert_success(); }
use near_sdk::json_types::{U128}; use near_sdk::{Balance}; use near_sdk_sim::{call, to_yocto, ContractAccount, UserAccount, DEFAULT_GAS}; use test_token::ContractContract as TestToken; use ref_farming::{ContractContract as Farming}; use ref_farming::{HRSimpleFarmTerms}; use near_sdk::serde_json::Value; use near_sdk::serde_json::json; use super::init::*; use super::utils::*; #[allow(dead_code)] pub(crate) fn prepair_pool_and_liquidity( root: &UserAccount, owner: &UserAccount, farming_id: String, lps: Vec<&UserAccount>, ) -> (UserAccount, ContractAccount<TestToken>, ContractAccount<TestToken>) { let pool = deploy_pool(&root, swap(), owner.account_id()); let token1 = deploy_token(&root, dai(), vec![swap()]); let token2 = deploy_token(&root, eth(), vec![swap()]); owner.call( pool.account_id(), "extend_whitelisted_tokens", &json!({ "tokens": vec![to_va(dai()), to_va(eth())] }).to_string().into_bytes(), DEFAULT_GAS, 0 ).assert_success(); root.call( pool.account_id(), "add_simple_pool", &json!({ "tokens": vec![to_va(dai()), to_va(eth())], "fee": 25 }).to_string().into_bytes(), DEFAULT_GAS, to_yocto("1") ).assert_success(); root.call( pool.account_id(), "mft_register", &json!({ "token_id": ":0".to_string(), "account_id": farming_id }).to_string().into_bytes(), DEFAULT_GAS, to_yocto("1") ).assert_success(); for lp in lps { add_liqudity(lp, &pool, &token1, &token2, 0); } (pool,token1, token2) } #[allow(dead_code)] pub(crate) fn prepair_pool( root: &UserAccount, owne
#[allow(dead_code)] pub(crate) fn prepair_farm( root: &UserAccount, owner: &UserAccount, token: &ContractAccount<TestToken>, total_reward: Balance, ) -> (ContractAccount<Farming>, String) { let farming = deploy_farming(&root, farming_id(), owner.account_id()); let out_come = call!( owner, farming.create_simple_farm(HRSimpleFarmTerms{ seed_id: format!("{}@0", swap()), reward_token: to_va(token.account_id()), start_at: 0, reward_per_session: to_yocto("1").into(), session_interval: 60, }, Some(U128(1000000000000000000)), None, None), deposit = to_yocto("1") ); out_come.assert_success(); let farm_id: String; if let Value::String(farmid) = out_come.unwrap_json_value() { farm_id = farmid.clone(); } else { farm_id = String::from("N/A"); } call!( root, token.storage_deposit(Some(to_va(farming_id())), None), deposit = to_yocto("1") ) .assert_success(); mint_token(&token, &root, total_reward.into()); call!( root, token.ft_transfer_call(to_va(farming_id()), total_reward.into(), None, farm_id.clone()), deposit = 1 ) .assert_success(); (farming, farm_id) } #[allow(dead_code)] pub(crate) fn prepair_multi_farms( root: &UserAccount, owner: &UserAccount, token: &ContractAccount<TestToken>, total_reward: Balance, farm_count: u32, ) -> (ContractAccount<Farming>, Vec<String>) { let farming = deploy_farming(&root, farming_id(), owner.account_id()); let mut farm_ids: Vec<String> = vec![]; call!( root, token.storage_deposit(Some(to_va(farming_id())), None), deposit = to_yocto("1") ) .assert_success(); mint_token(&token, &root, to_yocto("100000")); for _ in 0..farm_count { let out_come = call!( owner, farming.create_simple_farm(HRSimpleFarmTerms{ seed_id: format!("{}@0", swap()), reward_token: to_va(token.account_id()), start_at: 0, reward_per_session: to_yocto("1").into(), session_interval: 60, }, Some(U128(1000000000000000000)), None, None), deposit = to_yocto("1") ); out_come.assert_success(); let farm_id: String; if let Value::String(farmid) = out_come.unwrap_json_value() { farm_id = 
farmid.clone(); } else { farm_id = String::from("N/A"); } call!( root, token.ft_transfer_call(to_va(farming_id()), total_reward.into(), None, farm_id.clone()), deposit = 1 ) .assert_success(); farm_ids.push(farm_id.clone()); println!(" Farm {} created and running at Height#{}", farm_id.clone(), root.borrow_runtime().current_block().block_height); } (farming, farm_ids) } pub(crate) fn add_liqudity( user: &UserAccount, pool: &UserAccount, token1: &ContractAccount<TestToken>, token2: &ContractAccount<TestToken>, pool_id: u64, ) { mint_token(&token1, user, to_yocto("105")); mint_token(&token2, user, to_yocto("105")); user.call( pool.account_id(), "storage_deposit", &json!({}).to_string().into_bytes(), DEFAULT_GAS, to_yocto("1") ).assert_success(); call!( user, token1.ft_transfer_call(to_va(swap()), to_yocto("100").into(), None, "".to_string()), deposit = 1 ) .assert_success(); call!( user, token2.ft_transfer_call(to_va(swap()), to_yocto("100").into(), None, "".to_string()), deposit = 1 ) .assert_success(); user.call( pool.account_id(), "add_liquidity", &json!({ "pool_id": pool_id, "amounts": vec![U128(to_yocto("100")), U128(to_yocto("100"))] }).to_string().into_bytes(), DEFAULT_GAS, to_yocto("0.01") ).assert_success(); } pub(crate) fn mint_token(token: &ContractAccount<TestToken>, user: &UserAccount, amount: Balance) { call!( user, token.mint(to_va(user.account_id.clone()), amount.into()) ).assert_success(); }
r: &UserAccount, ) -> (UserAccount, ContractAccount<TestToken>, ContractAccount<TestToken>) { let pool = deploy_pool(&root, swap(), owner.account_id()); let token1 = deploy_token(&root, dai(), vec![swap()]); let token2 = deploy_token(&root, eth(), vec![swap()]); owner.call( pool.account_id(), "extend_whitelisted_tokens", &json!({ "tokens": vec![to_va(dai()), to_va(eth())] }).to_string().into_bytes(), DEFAULT_GAS, 0 ); root.call( pool.account_id(), "add_simple_pool", &json!({ "tokens": vec![to_va(dai()), to_va(eth())], "fee": 25 }).to_string().into_bytes(), DEFAULT_GAS, to_yocto("1") ).assert_success(); (pool, token1, token2) }
function_block-function_prefixed
[ { "content": "fn parse_token_id(token_id: String) -> TokenOrPool {\n\n if let Ok(pool_id) = try_identify_sub_token_id(&token_id) {\n\n TokenOrPool::Pool(pool_id)\n\n } else {\n\n TokenOrPool::Token(token_id)\n\n }\n\n}\n\n\n\n/// seed token deposit\n\n#[near_bindgen]\n\nimpl MFTTokenReceiver for Contract {\n\n /// Callback on receiving tokens by this contract.\n\n fn mft_on_transfer(\n\n &mut self,\n\n token_id: String,\n\n sender_id: AccountId,\n\n amount: U128,\n\n msg: String,\n\n ) -> PromiseOrValue<U128> {\n\n let seed_id: String;\n", "file_path": "ref-farming/src/token_receiver.rs", "rank": 0, "score": 193813.14398780602 }, { "content": "/// wrap token_id into correct format in MFT standard\n\npub fn wrap_mft_token_id(token_id: &str) -> String {\n\n format!(\":{}\", token_id)\n\n}\n\n\n", "file_path": "ref-farming/src/utils.rs", "rank": 1, "score": 152095.0129788904 }, { "content": "pub fn deploy_pool(root: &UserAccount, contract_id: AccountId, owner_id: AccountId) -> UserAccount {\n\n\n\n let pool = root.deploy(\n\n &EXCHANGE_WASM_BYTES,\n\n contract_id.clone(),\n\n to_yocto(\"100\")\n\n );\n\n\n\n pool.call(\n\n contract_id,\n\n \"new\",\n\n &json!({\n\n \"owner_id\": owner_id,\n\n \"exchange_fee\": 4,\n\n \"referral_fee\": 1\n\n }).to_string().into_bytes(),\n\n DEFAULT_GAS / 2,\n\n 0\n\n );\n\n\n\n pool\n\n}\n\n\n", "file_path": "ref-farming/tests/common/init.rs", "rank": 2, "score": 143153.43984522021 }, { "content": "/// a sub token would use a format \":<u64>\"\n\nfn try_identify_sub_token_id(token_id: &String) -> Result<u64, &'static str> {\n\n if token_id.starts_with(\":\") {\n\n if let Ok(pool_id) = str::parse::<u64>(&token_id[1..token_id.len()]) {\n\n Ok(pool_id)\n\n } else {\n\n Err(\"Illegal pool id\")\n\n }\n\n } else {\n\n Err(\"Illegal pool id\")\n\n }\n\n}\n\n\n", "file_path": "ref-farming/src/token_receiver.rs", "rank": 3, "score": 141671.3073187094 }, { "content": "pub fn deploy_farming(root: &UserAccount, farming_id: AccountId, owner_id: 
AccountId) -> ContractAccount<Farming> {\n\n let farming = deploy!(\n\n contract: Farming,\n\n contract_id: farming_id,\n\n bytes: &FARM_WASM_BYTES,\n\n signer_account: root,\n\n init_method: new(to_va(owner_id))\n\n );\n\n farming\n\n}\n\n\n", "file_path": "ref-farming/tests/common/init.rs", "rank": 4, "score": 140111.04637874686 }, { "content": "// return receiver_id, token_id\n\npub fn parse_seed_id(lpt_id: &str) -> (String, String) {\n\n let v: Vec<&str> = lpt_id.split(MFT_TAG).collect();\n\n if v.len() == 2 { // receiver_id@pool_id\n\n (v[0].to_string(), v[1].to_string())\n\n } else if v.len() == 1 { // receiver_id\n\n (v[0].to_string(), v[0].to_string())\n\n } else {\n\n env::panic(format!(\"{}\", ERR33_INVALID_SEED_ID).as_bytes())\n\n }\n\n}\n\n\n\n\n", "file_path": "ref-farming/src/utils.rs", "rank": 5, "score": 113591.52782622928 }, { "content": "// ============= internal methods ================\n\nfn get_farminfo(farming: &ContractAccount<Farming>, farm_id: String) -> FarmInfo {\n\n view!(farming.get_farm(farm_id)).unwrap_json::<FarmInfo>()\n\n}\n\n\n", "file_path": "ref-farming/tests/common/views.rs", "rank": 6, "score": 112518.53741150875 }, { "content": "fn get_outdated_farminfo(farming: &ContractAccount<Farming>, farm_id: String) -> FarmInfo {\n\n view!(farming.get_outdated_farm(farm_id)).unwrap_json::<FarmInfo>()\n\n}\n", "file_path": "ref-farming/tests/common/views.rs", "rank": 7, "score": 111033.06861316647 }, { "content": "pub fn parse_farm_id(farm_id: &FarmId) -> (String, usize) {\n\n let v: Vec<&str> = farm_id.split(\"#\").collect();\n\n if v.len() != 2 {\n\n env::panic(format!(\"{}\", ERR42_INVALID_FARM_ID).as_bytes())\n\n }\n\n (v[0].to_string(), v[1].parse::<usize>().unwrap())\n\n}\n\n\n", "file_path": "ref-farming/src/utils.rs", "rank": 8, "score": 103751.78281325041 }, { "content": "pub fn deploy_token(\n\n root: &UserAccount,\n\n token_id: AccountId,\n\n accounts_to_register: Vec<AccountId>,\n\n) -> ContractAccount<TestToken> {\n\n let t 
= deploy!(\n\n contract: TestToken,\n\n contract_id: token_id,\n\n bytes: &TEST_TOKEN_WASM_BYTES,\n\n signer_account: root\n\n );\n\n call!(root, t.new()).assert_success();\n\n // call!(\n\n // root,\n\n // t.mint(to_va(root.account_id.clone()), to_yocto(\"10000\").into())\n\n // )\n\n // .assert_success();\n\n for account_id in accounts_to_register {\n\n call!(\n\n root,\n\n t.storage_deposit(Some(to_va(account_id)), None),\n\n deposit = to_yocto(\"1\")\n\n )\n\n .assert_success();\n\n }\n\n t\n\n}\n\n\n", "file_path": "ref-farming/tests/common/init.rs", "rank": 9, "score": 98713.16107513863 }, { "content": "#[test]\n\nfn failure_e41_when_deposit_reward_token() {\n\n let root = init_simulator(None);\n\n\n\n let owner = root.create_user(\"owner\".to_string(), to_yocto(\"100\"));\n\n let farmer1 = root.create_user(\"farmer1\".to_string(), to_yocto(\"100\"));\n\n\n\n let (pool, token1, _) = prepair_pool_and_liquidity(\n\n &root, &owner, farming_id(), vec![&farmer1]);\n\n\n\n let farming = deploy_farming(&root, farming_id(), owner.account_id());\n\n call!(farmer1, farming.storage_deposit(None, None), deposit = to_yocto(\"1\")).assert_success();\n\n\n\n let out_come = call!(\n\n owner,\n\n farming.create_simple_farm(HRSimpleFarmTerms{\n\n seed_id: format!(\"{}@0\", pool.account_id()),\n\n reward_token: to_va(token1.account_id()),\n\n start_at: 0,\n\n reward_per_session: to_yocto(\"1\").into(),\n\n session_interval: 60,\n", "file_path": "ref-farming/tests/test_failures.rs", "rank": 10, "score": 96545.16354821832 }, { "content": "enum TokenOrPool {\n\n Token(AccountId),\n\n Pool(u64),\n\n}\n\n\n", "file_path": "ref-farming/src/token_receiver.rs", "rank": 11, "score": 94551.53057188251 }, { "content": "#[test]\n\nfn single_farm_startat_180() {\n\n let root = init_simulator(None);\n\n\n\n println!(\"----->> Prepare accounts.\");\n\n let owner = root.create_user(\"owner\".to_string(), to_yocto(\"100\"));\n\n let farmer1 = root.create_user(\"farmer1\".to_string(), 
to_yocto(\"100\"));\n\n println!(\"<<----- owner and farmer prepared.\");\n\n\n\n println!(\"----->> Prepare ref-exchange and swap pool.\");\n\n let (pool, token1, _) = prepair_pool_and_liquidity(\n\n &root, &owner, farming_id(), vec![&farmer1]);\n\n println!(\"<<----- The pool prepaired.\");\n\n\n\n // deploy farming contract and register user\n\n println!(\"----->> Deploy farming and register farmers.\");\n\n let farming = deploy_farming(&root, farming_id(), owner.account_id());\n\n call!(farmer1, farming.storage_deposit(None, None), deposit = to_yocto(\"1\")).assert_success();\n\n println!(\"<<----- farming deployed, farmers registered.\");\n\n\n\n // create farm\n", "file_path": "ref-farming/tests/test_single_farm.rs", "rank": 12, "score": 74715.74897505279 }, { "content": "#[test]\n\nfn single_farm_startat_0() {\n\n let root = init_simulator(None);\n\n\n\n println!(\"----->> Prepare accounts.\");\n\n let owner = root.create_user(\"owner\".to_string(), to_yocto(\"100\"));\n\n let farmer1 = root.create_user(\"farmer1\".to_string(), to_yocto(\"100\"));\n\n let farmer2 = root.create_user(\"farmer2\".to_string(), to_yocto(\"100\"));\n\n println!(\"<<----- owner and 2 farmers prepared.\");\n\n\n\n println!(\"----->> Prepare ref-exchange and swap pool.\");\n\n let (pool, token1, _) = prepair_pool_and_liquidity(\n\n &root, &owner, farming_id(), vec![&farmer1, &farmer2]);\n\n println!(\"<<----- The pool prepaired.\");\n\n\n\n // deploy farming contract and register user\n\n println!(\"----->> Deploy farming and register farmers.\");\n\n let farming = deploy_farming(&root, farming_id(), owner.account_id());\n\n call!(farmer1, farming.storage_deposit(None, None), deposit = to_yocto(\"1\")).assert_success();\n\n call!(farmer2, farming.storage_deposit(None, None), deposit = to_yocto(\"1\")).assert_success();\n\n println!(\"<<----- farming deployed, farmers registered.\");\n", "file_path": "ref-farming/tests/test_single_farm.rs", "rank": 13, "score": 74715.74897505279 }, { 
"content": "#[test]\n\nfn single_paras_farm() {\n\n let root = init_simulator(None);\n\n\n\n println!(\"----->> Prepare accounts.\");\n\n let owner = root.create_user(\"owner\".to_string(), to_yocto(\"100\"));\n\n let farmer1 = root.create_user(\"farmer1\".to_string(), to_yocto(\"100\"));\n\n let farmer2 = root.create_user(\"farmer2\".to_string(), to_yocto(\"100\"));\n\n println!(\"<<----- owner and 2 farmers prepared.\");\n\n\n\n // println!(\"----->> Prepare ref-exchange and swap pool.\");\n\n let (_, token1, _) = prepair_pool_and_liquidity(\n\n &root, &owner, farming_id(), vec![&farmer1, &farmer2]);\n\n // println!(\"<<----- The pool prepaired.\");\n\n\n\n // deploy farming contract and register user\n\n println!(\"----->> Deploy farming and register farmers.\");\n\n let farming = deploy_farming(&root, farming_id(), owner.account_id());\n\n call!(farmer1, farming.storage_deposit(None, None), deposit = to_yocto(\"1\")).assert_success();\n\n call!(farmer2, farming.storage_deposit(None, None), deposit = to_yocto(\"1\")).assert_success();\n\n println!(\"<<----- farming deployed, farmers registered.\");\n", "file_path": "ref-farming/tests/test_paras_farm.rs", "rank": 14, "score": 74715.74897505279 }, { "content": "#[test]\n\nfn multi_farm_with_different_state() {\n\n let root = init_simulator(None);\n\n\n\n // prepair users\n\n let owner = root.create_user(\"owner\".to_string(), to_yocto(\"100\"));\n\n let farmer = root.create_user(\"farmer\".to_string(), to_yocto(\"100\"));\n\n println!(\"----->> owner and farmer prepaired.\");\n\n\n\n // prepair pool and tokens\n\n let(pool, token1, token2) = prepair_pool(&root, &owner);\n\n println!(\"----->> The pool prepaired.\");\n\n\n\n // farmer add liqidity \n\n add_liqudity(&farmer, &pool, &token1, &token2, 0);\n\n let mft_balance: U128 = root.view(\n\n pool.account_id(),\n\n \"mft_balance_of\",\n\n &json!({\n\n \"token_id\": \":0\".to_string(),\n\n \"account_id\": farmer.account_id.clone()\n", "file_path": 
"ref-farming/tests/test_multi_farms.rs", "rank": 15, "score": 73460.97210451248 }, { "content": "#[test]\n\nfn multi_farm_in_single_seed() {\n\n let root = init_simulator(None);\n\n\n\n // prepair users\n\n let owner = root.create_user(\"owner\".to_string(), to_yocto(\"100\"));\n\n let farmer = root.create_user(\"farmer\".to_string(), to_yocto(\"100\"));\n\n println!(\"----->> owner and farmer prepaired.\");\n\n\n\n // prepair pool and tokens\n\n let(pool, token1, token2) = prepair_pool(&root, &owner);\n\n println!(\"----->> The pool prepaired.\");\n\n\n\n // farmer add liqidity \n\n add_liqudity(&farmer, &pool, &token1, &token2, 0);\n\n let mft_balance: U128 = pool.view(\n\n pool.account_id(),\n\n \"mft_balance_of\",\n\n &json!({\n\n \"token_id\": \":0\".to_string(),\n\n \"account_id\": farmer.account_id.clone()\n", "file_path": "ref-farming/tests/test_multi_farms.rs", "rank": 16, "score": 73460.97210451248 }, { "content": "#[test]\n\nfn test_farm_with_nft_mappings() {\n\n let root = init_simulator(None);\n\n\n\n println!(\"----->> Prepare accounts.\");\n\n let owner = root.create_user(\"owner\".to_string(), to_yocto(\"100\"));\n\n let farmer1 = root.create_user(\"farmer1\".to_string(), to_yocto(\"100\"));\n\n let farmer2 = root.create_user(\"farmer2\".to_string(), to_yocto(\"100\"));\n\n println!(\"<<----- owner and 2 farmers prepared.\");\n\n\n\n // println!(\"----->> Prepare ref-exchange and swap pool.\");\n\n let (_, token1, _) = prepair_pool_and_liquidity(\n\n &root, &owner, farming_id(), vec![&farmer1, &farmer2]);\n\n\n\n // deploy farming contract and register user\n\n println!(\"----->> Deploy farming and register farmers.\");\n\n let farming = deploy_farming(&root, farming_id(), owner.account_id());\n\n call!(farmer1, farming.storage_deposit(None, None), deposit = to_yocto(\"1\")).assert_success();\n\n call!(farmer2, farming.storage_deposit(None, None), deposit = to_yocto(\"1\")).assert_success();\n\n println!(\"<<----- farming deployed, farmers 
registered.\");\n\n\n", "file_path": "ref-farming/tests/test_paras_farm.rs", "rank": 17, "score": 73460.97210451248 }, { "content": "fn claim_user_reward_from_farm(\n\n farm: &mut Farm, \n\n farmer: &mut Farmer, \n\n total_seeds: &Balance,\n\n silent: bool,\n\n) {\n\n let user_seeds = farmer.seeds.get(&farm.get_seed_id()).unwrap_or(&0_u128);\n\n let user_rps = farmer.get_rps(&farm.get_farm_id());\n\n let (new_user_rps, reward_amount) = farm.claim_user_reward(&user_rps, user_seeds, total_seeds, silent);\n\n if !silent {\n\n env::log(\n\n format!(\n\n \"user_rps@{} increased to {}\",\n\n farm.get_farm_id(), U256::from_little_endian(&new_user_rps),\n\n )\n\n .as_bytes(),\n\n );\n\n }\n\n \n\n farmer.set_rps(&farm.get_farm_id(), new_user_rps);\n", "file_path": "ref-farming/src/actions_of_reward.rs", "rank": 18, "score": 72209.71437509608 }, { "content": "#[test]\n\nfn failure_e11_create_farm() {\n\n let root = init_simulator(None);\n\n\n\n let owner = root.create_user(\"owner\".to_string(), to_yocto(\"100\"));\n\n let farmer1 = root.create_user(\"farmer1\".to_string(), to_yocto(\"100\"));\n\n\n\n let (pool, token1, _) = prepair_pool_and_liquidity(\n\n &root, &owner, farming_id(), vec![&farmer1]);\n\n\n\n let farming = deploy_farming(&root, farming_id(), owner.account_id());\n\n\n\n let out_come = call!(\n\n owner,\n\n farming.create_simple_farm(HRSimpleFarmTerms{\n\n seed_id: format!(\"{}@0\", pool.account_id()),\n\n reward_token: token1.valid_account_id(),\n\n start_at: 0,\n\n reward_per_session: to_yocto(\"1\").into(),\n\n session_interval: 60,\n\n }, None, None, None),\n\n deposit = to_yocto(\"0.00001\")\n\n );\n\n assert!(!out_come.is_ok());\n\n // println!(\"{:#?}\", out_come.promise_results());\n\n let ex_status = format!(\"{:?}\", out_come.promise_errors()[0].as_ref().unwrap().status());\n\n assert!(ex_status.contains(\"E11: insufficient $NEAR storage deposit\"));\n\n}\n\n\n", "file_path": "ref-farming/tests/test_failures.rs", "rank": 19, "score": 
72209.71437509608 }, { "content": "#[test]\n\nfn compound_single_paras_farm() {\n\n let root = init_simulator(None);\n\n\n\n println!(\"----->> Prepare accounts.\");\n\n let owner = root.create_user(\"owner\".to_string(), to_yocto(\"100\"));\n\n let farmer1 = root.create_user(\"farmer1\".to_string(), to_yocto(\"100\"));\n\n println!(\"<<----- owner and 2 farmers prepared.\");\n\n\n\n // println!(\"----->> Prepare ref-exchange and swap pool.\");\n\n let (_, token1, _) = prepair_pool_and_liquidity(\n\n &root, &owner, farming_id(), vec![&farmer1]);\n\n // println!(\"<<----- The pool prepaired.\");\n\n\n\n // deploy farming contract and register user\n\n println!(\"----->> Deploy farming and register farmers.\");\n\n let farming = deploy_farming(&root, farming_id(), owner.account_id());\n\n call!(farmer1, farming.storage_deposit(None, None), deposit = to_yocto(\"1\")).assert_success();\n\n println!(\"<<----- farming deployed, farmers registered.\");\n\n\n\n // create farm\n", "file_path": "ref-farming/tests/test_compound.rs", "rank": 20, "score": 72209.71437509608 }, { "content": "#[test]\n\nfn test_claim_and_withdraw() {\n\n let root = init_simulator(None);\n\n\n\n println!(\"----->> Prepare accounts.\");\n\n let owner = root.create_user(\"owner\".to_string(), to_yocto(\"100\"));\n\n let farmer1 = root.create_user(\"farmer1\".to_string(), to_yocto(\"100\"));\n\n let farmer2 = root.create_user(\"farmer2\".to_string(), to_yocto(\"100\"));\n\n println!(\"<<----- owner and 2 farmers prepared.\");\n\n\n\n // println!(\"----->> Prepare ref-exchange and swap pool.\");\n\n let (_, token1, _) = prepair_pool_and_liquidity(\n\n &root, &owner, farming_id(), vec![&farmer1, &farmer2]);\n\n // println!(\"<<----- The pool prepaired.\");\n\n\n\n // deploy farming contract and register user\n\n println!(\"----->> Deploy farming and register farmers.\");\n\n let farming = deploy_farming(&root, farming_id(), owner.account_id());\n\n call!(farmer1, farming.storage_deposit(None, None), 
deposit = to_yocto(\"1\")).assert_success();\n\n call!(farmer2, farming.storage_deposit(None, None), deposit = to_yocto(\"1\")).assert_success();\n\n println!(\"<<----- farming deployed, farmers registered.\");\n", "file_path": "ref-farming/tests/test_paras_farm.rs", "rank": 21, "score": 72209.71437509608 }, { "content": "#[test]\n\nfn failure_e42_when_force_clean_farm() {\n\n let root = init_simulator(None);\n\n\n\n let owner = root.create_user(\"owner\".to_string(), to_yocto(\"100\"));\n\n let farmer1 = root.create_user(\"farmer1\".to_string(), to_yocto(\"100\"));\n\n\n\n let (pool, token1, _) = prepair_pool_and_liquidity(\n\n &root, &owner, farming_id(), vec![&farmer1]);\n\n\n\n let farm_id = \"swap@0#0\".to_string();\n\n let farming = deploy_farming(&root, farming_id(), owner.account_id());\n\n call!(farmer1, farming.storage_deposit(None, None), deposit = to_yocto(\"1\")).assert_success();\n\n\n\n let out_come = call!(\n\n owner,\n\n farming.create_simple_farm(HRSimpleFarmTerms{\n\n seed_id: format!(\"{}@0\", pool.account_id()),\n\n reward_token: to_va(token1.account_id()),\n\n start_at: 0,\n\n reward_per_session: to_yocto(\"1\").into(),\n", "file_path": "ref-farming/tests/test_failures.rs", "rank": 22, "score": 70889.36996996752 }, { "content": "#[test]\n\nfn test_maximum_nft_mappings() {\n\n\n\n let root = init_simulator(None);\n\n\n\n println!(\"----->> Prepare accounts.\");\n\n let owner = root.create_user(\"owner\".to_string(), to_yocto(\"100\"));\n\n let farmer1 = root.create_user(\"farmer1\".to_string(), to_yocto(\"100\"));\n\n let farmer2 = root.create_user(\"farmer2\".to_string(), to_yocto(\"100\"));\n\n println!(\"<<----- owner and 2 farmers prepared.\");\n\n\n\n // println!(\"----->> Prepare ref-exchange and swap pool.\");\n\n let (_, token1, _) = prepair_pool_and_liquidity(\n\n &root, &owner, farming_id(), vec![&farmer1, &farmer2]);\n\n\n\n // deploy farming contract and register user\n\n println!(\"----->> Deploy farming and register 
farmers.\");\n\n let farming = deploy_farming(&root, farming_id(), owner.account_id());\n\n call!(farmer1, farming.storage_deposit(None, None), deposit = to_yocto(\"1\")).assert_success();\n\n call!(farmer2, farming.storage_deposit(None, None), deposit = to_yocto(\"1\")).assert_success();\n\n println!(\"<<----- farming deployed, farmers registered.\");\n", "file_path": "ref-farming/tests/test_paras_farm.rs", "rank": 23, "score": 70889.36996996752 }, { "content": "#[test]\n\nfn failure_e33() {\n\n let root = init_simulator(None);\n\n\n\n let owner = root.create_user(\"owner\".to_string(), to_yocto(\"100\"));\n\n let farmer1 = root.create_user(\"farmer1\".to_string(), to_yocto(\"100\"));\n\n\n\n let (pool, token1, _) = prepair_pool_and_liquidity(\n\n &root, &owner, farming_id(), vec![&farmer1]);\n\n\n\n let farming = deploy_farming(&root, farming_id(), owner.account_id());\n\n call!(farmer1, farming.storage_deposit(None, None), deposit = to_yocto(\"1\")).assert_success();\n\n\n\n let out_come = call!(\n\n owner,\n\n farming.create_simple_farm(HRSimpleFarmTerms{\n\n seed_id: format!(\"{}@0@3\", pool.account_id()),\n\n reward_token: token1.valid_account_id(),\n\n start_at: 0,\n\n reward_per_session: to_yocto(\"1\").into(),\n\n session_interval: 60,\n\n }, None, None, None),\n\n deposit = to_yocto(\"1\")\n\n );\n\n assert!(!out_come.is_ok());\n\n let ex_status = format!(\"{:?}\", out_come.promise_errors()[0].as_ref().unwrap().status());\n\n // println!(\"ex_status: {}\", ex_status);\n\n assert!(ex_status.contains(\"E33: invalid seed id\"));\n\n}\n\n\n", "file_path": "ref-farming/tests/test_failures.rs", "rank": 24, "score": 70133.29635131334 }, { "content": "#[test]\n\nfn storage_stake() {\n\n let root = init_simulator(None);\n\n\n\n let owner = root.create_user(\"owner\".to_string(), to_yocto(\"100\"));\n\n let farmer1 = root.create_user(\"farmer1\".to_string(), to_yocto(\"100\"));\n\n let farmer2 = root.create_user(\"farmer2\".to_string(), to_yocto(\"100\"));\n\n\n\n 
let farming = deploy_farming(&root, farming_id(), owner.account_id());\n\n\n\n // farmer1 register with only_register set to false\n\n let out_come = call!(farmer1, farming.storage_deposit(None, None), deposit = to_yocto(\"1\"));\n\n out_come.assert_success();\n\n let sb = out_come.unwrap_json::<StorageBalance>();\n\n assert_eq!(sb.total.0, to_yocto(\"1\"));\n\n assert_eq!(sb.available.0, to_yocto(\"0.99908\"));\n\n assert!(farmer1.account().unwrap().amount < to_yocto(\"99\"));\n\n\n\n // farmer1 withdraw storage\n\n let out_come = call!(farmer1, farming.storage_withdraw(None), deposit = 1);\n\n out_come.assert_success();\n", "file_path": "ref-farming/tests/test_storage.rs", "rank": 25, "score": 70133.29635131334 }, { "content": "#[test]\n\nfn failure_e14() {\n\n let root = init_simulator(None);\n\n\n\n let owner = root.create_user(\"owner\".to_string(), to_yocto(\"100\"));\n\n let farmer1 = root.create_user(\"farmer1\".to_string(), to_yocto(\"100\"));\n\n\n\n\n\n let farming = deploy_farming(&root, farming_id(), owner.account_id());\n\n call!(farmer1, farming.storage_deposit(None, None), deposit = to_yocto(\"1\")).assert_success();\n\n \n\n let out_come = call!(farmer1, farming.storage_deposit(None, Some(true)), deposit = to_yocto(\"1\"));\n\n assert!(!out_come.is_ok());\n\n let ex_status = format!(\"{:?}\", out_come.promise_errors()[0].as_ref().unwrap().status());\n\n assert!(ex_status.contains(\"E14: account already registered\"));\n\n\n\n call!(farmer1, farming.storage_deposit(None, Some(false)), deposit = to_yocto(\"1\")).assert_success();\n\n\n\n let sb = show_storage_balance(&farming, farmer1.account_id(), false);\n\n assert_eq!(sb.total.0, to_yocto(\"2\"));\n\n}\n\n\n", "file_path": "ref-farming/tests/test_failures.rs", "rank": 26, "score": 70133.29635131334 }, { "content": "#[test]\n\nfn test_upgrade() {\n\n let root = init_simulator(None);\n\n let test_user = root.create_user(\"test\".to_string(), to_yocto(\"100\"));\n\n let farming = deploy!(\n\n 
contract: Farming,\n\n contract_id: \"farming\".to_string(),\n\n bytes: &PREV_FARMING_WASM_BYTES,\n\n signer_account: root,\n\n init_method: new(ValidAccountId::try_from(root.account_id.clone()).unwrap())\n\n );\n\n\n\n // Failed upgrade with no permissions.\n\n let result = test_user\n\n .call(\n\n farming.user_account.account_id.clone(),\n\n \"upgrade\",\n\n &PREV_FARMING_WASM_BYTES,\n\n near_sdk_sim::DEFAULT_GAS,\n\n 0,\n\n )\n", "file_path": "ref-farming/tests/test_migrate.rs", "rank": 27, "score": 70133.29635131334 }, { "content": "#[test]\n\nfn failure_e43() {\n\n let root = init_simulator(None);\n\n\n\n let owner = root.create_user(\"owner\".to_string(), to_yocto(\"100\"));\n\n let farmer1 = root.create_user(\"farmer1\".to_string(), to_yocto(\"100\"));\n\n\n\n let (pool, token1, _) = prepair_pool_and_liquidity(\n\n &root, &owner, farming_id(), vec![&farmer1]);\n\n\n\n let farm_id = \"swap@0#0\".to_string();\n\n let farming = deploy_farming(&root, farming_id(), owner.account_id());\n\n call!(farmer1, farming.storage_deposit(None, None), deposit = to_yocto(\"1\")).assert_success();\n\n\n\n let out_come = call!(\n\n owner,\n\n farming.create_simple_farm(HRSimpleFarmTerms{\n\n seed_id: format!(\"{}@0\", pool.account_id()),\n\n reward_token: to_va(token1.account_id()),\n\n start_at: 0,\n\n reward_per_session: to_yocto(\"1\").into(),\n", "file_path": "ref-farming/tests/test_failures.rs", "rank": 28, "score": 70133.29635131334 }, { "content": "#[test]\n\nfn failure_e44() {\n\n let root = init_simulator(None);\n\n\n\n let owner = root.create_user(\"owner\".to_string(), to_yocto(\"100\"));\n\n let farmer1 = root.create_user(\"farmer1\".to_string(), to_yocto(\"100\"));\n\n\n\n let (pool, token1, token2) = prepair_pool_and_liquidity(\n\n &root, &owner, farming_id(), vec![&farmer1]);\n\n\n\n let farm_id = \"swap@0#0\".to_string();\n\n let farming = deploy_farming(&root, farming_id(), owner.account_id());\n\n call!(farmer1, farming.storage_deposit(None, None), deposit = 
to_yocto(\"1\")).assert_success();\n\n\n\n let out_come = call!(\n\n owner,\n\n farming.create_simple_farm(HRSimpleFarmTerms{\n\n seed_id: format!(\"{}@0\", pool.account_id()),\n\n reward_token: to_va(token1.account_id()),\n\n start_at: 0,\n\n reward_per_session: to_yocto(\"1\").into(),\n", "file_path": "ref-farming/tests/test_failures.rs", "rank": 29, "score": 70133.29635131334 }, { "content": "#[test]\n\nfn failure_e11_claim() {\n\n let root = init_simulator(None);\n\n\n\n let owner = root.create_user(\"owner\".to_string(), to_yocto(\"100\"));\n\n let farmer1 = root.create_user(\"farmer1\".to_string(), to_yocto(\"100\"));\n\n\n\n let (pool, token1, _) = prepair_pool_and_liquidity(\n\n &root, &owner, farming_id(), vec![&farmer1]);\n\n\n\n let farming = deploy_farming(&root, farming_id(), owner.account_id());\n\n call!(farmer1, farming.storage_deposit(None, None), deposit = to_yocto(\"1\")).assert_success();\n\n \n\n\n\n let farm_id = \"swap@0#0\".to_string();\n\n let out_come = call!(\n\n owner,\n\n farming.create_simple_farm(HRSimpleFarmTerms{\n\n seed_id: format!(\"{}@0\", pool.account_id()),\n\n reward_token: token1.valid_account_id(),\n\n start_at: 0,\n", "file_path": "ref-farming/tests/test_failures.rs", "rank": 30, "score": 68578.86387138287 }, { "content": "#[test]\n\nfn failure_e21_e22() {\n\n let root = init_simulator(None);\n\n\n\n let owner = root.create_user(\"owner\".to_string(), to_yocto(\"100\"));\n\n let farmer1 = root.create_user(\"farmer1\".to_string(), to_yocto(\"100\"));\n\n\n\n let (pool, token1, _) = prepair_pool_and_liquidity(\n\n &root, &owner, farming_id(), vec![&farmer1]);\n\n\n\n let farming = deploy_farming(&root, farming_id(), owner.account_id());\n\n call!(farmer1, farming.storage_deposit(None, None), deposit = to_yocto(\"1\")).assert_success();\n\n \n\n\n\n let farm_id = \"swap@0#0\".to_string();\n\n let out_come = call!(\n\n owner,\n\n farming.create_simple_farm(HRSimpleFarmTerms{\n\n seed_id: format!(\"{}@0\", 
pool.account_id()),\n\n reward_token: token1.valid_account_id(),\n\n start_at: 0,\n", "file_path": "ref-farming/tests/test_failures.rs", "rank": 31, "score": 68578.86387138287 }, { "content": "#[test]\n\nfn failure_e11_stake() {\n\n let root = init_simulator(None);\n\n\n\n let owner = root.create_user(\"owner\".to_string(), to_yocto(\"100\"));\n\n let farmer1 = root.create_user(\"farmer1\".to_string(), to_yocto(\"100\"));\n\n\n\n let (pool, token1, _) = prepair_pool_and_liquidity(\n\n &root, &owner, farming_id(), vec![&farmer1]);\n\n\n\n let farming = deploy_farming(&root, farming_id(), owner.account_id());\n\n call!(farmer1, farming.storage_deposit(None, None), deposit = to_yocto(\"1\")).assert_success();\n\n call!(farmer1, farming.storage_withdraw(None), deposit = 1).assert_success();\n\n // call!(farmer1, farming.storage_deposit(None, None), deposit = to_yocto(\"1\")).assert_success();\n\n\n\n let out_come = call!(\n\n owner,\n\n farming.create_simple_farm(HRSimpleFarmTerms{\n\n seed_id: format!(\"{}@0\", pool.account_id()),\n\n reward_token: token1.valid_account_id(),\n\n start_at: 0,\n", "file_path": "ref-farming/tests/test_failures.rs", "rank": 32, "score": 68578.86387138287 }, { "content": "#[test]\n\nfn failure_e12_e13() {\n\n let root = init_simulator(None);\n\n\n\n let owner = root.create_user(\"owner\".to_string(), to_yocto(\"100\"));\n\n let farmer1 = root.create_user(\"farmer1\".to_string(), to_yocto(\"100\"));\n\n\n\n let (pool, token1, _) = prepair_pool_and_liquidity(\n\n &root, &owner, farming_id(), vec![&farmer1]);\n\n\n\n let farming = deploy_farming(&root, farming_id(), owner.account_id());\n\n call!(farmer1, farming.storage_deposit(None, None), deposit = to_yocto(\"1\")).assert_success();\n\n \n\n\n\n let farm_id = \"swap@0#0\".to_string();\n\n let out_come = call!(\n\n owner,\n\n farming.create_simple_farm(HRSimpleFarmTerms{\n\n seed_id: format!(\"{}@0\", pool.account_id()),\n\n reward_token: token1.valid_account_id(),\n\n start_at: 0,\n", 
"file_path": "ref-farming/tests/test_failures.rs", "rank": 33, "score": 68578.86387138287 }, { "content": "/// reward 10.pow(17), seed: 10.pow(38) \n\n/// rps 10.pow(3) \n\nfn seed_amount_huge() {\n\n // println!(\"{}\", u128::MAX);\n\n // 340282366920938.463463374607431768211455\n\n\n\n let root = init_simulator(None);\n\n\n\n let owner = root.create_user(\"owner\".to_string(), to_yocto(\"100\"));\n\n let farmer1 = root.create_user(\"farmer1\".to_string(), to_yocto(\"100\"));\n\n\n\n let (pool, token1, token2) = prepair_pool(&root, &owner);\n\n root.call(\n\n pool.account_id(),\n\n \"mft_register\",\n\n &json!({\n\n \"token_id\": \":0\".to_string(),\n\n \"account_id\": farming_id()\n\n }).to_string().into_bytes(),\n\n DEFAULT_GAS,\n\n to_yocto(\"1\")\n\n ).assert_success();\n", "file_path": "ref-farming/tests/test_seed_amount.rs", "rank": 34, "score": 67117.47275764015 }, { "content": "#[test]\n\nfn failure_e42_when_claim_reward() {\n\n let root = init_simulator(None);\n\n\n\n let owner = root.create_user(\"owner\".to_string(), to_yocto(\"100\"));\n\n let farmer1 = root.create_user(\"farmer1\".to_string(), to_yocto(\"100\"));\n\n\n\n let (pool, token1, _) = prepair_pool_and_liquidity(\n\n &root, &owner, farming_id(), vec![&farmer1]);\n\n\n\n let farm_id = \"swap@0#0\".to_string();\n\n let farming = deploy_farming(&root, farming_id(), owner.account_id());\n\n call!(farmer1, farming.storage_deposit(None, None), deposit = to_yocto(\"1\")).assert_success();\n\n\n\n let out_come = call!(\n\n owner,\n\n farming.create_simple_farm(HRSimpleFarmTerms{\n\n seed_id: format!(\"{}@0\", pool.account_id()),\n\n reward_token: to_va(token1.account_id()),\n\n start_at: 0,\n\n reward_per_session: to_yocto(\"1\").into(),\n", "file_path": "ref-farming/tests/test_failures.rs", "rank": 35, "score": 67117.47275764015 }, { "content": "#[test]\n\nfn failure_e31_unstake_seed() {\n\n let root = init_simulator(None);\n\n\n\n let owner = root.create_user(\"owner\".to_string(), 
to_yocto(\"100\"));\n\n let farmer1 = root.create_user(\"farmer1\".to_string(), to_yocto(\"100\"));\n\n\n\n let (pool, token1, _) = prepair_pool_and_liquidity(\n\n &root, &owner, farming_id(), vec![&farmer1]);\n\n\n\n let farming = deploy_farming(&root, farming_id(), owner.account_id());\n\n call!(farmer1, farming.storage_deposit(None, None), deposit = to_yocto(\"1\")).assert_success();\n\n \n\n let out_come = call!(\n\n owner,\n\n farming.create_simple_farm(HRSimpleFarmTerms{\n\n seed_id: format!(\"{}@0\", pool.account_id()),\n\n reward_token: token1.valid_account_id(),\n\n start_at: 0,\n\n reward_per_session: to_yocto(\"1\").into(),\n\n session_interval: 60,\n", "file_path": "ref-farming/tests/test_failures.rs", "rank": 36, "score": 67117.47275764015 }, { "content": "#[test]\n\nfn failure_e34_stake_below_minimum() {\n\n let root = init_simulator(None);\n\n\n\n let owner = root.create_user(\"owner\".to_string(), to_yocto(\"100\"));\n\n let farmer1 = root.create_user(\"farmer1\".to_string(), to_yocto(\"100\"));\n\n\n\n let (pool, token1, _) = prepair_pool_and_liquidity(\n\n &root, &owner, farming_id(), vec![&farmer1]);\n\n\n\n let farming = deploy_farming(&root, farming_id(), owner.account_id());\n\n call!(farmer1, farming.storage_deposit(None, None), deposit = to_yocto(\"1\")).assert_success();\n\n\n\n let out_come = call!(\n\n owner,\n\n farming.create_simple_farm(HRSimpleFarmTerms{\n\n seed_id: format!(\"{}@0\", pool.account_id()),\n\n reward_token: token1.valid_account_id(),\n\n start_at: 0,\n\n reward_per_session: to_yocto(\"1\").into(),\n\n session_interval: 60,\n", "file_path": "ref-farming/tests/test_failures.rs", "rank": 37, "score": 67117.47275764015 }, { "content": "/// reward: 10.pow(33), seed: 10.pow(0)\n\n/// rps: 10.pow(57)\n\nfn seed_amount_little() {\n\n let root = init_simulator(None);\n\n\n\n let owner = root.create_user(\"owner\".to_string(), to_yocto(\"100\"));\n\n let farmer1 = root.create_user(\"farmer1\".to_string(), 
to_yocto(\"100\"));\n\n\n\n let (pool, token1, _) = prepair_pool_and_liquidity(\n\n &root, &owner, farming_id(), vec![&farmer1]);\n\n\n\n let farming = deploy_farming(&root, farming_id(), owner.account_id());\n\n call!(farmer1, farming.storage_deposit(None, None), deposit = to_yocto(\"1\")).assert_success();\n\n\n\n let farm_id = \"swap@0#0\".to_string();\n\n let out_come = call!(\n\n owner,\n\n farming.create_simple_farm(HRSimpleFarmTerms{\n\n seed_id: format!(\"{}@0\", pool.account_id()),\n\n reward_token: token1.valid_account_id(),\n\n start_at: 0,\n\n reward_per_session: to_yocto(\"1000000000\").into(),\n", "file_path": "ref-farming/tests/test_seed_amount.rs", "rank": 38, "score": 67117.47275764015 }, { "content": "#[test]\n\nfn failure_e10_unstake_before_register() {\n\n let root = init_simulator(None);\n\n\n\n let owner = root.create_user(\"owner\".to_string(), to_yocto(\"100\"));\n\n let farmer1 = root.create_user(\"farmer1\".to_string(), to_yocto(\"100\"));\n\n\n\n let (pool, token1, _) = prepair_pool_and_liquidity(\n\n &root, &owner, farming_id(), vec![&farmer1]);\n\n\n\n let farming = deploy_farming(&root, farming_id(), owner.account_id());\n\n\n\n let out_come = call!(\n\n owner,\n\n farming.create_simple_farm(HRSimpleFarmTerms{\n\n seed_id: format!(\"{}@0\", pool.account_id()),\n\n reward_token: token1.valid_account_id(),\n\n start_at: 0,\n\n reward_per_session: to_yocto(\"1\").into(),\n\n session_interval: 60,\n\n }, None, None, None),\n", "file_path": "ref-farming/tests/test_failures.rs", "rank": 39, "score": 67117.47275764015 }, { "content": "#[test]\n\nfn failure_e31_stake_seed() {\n\n let root = init_simulator(None);\n\n\n\n let owner = root.create_user(\"owner\".to_string(), to_yocto(\"100\"));\n\n let farmer1 = root.create_user(\"farmer1\".to_string(), to_yocto(\"100\"));\n\n\n\n let (pool, _, _) = prepair_pool_and_liquidity(\n\n &root, &owner, farming_id(), vec![&farmer1]);\n\n\n\n let farming = deploy_farming(&root, farming_id(), 
owner.account_id());\n\n call!(farmer1, farming.storage_deposit(None, None), deposit = to_yocto(\"1\")).assert_success();\n\n\n\n\n\n let out_come = farmer1.call(\n\n pool.account_id(),\n\n \"mft_transfer_call\",\n\n &json!({\n\n \"token_id\": \":0\".to_string(),\n\n \"receiver_id\": farming_id(),\n\n \"amount\": to_yocto(\"0.5\").to_string(),\n", "file_path": "ref-farming/tests/test_failures.rs", "rank": 40, "score": 67117.47275764015 }, { "content": "#[test]\n\nfn failure_e25_withdraw_reward() {\n\n let root = init_simulator(None);\n\n\n\n let owner = root.create_user(\"owner\".to_string(), to_yocto(\"100\"));\n\n let farmer1 = root.create_user(\"farmer1\".to_string(), to_yocto(\"100\"));\n\n\n\n let (pool, token1, _) = prepair_pool_and_liquidity(\n\n &root, &owner, farming_id(), vec![&farmer1]);\n\n\n\n let farming = deploy_farming(&root, farming_id(), owner.account_id());\n\n call!(farmer1, farming.storage_deposit(None, None), deposit = to_yocto(\"1\")).assert_success();\n\n \n\n\n\n let farm_id = \"swap@0#0\".to_string();\n\n let out_come = call!(\n\n owner,\n\n farming.create_simple_farm(HRSimpleFarmTerms{\n\n seed_id: format!(\"{}@0\", pool.account_id()),\n\n reward_token: token1.valid_account_id(),\n\n start_at: 0,\n", "file_path": "ref-farming/tests/test_failures.rs", "rank": 41, "score": 67117.47275764015 }, { "content": "#[test]\n\nfn lpt_stake_unstake() {\n\n let root = init_simulator(None);\n\n\n\n // prepair users\n\n let owner = root.create_user(\"owner\".to_string(), to_yocto(\"100\"));\n\n let farmer1 = root.create_user(\"farmer1\".to_string(), to_yocto(\"100\"));\n\n\n\n let (pool, token1, _) = prepair_pool_and_liquidity(&root, &owner, farming_id(), vec![&farmer1]);\n\n\n\n let (farming, farm_id) = prepair_farm(&root, &owner, &token1, to_yocto(\"500\"));\n\n let farm_info = show_farminfo(&farming, farm_id.clone(), false);\n\n assert_farming(&farm_info, \"Running\".to_string(), to_yocto(\"500\"), 0, 0, 0, 0, 0);\n\n\n\n call!(farmer1, 
farming.storage_deposit(None, None), deposit = to_yocto(\"1\"))\n\n .assert_success();\n\n\n\n let out_come = farmer1.call(\n\n pool.account_id(),\n\n \"mft_transfer_call\",\n\n &json!({\n", "file_path": "ref-farming/tests/test_stake_unstake.rs", "rank": 42, "score": 67117.47275764015 }, { "content": "#[test]\n\nfn failure_e32_unstake_over_balance() {\n\n let root = init_simulator(None);\n\n\n\n let owner = root.create_user(\"owner\".to_string(), to_yocto(\"100\"));\n\n let farmer1 = root.create_user(\"farmer1\".to_string(), to_yocto(\"100\"));\n\n\n\n let (pool, token1, _) = prepair_pool_and_liquidity(\n\n &root, &owner, farming_id(), vec![&farmer1]);\n\n\n\n let farming = deploy_farming(&root, farming_id(), owner.account_id());\n\n call!(farmer1, farming.storage_deposit(None, None), deposit = to_yocto(\"1\")).assert_success();\n\n\n\n let out_come = call!(\n\n owner,\n\n farming.create_simple_farm(HRSimpleFarmTerms{\n\n seed_id: format!(\"{}@0\", pool.account_id()),\n\n reward_token: token1.valid_account_id(),\n\n start_at: 0,\n\n reward_per_session: to_yocto(\"1\").into(),\n\n session_interval: 60,\n", "file_path": "ref-farming/tests/test_failures.rs", "rank": 43, "score": 67117.47275764015 }, { "content": "#[test]\n\nfn failure_e10_stake_before_register() {\n\n let root = init_simulator(None);\n\n\n\n let owner = root.create_user(\"owner\".to_string(), to_yocto(\"100\"));\n\n let farmer1 = root.create_user(\"farmer1\".to_string(), to_yocto(\"100\"));\n\n\n\n let (pool, token1, _) = prepair_pool_and_liquidity(\n\n &root, &owner, farming_id(), vec![&farmer1]);\n\n\n\n let farming = deploy_farming(&root, farming_id(), owner.account_id());\n\n\n\n let out_come = call!(\n\n owner,\n\n farming.create_simple_farm(HRSimpleFarmTerms{\n\n seed_id: format!(\"{}@0\", pool.account_id()),\n\n reward_token: token1.valid_account_id(),\n\n start_at: 0,\n\n reward_per_session: to_yocto(\"1\").into(),\n\n session_interval: 60,\n\n }, None, None, None),\n", "file_path": 
"ref-farming/tests/test_failures.rs", "rank": 44, "score": 67117.47275764015 }, { "content": "#[test]\n\nfn failure_e11_register_new() {\n\n let root = init_simulator(None);\n\n\n\n let owner = root.create_user(\"owner\".to_string(), to_yocto(\"100\"));\n\n let farmer1 = root.create_user(\"farmer1\".to_string(), to_yocto(\"100\"));\n\n\n\n // let (pool, token1, token2) = prepair_pool_and_liquidity(\n\n // &root, &owner, farming_id(), vec![&farmer1]);\n\n\n\n let farming = deploy_farming(&root, farming_id(), owner.account_id());\n\n\n\n let out_come = call!(farmer1, farming.storage_deposit(None, Some(true)), deposit = to_yocto(\"0.0001\"));\n\n assert!(!out_come.is_ok());\n\n let ex_status = format!(\"{:?}\", out_come.promise_errors()[0].as_ref().unwrap().status());\n\n assert!(ex_status.contains(\"E11: insufficient $NEAR storage deposit\"));\n\n}\n\n\n", "file_path": "ref-farming/tests/test_failures.rs", "rank": 45, "score": 67117.47275764015 }, { "content": "#[test]\n\nfn failure_e10_claim_before_register() {\n\n let root = init_simulator(None);\n\n\n\n let owner = root.create_user(\"owner\".to_string(), to_yocto(\"100\"));\n\n let farmer1 = root.create_user(\"farmer1\".to_string(), to_yocto(\"100\"));\n\n\n\n let (pool, token1, _) = prepair_pool_and_liquidity(\n\n &root, &owner, farming_id(), vec![&farmer1]);\n\n\n\n let farming = deploy_farming(&root, farming_id(), owner.account_id());\n\n \n\n let out_come = call!(\n\n owner,\n\n farming.create_simple_farm(HRSimpleFarmTerms{\n\n seed_id: format!(\"{}@0\", pool.account_id()),\n\n reward_token: token1.valid_account_id(),\n\n start_at: 0,\n\n reward_per_session: to_yocto(\"1\").into(),\n\n session_interval: 60,\n\n }, None, None, None),\n", "file_path": "ref-farming/tests/test_failures.rs", "rank": 46, "score": 67117.47275764015 }, { "content": "/// Assert that 1 yoctoNEAR was attached.\n\npub fn assert_one_yocto() {\n\n assert_eq!(env::attached_deposit(), 1, \"Requires attached deposit of exactly 1 
yoctoNEAR\")\n\n}\n\n\n", "file_path": "ref-farming/src/utils.rs", "rank": 47, "score": 66008.32803693102 }, { "content": "#[test]\n\nfn failure_e10_storage_withdraw_before_register() {\n\n let root = init_simulator(None);\n\n\n\n let owner = root.create_user(\"owner\".to_string(), to_yocto(\"100\"));\n\n let farmer1 = root.create_user(\"farmer1\".to_string(), to_yocto(\"100\"));\n\n\n\n // let (pool, token1, token2) = prepair_pool_and_liquidity(\n\n // &root, &owner, farming_id(), vec![&farmer1]);\n\n\n\n let farming = deploy_farming(&root, farming_id(), owner.account_id());\n\n\n\n\n\n let out_come = call!(\n\n farmer1,\n\n farming.storage_withdraw(None),\n\n deposit = 1\n\n );\n\n assert!(!out_come.is_ok());\n\n let ex_status = format!(\"{:?}\", out_come.promise_errors()[0].as_ref().unwrap().status());\n\n assert!(ex_status.contains(\"E10: account not registered\"));\n\n}\n\n\n", "file_path": "ref-farming/tests/test_failures.rs", "rank": 48, "score": 65741.01218958592 }, { "content": "#[test]\n\nfn failure_e25_withdraw_seed_ft() {\n\n let root = init_simulator(None);\n\n\n\n let owner = root.create_user(\"owner\".to_string(), to_yocto(\"100\"));\n\n let farmer1 = root.create_user(\"farmer1\".to_string(), to_yocto(\"100\"));\n\n\n\n let (_, token1, token2) = prepair_pool(&root, &owner);\n\n\n\n call!(\n\n root, token2.mint(farmer1.valid_account_id(), to_yocto(\"10000\").into())\n\n )\n\n .assert_success();\n\n\n\n let farming = deploy_farming(&root, farming_id(), owner.account_id());\n\n call!(farmer1, farming.storage_deposit(None, None), deposit = to_yocto(\"1\")).assert_success();\n\n call!(farming.user_account, token2.storage_deposit(None, None), deposit = to_yocto(\"1\")).assert_success();\n\n\n\n let out_come = call!(\n\n owner,\n\n farming.create_simple_farm(HRSimpleFarmTerms{\n", "file_path": "ref-farming/tests/test_failures.rs", "rank": 49, "score": 65741.01218958592 }, { "content": "pub fn deploy_nft_contract(\n\n root: &UserAccount,\n\n nft_token_id: 
AccountId,\n\n farmer_id: AccountId,\n\n) -> (ContractAccount<TestNFT>) {\n\n // uses default values for deposit and gas\n\n let nft = deploy!(\n\n // Contract Proxy\n\n contract: TestNFT,\n\n // Contract account id\n\n contract_id: nft_token_id,\n\n // Bytes of contract\n\n bytes: &NFT_WASM_BYTES,\n\n // User deploying the contract,\n\n signer_account: root,\n\n // init method\n\n init_method: new_default_meta(\n\n root.valid_account_id()\n\n )\n\n );\n", "file_path": "ref-farming/tests/common/init.rs", "rank": 50, "score": 64546.93692318832 }, { "content": "pub fn get_nft_balance_equivalent(\n\n nft_balance: NftBalance,\n\n nft_staked: ContractNFTTokenId\n\n) -> Option<Balance> {\n\n // split x.paras.near@1:1\n\n // to \"x.paras.near@1\", \":1\"\n\n let mut result: Option<Balance> = None;\n\n\n\n if let Some(nft_balance_equivalent) = nft_balance.get(&nft_staked.to_string()) {\n\n result = Some(nft_balance_equivalent.0);\n\n } else if nft_staked.contains(PARAS_SERIES_DELIMETER) {\n\n let contract_token_series_id_split: Vec<&str> = nft_staked.split(PARAS_SERIES_DELIMETER).collect();\n\n if let Some(nft_balance_equivalent) = nft_balance.get(&contract_token_series_id_split[0].to_string()) {\n\n result = Some(nft_balance_equivalent.0);\n\n } else {\n\n let contract_token_series_id_split: Vec<&str> = nft_staked.split(NFT_DELIMETER).collect();\n\n if let Some(nft_balance_equivalent) = nft_balance.get(&contract_token_series_id_split[0].to_string()) {\n\n result = Some(nft_balance_equivalent.0);\n\n } else {\n\n result = None;\n", "file_path": "ref-farming/src/utils.rs", "rank": 51, "score": 64546.93692318832 }, { "content": "#[test]\n\nfn failure_e42_when_remove_user_rps_and_view_unclaim_reward() {\n\n let root = init_simulator(None);\n\n\n\n let owner = root.create_user(\"owner\".to_string(), to_yocto(\"100\"));\n\n let farmer1 = root.create_user(\"farmer1\".to_string(), to_yocto(\"100\"));\n\n\n\n let (pool, token1, _) = prepair_pool_and_liquidity(\n\n &root, &owner, 
farming_id(), vec![&farmer1]);\n\n\n\n let farm_id = \"swap@0#0\".to_string();\n\n let farming = deploy_farming(&root, farming_id(), owner.account_id());\n\n call!(farmer1, farming.storage_deposit(None, None), deposit = to_yocto(\"1\")).assert_success();\n\n\n\n let out_come = call!(\n\n owner,\n\n farming.create_simple_farm(HRSimpleFarmTerms{\n\n seed_id: format!(\"{}@0\", pool.account_id()),\n\n reward_token: to_va(token1.account_id()),\n\n start_at: 0,\n\n reward_per_session: to_yocto(\"1\").into(),\n", "file_path": "ref-farming/tests/test_failures.rs", "rank": 52, "score": 62053.112769001884 }, { "content": "pub trait MFTTokenReceiver {\n\n fn mft_on_transfer(\n\n &mut self,\n\n token_id: String,\n\n sender_id: AccountId,\n\n amount: U128,\n\n msg: String,\n\n ) -> PromiseOrValue<U128>;\n\n}\n\n\n", "file_path": "ref-farming/src/token_receiver.rs", "rank": 53, "score": 57804.961673440426 }, { "content": "pub fn gen_farm_id(seed_id: &SeedId, index: usize) -> FarmId {\n\n format!(\"{}#{}\", seed_id, index)\n\n}\n\n\n\npub(crate) fn to_nano(timestamp: TimestampSec) -> Timestamp {\n\n Timestamp::from(timestamp) * 10u64.pow(9)\n\n}\n\n\n\npub(crate) fn to_sec(timestamp: Timestamp) -> TimestampSec {\n\n (timestamp / 10u64.pow(9)) as u32\n\n}\n\n\n", "file_path": "ref-farming/src/utils.rs", "rank": 54, "score": 57730.07482913739 }, { "content": "use crate::*;\n\n\n\nuse near_sdk::json_types::U128;\n\n\n\n#[near_bindgen]\n\nimpl Contract {\n\n pub fn set_owner(&mut self, owner_id: ValidAccountId) {\n\n self.assert_owner();\n\n self.data_mut().owner_id = owner_id.into();\n\n }\n\n\n\n /// force clean \n\n pub fn force_clean_farm(&mut self, farm_id: String) -> bool {\n\n self.assert_owner();\n\n self.internal_remove_farm_by_farm_id(&farm_id)\n\n }\n\n\n\n pub fn modify_seed_min_deposit(&mut self, seed_id: String, min_deposit: U128) {\n\n self.assert_owner();\n\n let mut farm_seed = self.get_seed(&seed_id);\n", "file_path": "ref-farming/src/owner.rs", "rank": 55, "score": 
56288.33260589778 }, { "content": " this\n\n } else {\n\n panic!(\"Dont need upgrade\");\n\n };\n\n }\n\n\n\n pub(crate) fn assert_owner(&self) {\n\n assert_eq!(\n\n env::predecessor_account_id(),\n\n self.data().owner_id,\n\n \"ERR_NOT_ALLOWED\"\n\n );\n\n }\n\n}\n\n\n\n#[cfg(target_arch = \"wasm32\")]\n\nmod upgrade {\n\n use near_sdk::env::BLOCKCHAIN_INTERFACE;\n\n use near_sdk::Gas;\n\n\n", "file_path": "ref-farming/src/owner.rs", "rank": 56, "score": 56278.37390535055 }, { "content": " use super::*;\n\n\n\n const BLOCKCHAIN_INTERFACE_NOT_SET_ERR: &str = \"Blockchain interface not set.\";\n\n\n\n /// Gas for calling migration call.\n\n pub const GAS_FOR_MIGRATE_CALL: Gas = 10_000_000_000_000;\n\n\n\n /// Self upgrade and call migrate, optimizes gas by not loading into memory the code.\n\n /// Takes as input non serialized set of bytes of the code.\n\n #[no_mangle]\n\n pub extern \"C\" fn upgrade() {\n\n env::setup_panic_hook();\n\n env::set_blockchain_interface(Box::new(near_blockchain::NearBlockchain {}));\n\n let contract: Contract = env::state_read().expect(\"ERR_CONTRACT_IS_NOT_INITIALIZED\");\n\n contract.assert_owner();\n\n let current_id = env::current_account_id().into_bytes();\n\n let method_name = \"migrate\".as_bytes().to_vec();\n\n unsafe {\n\n BLOCKCHAIN_INTERFACE.with(|b| {\n\n // Load input into register 0.\n", "file_path": "ref-farming/src/owner.rs", "rank": 57, "score": 56274.60558370757 }, { "content": " b.borrow()\n\n .as_ref()\n\n .expect(BLOCKCHAIN_INTERFACE_NOT_SET_ERR)\n\n .input(0);\n\n let promise_id = b\n\n .borrow()\n\n .as_ref()\n\n .expect(BLOCKCHAIN_INTERFACE_NOT_SET_ERR)\n\n .promise_batch_create(current_id.len() as _, current_id.as_ptr() as _);\n\n b.borrow()\n\n .as_ref()\n\n .expect(BLOCKCHAIN_INTERFACE_NOT_SET_ERR)\n\n .promise_batch_action_deploy_contract(promise_id, u64::MAX as _, 0);\n\n let attached_gas = env::prepaid_gas() - env::used_gas() - GAS_FOR_MIGRATE_CALL;\n\n b.borrow()\n\n .as_ref()\n\n 
.expect(BLOCKCHAIN_INTERFACE_NOT_SET_ERR)\n\n .promise_batch_action_function_call(\n\n promise_id,\n\n method_name.len() as _,\n", "file_path": "ref-farming/src/owner.rs", "rank": 58, "score": 56272.43140255214 }, { "content": " farm_seed.get_ref_mut().min_deposit = min_deposit.into();\n\n }\n\n\n\n\n\n /// Migration function between versions.\n\n /// For next version upgrades, change this function.\n\n #[init(ignore_state)]\n\n #[private]\n\n pub fn migrate() -> Self {\n\n assert_eq!(\n\n env::predecessor_account_id(),\n\n env::current_account_id(),\n\n \"ERR_NOT_ALLOWED\"\n\n );\n\n let contract: Contract = env::state_read().expect(\"ERR_NOT_INITIALIZED\");\n\n return if contract.need_upgrade() {\n\n let contract_data_upgraded = contract.upgrade();\n\n let this = Contract {\n\n data: VersionedContractData::CurrentV2(contract_data_upgraded)\n\n };\n", "file_path": "ref-farming/src/owner.rs", "rank": 59, "score": 56272.098155527114 }, { "content": " method_name.as_ptr() as _,\n\n 0 as _,\n\n 0 as _,\n\n 0 as _,\n\n attached_gas,\n\n );\n\n });\n\n }\n\n }\n\n}", "file_path": "ref-farming/src/owner.rs", "rank": 60, "score": 56269.86553755261 }, { "content": "use crate::errors::*;\n\nuse crate::farm_seed::SeedType;\n\nuse crate::utils::{MFT_TAG, FT_INDEX_TAG};\n\nuse crate::*;\n\nuse near_sdk::json_types::U128;\n\nuse near_sdk::serde::{Deserialize, Serialize};\n\nuse near_sdk::PromiseOrValue;\n\n\n\nuse near_contract_standards::non_fungible_token::core::NonFungibleTokenReceiver;\n\nuse near_contract_standards::fungible_token::receiver::FungibleTokenReceiver;\n\n\n\npub type TokenId = String;\n\n\n\n#[derive(Serialize, Deserialize)]\n\n#[serde(crate = \"near_sdk::serde\")]\n\npub struct FarmArgs {\n\n pub transfer_type: String, // \"seed\", reward must use string only for farm_id\n\n pub seed_id: String,\n\n}\n\n\n", "file_path": "ref-farming/src/token_receiver.rs", "rank": 61, "score": 54178.21176250937 }, { "content": " match parse_token_id(token_id.clone()) {\n\n 
TokenOrPool::Pool(pool_id) => {\n\n seed_id = format!(\"{}{}{}\", env::predecessor_account_id(), MFT_TAG, pool_id);\n\n }\n\n TokenOrPool::Token(_) => {\n\n // for seed deposit, using mft to transfer 'root' token is not supported.\n\n env::panic(ERR35_ILLEGAL_TOKEN_ID.as_bytes());\n\n }\n\n }\n\n\n\n assert!(msg.is_empty(), \"ERR_MSG_INCORRECT\");\n\n\n\n // if seed not exist, it will panic\n\n let amount: u128 = amount.into();\n\n let seed_farm = self.get_seed(&seed_id);\n\n\n\n assert_eq!(seed_farm.get_ref().seed_type, SeedType::MFT, \"Cannot deposit MFT to this seed\");\n\n\n\n if amount < seed_farm.get_ref().min_deposit {\n\n env::panic(\n", "file_path": "ref-farming/src/token_receiver.rs", "rank": 62, "score": 54176.533950781464 }, { "content": " \"Paras(farming): nft_on_approve should only be called via cross-contract call\"\n\n );\n\n\n\n assert_eq!(\n\n previous_owner_id,\n\n signer_id,\n\n \"Paras(farming): owner_id should be signer_id\"\n\n );\n\n\n\n let deposit_res = self.internal_nft_deposit(&msg, &previous_owner_id.to_string(), &nft_contract_id, &token_id);\n\n if !deposit_res {\n\n panic!(\"Paras(farming): nft token does not exist on seed\");\n\n }\n\n PromiseOrValue::Value(false)\n\n }\n\n}\n", "file_path": "ref-farming/src/token_receiver.rs", "rank": 63, "score": 54173.82880371915 }, { "content": "#[near_bindgen]\n\nimpl FungibleTokenReceiver for Contract {\n\n /// Callback on receiving tokens by this contract.\n\n /// transfer reward token with specific msg indicate\n\n /// which farm to be deposited to.\n\n fn ft_on_transfer(\n\n &mut self,\n\n sender_id: ValidAccountId,\n\n amount: U128,\n\n msg: String,\n\n ) -> PromiseOrValue<U128> {\n\n let sender: AccountId = sender_id.into();\n\n let amount: u128 = amount.into();\n\n\n\n if msg.is_empty() {\n\n // ****** seed Token deposit in ********\n\n\n\n // if seed not exist, it will panic\n\n let seed_farm = self.get_seed(&env::predecessor_account_id());\n\n\n", "file_path": 
"ref-farming/src/token_receiver.rs", "rank": 64, "score": 54169.36961642069 }, { "content": "\n\n PromiseOrValue::Value(U128(0))\n\n }\n\n}\n\n\n\n// Receiving NFTs\n\n#[near_bindgen]\n\nimpl NonFungibleTokenReceiver for Contract {\n\n fn nft_on_transfer(\n\n &mut self,\n\n sender_id: AccountId,\n\n previous_owner_id: AccountId,\n\n token_id: TokenId,\n\n msg: String,\n\n ) -> PromiseOrValue<bool> {\n\n let nft_contract_id = env::predecessor_account_id();\n\n let signer_id = env::signer_account_id();\n\n\n\n assert_ne!(\n\n nft_contract_id, signer_id,\n", "file_path": "ref-farming/src/token_receiver.rs", "rank": 65, "score": 54168.877283478694 }, { "content": " assert_eq!(\n\n farm.get_reward_token(),\n\n env::predecessor_account_id(),\n\n \"{}\",\n\n ERR44_INVALID_FARM_REWARD\n\n );\n\n if let Some(cur_remain) = farm.add_reward(&amount) {\n\n self.data_mut().farms.insert(&farm_id, &farm);\n\n let old_balance = self\n\n .data()\n\n .reward_info\n\n .get(&env::predecessor_account_id())\n\n .unwrap_or(0);\n\n self.data_mut()\n\n .reward_info\n\n .insert(&env::predecessor_account_id(), &(old_balance + amount));\n\n\n\n env::log(\n\n format!(\n\n \"{} added {} Reward Token, Now has {} left\",\n", "file_path": "ref-farming/src/token_receiver.rs", "rank": 66, "score": 54167.08082885374 }, { "content": " self.assert_storage_usage(&sender);\n\n\n\n env::log(\n\n format!(\n\n \"{} deposit FT seed {} with amount {}.\",\n\n sender,\n\n env::predecessor_account_id(),\n\n amount,\n\n )\n\n .as_bytes(),\n\n );\n\n PromiseOrValue::Value(U128(0))\n\n } else {\n\n // ****** reward Token deposit in ********\n\n let farm_id = msg\n\n .parse::<FarmId>()\n\n .expect(&format!(\"{}\", ERR42_INVALID_FARM_ID));\n\n let mut farm = self.data().farms.get(&farm_id).expect(ERR41_FARM_NOT_EXIST);\n\n\n\n // update farm\n", "file_path": "ref-farming/src/token_receiver.rs", "rank": 67, "score": 54166.51461401786 }, { "content": " assert_eq!(seed_farm.get_ref().seed_type, SeedType::FT, \"Cannot 
deposit FT to this seed\");\n\n\n\n if amount < seed_farm.get_ref().min_deposit {\n\n env::panic(\n\n format!(\n\n \"{} {}\",\n\n ERR34_BELOW_MIN_SEED_DEPOSITED,\n\n seed_farm.get_ref().min_deposit\n\n )\n\n .as_bytes(),\n\n )\n\n }\n\n\n\n self.internal_seed_deposit(\n\n &env::predecessor_account_id(),\n\n &sender,\n\n amount.into(),\n\n SeedType::FT,\n\n );\n\n\n", "file_path": "ref-farming/src/token_receiver.rs", "rank": 68, "score": 54163.125802084236 }, { "content": " sender, amount, cur_remain\n\n )\n\n .as_bytes(),\n\n );\n\n PromiseOrValue::Value(U128(0))\n\n } else {\n\n env::panic(format!(\"{}\", ERR43_INVALID_FARM_STATUS).as_bytes())\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "ref-farming/src/token_receiver.rs", "rank": 69, "score": 54162.602786485564 }, { "content": " format!(\n\n \"{} {}\",\n\n ERR34_BELOW_MIN_SEED_DEPOSITED,\n\n seed_farm.get_ref().min_deposit\n\n )\n\n .as_bytes(),\n\n )\n\n }\n\n\n\n self.internal_seed_deposit(&seed_id, &sender_id, amount, SeedType::MFT);\n\n\n\n self.assert_storage_usage(&sender_id);\n\n\n\n env::log(\n\n format!(\n\n \"{} deposit MFT seed {} with amount {}.\",\n\n sender_id, seed_id, amount,\n\n )\n\n .as_bytes(),\n\n );\n", "file_path": "ref-farming/src/token_receiver.rs", "rank": 70, "score": 54162.011804679874 }, { "content": "#[ext_contract(ext_fungible_token)]\n\npub trait FungibleToken {\n\n fn ft_transfer(&mut self, receiver_id: AccountId, amount: U128, memo: Option<String>);\n\n}\n\n\n\n/// TODO: this should be in the near_standard_contracts\n", "file_path": "ref-farming/src/utils.rs", "rank": 71, "score": 50715.35411034576 }, { "content": "#[ext_contract(ext_multi_fungible_token)]\n\npub trait MultiFungibleToken {\n\n fn mft_transfer(&mut self, token_id: String, receiver_id: AccountId, amount: U128, memo: Option<String>);\n\n}\n\n\n", "file_path": "ref-farming/src/utils.rs", "rank": 72, "score": 49150.81328042292 }, { "content": "#[ext_contract(ext_non_fungible_token)]\n\npub trait NonFungibleToken {\n\n 
fn nft_transfer(\n\n &mut self,\n\n receiver_id: String,\n\n token_id: String,\n\n approval_id: Option<u64>,\n\n memo: Option<String>,\n\n );\n\n}\n\n\n", "file_path": "ref-farming/src/utils.rs", "rank": 73, "score": 49150.81328042292 }, { "content": "#[ext_contract(ext_self)]\n\npub trait TokenPostActions {\n\n fn callback_post_withdraw_reward(\n\n &mut self,\n\n token_id: AccountId,\n\n sender_id: AccountId,\n\n amount: U128,\n\n );\n\n\n\n fn callback_post_withdraw_ft_seed(\n\n &mut self,\n\n seed_id: SeedId,\n\n sender_id: AccountId,\n\n amount: U128,\n\n );\n\n\n\n fn callback_post_withdraw_mft_seed(\n\n &mut self,\n\n seed_id: SeedId,\n\n sender_id: AccountId,\n\n amount: U128,\n", "file_path": "ref-farming/src/utils.rs", "rank": 74, "score": 49146.8040608121 }, { "content": "const ownerAccountName = 'orang.testnet'\n", "file_path": "test/config.js", "rank": 75, "score": 43907.54659369636 }, { "content": "const ownerAccount = new nearAPI.Account(near.connection, ownerAccountName)\n", "file_path": "test/test-utils.js", "rank": 76, "score": 43907.54659369636 }, { "content": "use near_contract_standards::fungible_token::metadata::{\n\n FungibleTokenMetadata, FungibleTokenMetadataProvider,\n\n};\n\nuse near_contract_standards::fungible_token::FungibleToken;\n\nuse near_sdk::borsh::{self, BorshDeserialize, BorshSerialize};\n\nuse near_sdk::json_types::{ValidAccountId, U128};\n\nuse near_sdk::{env, near_bindgen, AccountId, PanicOnDefault, PromiseOrValue};\n\n\n\nnear_sdk::setup_alloc!();\n\n\n\n#[near_bindgen]\n\n#[derive(BorshSerialize, BorshDeserialize, PanicOnDefault)]\n\npub struct Contract {\n\n token: FungibleToken,\n\n}\n\n\n\n#[near_bindgen]\n\nimpl Contract {\n\n #[init]\n\n pub fn new() -> Self {\n", "file_path": "test-token/src/lib.rs", "rank": 77, "score": 37320.59500328944 }, { "content": "#[near_bindgen]\n\nimpl FungibleTokenMetadataProvider for Contract {\n\n fn ft_metadata(&self) -> FungibleTokenMetadata {\n\n unimplemented!()\n\n 
}\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use near_sdk::test_utils::{accounts, VMContextBuilder};\n\n use near_sdk::{env, testing_env, MockedBlockchain};\n\n\n\n use super::*;\n\n\n\n #[test]\n\n fn test_basics() {\n\n let mut context = VMContextBuilder::new();\n\n testing_env!(context.build());\n\n let mut contract = Contract::new();\n\n testing_env!(context\n", "file_path": "test-token/src/lib.rs", "rank": 78, "score": 37316.79382897735 }, { "content": " Self {\n\n token: FungibleToken::new(b\"t\".to_vec()),\n\n }\n\n }\n\n\n\n pub fn mint(&mut self, account_id: ValidAccountId, amount: U128) {\n\n self.token.internal_register_account(account_id.as_ref());\n\n self.token\n\n .internal_deposit(account_id.as_ref(), amount.into());\n\n }\n\n\n\n pub fn burn(&mut self, account_id: ValidAccountId, amount: U128) {\n\n self.token\n\n .internal_withdraw(account_id.as_ref(), amount.into());\n\n }\n\n}\n\n\n\nnear_contract_standards::impl_fungible_token_core!(Contract, token);\n\nnear_contract_standards::impl_fungible_token_storage!(Contract, token);\n\n\n", "file_path": "test-token/src/lib.rs", "rank": 79, "score": 37313.79459487439 }, { "content": " .attached_deposit(125 * env::storage_byte_cost())\n\n .build());\n\n contract.mint(accounts(0), 1_000_000.into());\n\n assert_eq!(contract.ft_balance_of(accounts(0)), 1_000_000.into());\n\n\n\n testing_env!(context\n\n .attached_deposit(125 * env::storage_byte_cost())\n\n .build());\n\n contract.storage_deposit(Some(accounts(1)), None);\n\n testing_env!(context\n\n .attached_deposit(1)\n\n .predecessor_account_id(accounts(0))\n\n .build());\n\n contract.ft_transfer(accounts(1), 1_000.into(), None);\n\n assert_eq!(contract.ft_balance_of(accounts(1)), 1_000.into());\n\n\n\n contract.burn(accounts(1), 500.into());\n\n assert_eq!(contract.ft_balance_of(accounts(1)), 500.into());\n\n }\n\n}\n", "file_path": "test-token/src/lib.rs", "rank": 80, "score": 37308.911475625166 }, { "content": "#[ext_contract(ext_receiver)]\n\npub 
trait NonFungibleTokenReceiver {\n\n /// Returns true if token should be returned to `sender_id`\n\n fn nft_on_transfer(\n\n &mut self,\n\n sender_id: AccountId,\n\n previous_owner_id: AccountId,\n\n token_id: TokenId,\n\n msg: String,\n\n ) -> PromiseOrValue<bool>;\n\n}\n\n\n\nnear_sdk::setup_alloc!();\n\n\n\n#[near_bindgen]\n\n#[derive(BorshDeserialize, BorshSerialize, PanicOnDefault)]\n\npub struct Contract {\n\n tokens: NonFungibleToken,\n\n metadata: LazyOption<NFTContractMetadata>,\n\n}\n\n\n\nconst DATA_IMAGE_SVG_NEAR_ICON: &str = \"data:image/svg+xml,%3Csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 288 288'%3E%3Cg id='l' data-name='l'%3E%3Cpath d='M187.58,79.81l-30.1,44.69a3.2,3.2,0,0,0,4.75,4.2L191.86,103a1.2,1.2,0,0,1,2,.91v80.46a1.2,1.2,0,0,1-2.12.77L102.18,77.93A15.35,15.35,0,0,0,90.47,72.5H87.34A15.34,15.34,0,0,0,72,87.84V201.16A15.34,15.34,0,0,0,87.34,216.5h0a15.35,15.35,0,0,0,13.08-7.31l30.1-44.69a3.2,3.2,0,0,0-4.75-4.2L96.14,186a1.2,1.2,0,0,1-2-.91V104.61a1.2,1.2,0,0,1,2.12-.77l89.55,107.23a15.35,15.35,0,0,0,11.71,5.43h3.13A15.34,15.34,0,0,0,216,201.16V87.84A15.34,15.34,0,0,0,200.66,72.5h0A15.35,15.35,0,0,0,187.58,79.81Z'/%3E%3C/g%3E%3C/svg%3E\";\n\n\n", "file_path": "test-nft/src/lib.rs", "rank": 81, "score": 31726.050625299416 }, { "content": "\n\n /// Returns token contract id this farm used for reward.\n\n pub fn get_reward_token(&self) -> AccountId {\n\n match self {\n\n Farm::SimpleFarm(farm) => farm.terms.reward_token.clone(),\n\n }\n\n }\n\n\n\n pub fn get_farm_id(&self) -> FarmId {\n\n match self {\n\n Farm::SimpleFarm(farm) => farm.farm_id.clone(),\n\n }\n\n }\n\n\n\n /// Returns how many reward tokens can given farmer claim.\n\n pub fn view_farmer_unclaimed_reward(\n\n &self,\n\n user_rps: &RPS,\n\n user_seeds: &Balance,\n\n total_seeds: &Balance,\n", "file_path": "ref-farming/src/farm.rs", "rank": 82, "score": 23358.113933637902 }, { "content": "//! 
Wrapper of different types of farms \n\n\n\nuse near_sdk::borsh::{self, BorshDeserialize, BorshSerialize};\n\nuse near_sdk::{AccountId, Balance};\n\n\n\nuse crate::simple_farm::{SimpleFarm, RPS};\n\nuse crate::SeedId;\n\n\n\npub(crate) type FarmId = String;\n\n\n\n/// Generic Farm, providing wrapper around different implementations of farms.\n\n/// Allows to add new types of farms just by adding extra item in the enum \n\n/// without needing to migrate the storage.\n\n#[derive(BorshSerialize, BorshDeserialize)]\n\npub enum Farm {\n\n SimpleFarm(SimpleFarm),\n\n}\n\n\n\nimpl Farm {\n\n /// Returns farm kind.\n", "file_path": "ref-farming/src/farm.rs", "rank": 83, "score": 23357.91457140687 }, { "content": " pub fn kind(&self) -> String {\n\n match self {\n\n Farm::SimpleFarm(_) => \"SIMPLE_FARM\".to_string(),\n\n }\n\n }\n\n\n\n /// return None if the farm can not accept reward anymore\n\n /// else return amount of undistributed reward \n\n pub fn add_reward(&mut self, amount: &Balance) -> Option<Balance> {\n\n match self {\n\n Farm::SimpleFarm(farm) => farm.add_reward(amount),\n\n }\n\n }\n\n\n\n /// Returns seed id this farm accepted.\n\n pub fn get_seed_id(&self) -> SeedId {\n\n match self {\n\n Farm::SimpleFarm(farm) => farm.terms.seed_id.clone(),\n\n }\n\n }\n", "file_path": "ref-farming/src/farm.rs", "rank": 84, "score": 23354.628941234743 }, { "content": "\n\n pub fn can_be_removed(&self, total_seeds: &Balance) -> bool {\n\n match self {\n\n Farm::SimpleFarm(farm) => farm.can_be_removed(total_seeds),\n\n }\n\n }\n\n\n\n pub fn move_to_clear(&mut self, total_seeds: &Balance) -> bool {\n\n match self {\n\n Farm::SimpleFarm(farm) => farm.move_to_clear(total_seeds),\n\n }\n\n }\n\n\n\n}\n", "file_path": "ref-farming/src/farm.rs", "rank": 85, "score": 23351.068711940898 }, { "content": " ) -> Balance {\n\n match self {\n\n Farm::SimpleFarm(farm) \n\n => farm.view_farmer_unclaimed_reward(user_rps, user_seeds, total_seeds),\n\n }\n\n }\n\n\n\n /// return the new 
user reward per seed \n\n /// and amount of reward as (user_rps, reward_amount) \n\n pub fn claim_user_reward(&mut self, \n\n user_rps: &RPS,\n\n user_seeds: &Balance, \n\n total_seeds: &Balance, \n\n silent: bool,\n\n ) -> (RPS, Balance) {\n\n match self {\n\n Farm::SimpleFarm(farm) \n\n => farm.claim_user_reward(user_rps, user_seeds, total_seeds, silent),\n\n }\n\n }\n", "file_path": "ref-farming/src/farm.rs", "rank": 86, "score": 23350.875046974543 }, { "content": "\n\nuse near_sdk::{env, near_bindgen, Promise};\n\nuse near_sdk::json_types::{U128};\n\nuse simple_farm::{SimpleFarm, HRSimpleFarmTerms};\n\nuse crate::utils::{gen_farm_id, MIN_SEED_DEPOSIT, parse_farm_id};\n\nuse crate::errors::*;\n\nuse crate::*;\n\nuse std::collections::HashMap;\n\nuse crate::farm_seed::{FarmSeedMetadata, NFTTokenId};\n\n\n\n\n\n#[near_bindgen]\n\nimpl Contract {\n\n /// create farm and pay for its storage fee\n\n #[payable]\n\n pub fn create_simple_farm(\n\n &mut self,\n\n terms: HRSimpleFarmTerms,\n\n min_deposit: Option<U128>,\n\n nft_balance: Option<HashMap<NFTTokenId, U128>>,\n", "file_path": "ref-farming/src/actions_of_farm.rs", "rank": 87, "score": 22863.178985281647 }, { "content": "//! FarmSeed stores information per seed about \n\n//! staked seed amount and farms under it.\n\n\n\nuse std::collections::HashSet;\n\nuse near_sdk::borsh::{self, BorshDeserialize, BorshSerialize};\n\nuse near_sdk::{Balance};\n\nuse near_sdk::serde::{Deserialize, Serialize};\n\nuse near_sdk::json_types::{U128};\n\nuse crate::errors::*;\n\nuse crate::farm::FarmId;\n\nuse crate::utils::parse_seed_id;\n\nuse std::collections::HashMap;\n\nuse near_sdk::collections::LookupMap;\n\nuse crate::{Contract, StorageKeys};\n\n\n\n\n\n/// For MFT, SeedId composes of token_contract_id \n\n/// and token's inner_id in that contract. 
\n\n/// For FT, SeedId is the token_contract_id.\n\npub(crate) type SeedId = String;\n", "file_path": "ref-farming/src/farm_seed.rs", "rank": 88, "score": 22862.791756454815 }, { "content": "//! The SimpleFarm provide a way to gain farming rewards periodically and \n\n//! proportionally.\n\n//! The creator first wrap his reward distribution schema with \n\n//! `SimpleFarmRewardTerms`, and create the farm with it, attached enough near \n\n//! for storage fee.\n\n//! But to enable farming, the creator or someone else should deposit reward \n\n//! token to the farm, after it was created.\n\n\n\nuse near_sdk::borsh::{self, BorshDeserialize, BorshSerialize};\n\nuse near_sdk::json_types::{U128, ValidAccountId};\n\nuse near_sdk::serde::{Deserialize, Serialize};\n\nuse near_sdk::{env, AccountId, Balance};\n\n\n\nuse crate::{SeedId, FarmId};\n\nuse crate::errors::*;\n\nuse crate::utils::*;\n\nuse uint::construct_uint;\n\n\n\nconstruct_uint! {\n\n /// 256-bit unsigned integer.\n", "file_path": "ref-farming/src/simple_farm.rs", "rank": 89, "score": 22862.044826909758 }, { "content": " seed_id: terms.seed_id.clone(),\n\n reward_token: terms.reward_token.clone().into(),\n\n start_at: terms.start_at,\n\n reward_per_session: terms.reward_per_session.into(),\n\n session_interval: terms.session_interval,\n\n }\n\n }\n\n}\n\n\n\n#[derive(BorshSerialize, BorshDeserialize, Clone)]\n\npub enum SimpleFarmStatus {\n\n Created, Running, Ended, Cleared\n\n}\n\n\n\nimpl From<&SimpleFarmStatus> for String {\n\n fn from(status: &SimpleFarmStatus) -> Self {\n\n match *status {\n\n SimpleFarmStatus::Created => { String::from(\"Created\") },\n\n SimpleFarmStatus::Running => { String::from(\"Running\") },\n\n SimpleFarmStatus::Ended => { String::from(\"Ended\") },\n", "file_path": "ref-farming/src/simple_farm.rs", "rank": 90, "score": 22857.650349987547 }, { "content": " pub struct U256(4);\n\n}\n\n\n\npub type ContractNFTTokenId = String;\n\npub type NFTTokenId = String;\n\n\n\npub type RPS = 
[u8; 32];\n\n\n\n// to ensure precision, all reward_per_seed would be multiplied by this DENOM\n\n// this value should be carefully choosen, now is 10**24.\n\npub const DENOM: u128 = 1_000_000_000_000_000_000_000_000;\n\n\n\n/// The terms defines how the farm works.\n\n/// In this version, we distribute reward token with a start height, a reward \n\n/// session interval, and reward amount per session. \n\n/// In this way, the farm will take the amount from undistributed reward to \n\n/// unclaimed reward each session. And all farmers would got reward token pro \n\n/// rata of their seeds.\n\n#[derive(BorshSerialize, BorshDeserialize, Clone)]\n\npub struct SimpleFarmTerms {\n", "file_path": "ref-farming/src/simple_farm.rs", "rank": 91, "score": 22856.924645791536 }, { "content": "\n\npub(crate) type NFTTokenId = String; //paras-comic-dev.testnet@6\n\n\n\npub(crate) type NftBalance = HashMap<NFTTokenId, U128>; //paras-comic-dev.testnet@6\n\n\n\n#[derive(BorshSerialize, BorshDeserialize, Clone, PartialEq, Debug)]\n\npub enum SeedType {\n\n FT,\n\n MFT,\n\n NFT\n\n}\n\n\n\n#[derive(Serialize, Deserialize, BorshSerialize, BorshDeserialize, Clone)]\n\n#[serde(crate = \"near_sdk::serde\")]\n\npub struct FarmSeedMetadata {\n\n pub title: Option<String>,\n\n pub media: Option<String>,\n\n}\n\n\n\n#[derive(BorshSerialize, BorshDeserialize)]\n", "file_path": "ref-farming/src/farm_seed.rs", "rank": 92, "score": 22855.489808237653 }, { "content": "#[cfg_attr(feature = \"test\", derive(Clone))]\n\npub struct FarmSeedV1 {\n\n /// The Farming Token this FarmSeed represented for\n\n pub seed_id: SeedId,\n\n /// The seed is a FT or MFT or NFT\n\n pub seed_type: SeedType,\n\n /// all farms that accepted this seed\n\n /// FarmId = {seed_id}#{next_index}\n\n pub farms: HashSet<FarmId>,\n\n pub next_index: u32,\n\n /// total (staked) balance of this seed (Farming Token)\n\n pub amount: Balance,\n\n pub min_deposit: Balance,\n\n pub nft_balance: Option<HashMap<NFTTokenId, U128>>,\n\n pub 
metadata: Option<FarmSeedMetadata>\n\n}\n\n\n\n#[derive(BorshSerialize, BorshDeserialize)]\n\n#[cfg_attr(feature = \"test\", derive(Clone))]\n\npub struct FarmSeed {\n", "file_path": "ref-farming/src/farm_seed.rs", "rank": 93, "score": 22854.01551148454 }, { "content": " fn from(fs: &FarmSeed) -> Self {\n\n let seed_type = match fs.seed_type {\n\n SeedType::FT => \"FT\".to_string(),\n\n SeedType::NFT => \"NFT\".to_string(),\n\n SeedType::MFT => \"MFT\".to_string(),\n\n };\n\n if let Some(seed_metadata) = fs.metadata.clone() {\n\n Self {\n\n seed_id: fs.seed_id.clone(),\n\n seed_type,\n\n next_index: fs.next_index,\n\n amount: fs.amount.into(),\n\n min_deposit: fs.min_deposit.into(),\n\n farms: fs.farms.iter().map(|key| key.clone()).collect(),\n\n title: Some(seed_metadata.title.unwrap_or(\"\".to_string())),\n\n media: Some(seed_metadata.media.unwrap_or(\"\".to_string())),\n\n nft_balance: None,\n\n }\n\n } else {\n\n Self {\n", "file_path": "ref-farming/src/farm_seed.rs", "rank": 94, "score": 22853.7953604683 }, { "content": "/// Implementation of simple farm, Similar to the design of \"berry farm\".\n\n/// Farmer stake their seed to farming on multiple farm accept that seed.\n\n#[derive(BorshSerialize, BorshDeserialize)]\n\npub struct SimpleFarm {\n\n\n\n pub farm_id: FarmId,\n\n \n\n pub terms: SimpleFarmTerms,\n\n\n\n pub status: SimpleFarmStatus,\n\n\n\n pub last_distribution: SimpleFarmRewardDistribution,\n\n\n\n /// total reward send into this farm by far, \n\n /// every time reward deposited in, add to this field\n\n pub amount_of_reward: Balance,\n\n /// reward token has been claimed by farmer by far\n\n pub amount_of_claimed: Balance,\n\n /// when there is no seed token staked, reward goes to beneficiary\n\n pub amount_of_beneficiary: Balance,\n", "file_path": "ref-farming/src/simple_farm.rs", "rank": 95, "score": 22853.746715998146 }, { "content": " /// The Farming Token this FarmSeed represented for\n\n pub seed_id: SeedId,\n\n /// The seed is a FT or 
MFT or NFT\n\n pub seed_type: SeedType,\n\n /// all farms that accepted this seed\n\n /// FarmId = {seed_id}#{next_index}\n\n pub farms: HashSet<FarmId>,\n\n pub next_index: u32,\n\n /// total (staked) balance of this seed (Farming Token)\n\n pub amount: Balance,\n\n pub min_deposit: Balance,\n\n pub metadata: Option<FarmSeedMetadata>\n\n}\n\n\n\nimpl FarmSeed {\n\n pub fn new(\n\n seed_id: &SeedId,\n\n min_deposit: Balance,\n\n is_nft_balance: bool,\n\n metadata: Option<FarmSeedMetadata>\n", "file_path": "ref-farming/src/farm_seed.rs", "rank": 96, "score": 22853.73488472442 }, { "content": " }\n\n }\n\n}\n\n\n\n\n\n#[derive(Debug, Serialize, Deserialize, PartialEq, Clone)]\n\n#[serde(crate = \"near_sdk::serde\")]\n\npub struct SeedInfo {\n\n pub seed_id: SeedId,\n\n pub seed_type: String,\n\n pub farms: Vec<FarmId>,\n\n pub next_index: u32,\n\n pub amount: U128,\n\n pub min_deposit: U128,\n\n pub nft_balance: Option<NftBalance>,\n\n pub title: Option<String>,\n\n pub media: Option<String>\n\n}\n\n\n\nimpl From<&FarmSeed> for SeedInfo {\n", "file_path": "ref-farming/src/farm_seed.rs", "rank": 97, "score": 22853.690672848115 }, { "content": " seed_id: fs.seed_id.clone(),\n\n seed_type,\n\n next_index: fs.next_index,\n\n amount: fs.amount.into(),\n\n min_deposit: fs.min_deposit.into(),\n\n farms: fs.farms.iter().map(|key| key.clone()).collect(),\n\n title: Some(\"\".to_string()),\n\n media: Some(\"\".to_string()),\n\n nft_balance: None\n\n }\n\n }\n\n }\n\n}\n", "file_path": "ref-farming/src/farm_seed.rs", "rank": 98, "score": 22853.518530950758 }, { "content": "\n\n pub fn add_amount(&mut self, amount: Balance) {\n\n self.amount += amount;\n\n }\n\n\n\n /// return seed amount remains.\n\n pub fn sub_amount(&mut self, amount: Balance) -> Balance {\n\n assert!(self.amount >= amount, \"{}\", ERR500);\n\n self.amount -= amount;\n\n self.amount\n\n }\n\n\n\n}\n\n\n\n/// Versioned FarmSeed, used for lazy upgrade.\n\n/// Which means this structure would upgrade 
automatically when used.\n\n/// To achieve that, each time the new version comes in, \n\n/// each function of this enum should be carefully re-code!\n\n#[derive(BorshSerialize, BorshDeserialize)]\n\npub enum VersionedFarmSeed {\n", "file_path": "ref-farming/src/farm_seed.rs", "rank": 99, "score": 22853.240639349176 } ]
Rust
tests/hvac.rs
uber-foo/hvac
7d6680a3c0e1e09e2ba9bd4446bd3ed0d3ca67c0
use hvac::prelude::*; #[test] fn new_hvac_is_idle() { let mut hvac = Hvac::default(); let state = hvac.tick(0); assert_eq!(state.service, None); assert_eq!(state.fan, false); } #[test] fn new_hvac_enforces_min_heat_recover_constraints() { let mut hvac = Hvac::default().with_heat(None, Some(100)); let state = hvac.heat(); assert_eq!(state.service, None); assert_eq!(state.fan, false); for i in 0..100 { let state = hvac.tick(i); assert_eq!(state.service, None); assert_eq!(state.fan, false); } let state = hvac.tick(100); assert_eq!(state.service, Some(HvacService::Heat)); assert_eq!(state.fan, true); } #[test] fn new_hvac_enforces_min_cool_recover_constraints() { let mut hvac = Hvac::default().with_cool(None, Some(100)); let state = hvac.cool(); assert_eq!(state.service, None); assert_eq!(state.fan, false); for i in 0..100 { let state = hvac.tick(i); assert_eq!(state.service, None); assert_eq!(state.fan, false); } let state = hvac.tick(100); assert_eq!(state.service, Some(HvacService::Cool)); assert_eq!(state.fan, true); } #[test] fn new_hvac_enforces_min_fan_recover_constraints() { let mut hvac = Hvac::default().with_fan(None, Some(100)); let state = hvac.fan_auto(false); assert_eq!(state.service, None); assert_eq!(state.fan, false); for i in 0..100 { let state = hvac.tick(i); assert_eq!(state.service, None); assert_eq!(state.fan, false); } let state = hvac.tick(100); assert_eq!(state.service, None); assert_eq!(state.fan, true); } #[test] fn hvac_fan_auto_with_heat() { let mut hvac = Hvac::default().with_heat(None, None).with_fan(None, None); let state = hvac.heat(); assert_eq!(state.service, Some(HvacService::Heat)); assert_eq!(state.fan, true); let state = hvac.idle(); assert_eq!(state.service, None); assert_eq!(state.fan, false); } #[test] fn hvac_fan_auto_with_cool() { let mut hvac = Hvac::default().with_cool(None, None).with_fan(None, None); let state = hvac.cool(); assert_eq!(state.service, Some(HvacService::Cool)); assert_eq!(state.fan, true); let state = 
hvac.idle(); assert_eq!(state.service, None); assert_eq!(state.fan, false); } #[test] fn hvac_fan_auto_sequence() { let mut hvac = Hvac::default() .with_heat(None, None) .with_cool(None, None) .with_fan(None, None); let state = hvac.idle(); assert_eq!(state.fan, false); let state = hvac.heat(); assert_eq!(state.fan, true); let state = hvac.cool(); assert_eq!(state.fan, true); let state = hvac.idle(); assert_eq!(state.fan, false); let state = hvac.heat(); assert_eq!(state.fan, true); let state = hvac.idle(); assert_eq!(state.fan, false); let state = hvac.cool(); assert_eq!(state.fan, true); let state = hvac.idle(); assert_eq!(state.fan, false); } #[test] fn hvac_fan_manual() { let mut hvac = Hvac::default() .with_heat(None, None) .with_cool(None, None) .with_fan(None, None); let state = hvac.fan_auto(false); assert_eq!(state.service, None); assert_eq!(state.fan, true); let state = hvac.heat(); assert_eq!(state.service, Some(HvacService::Heat)); assert_eq!(state.fan, true); let state = hvac.cool(); assert_eq!(state.service, Some(HvacService::Cool)); assert_eq!(state.fan, true); let state = hvac.idle(); assert_eq!(state.service, None); assert_eq!(state.fan, true); let state = hvac.fan_auto(true); assert_eq!(state.service, None); assert_eq!(state.fan, false); } #[test] fn fan_auto_min_run_carries_past_heat() { let mut hvac = Hvac::default() .with_heat(None, None) .with_fan(Some(1), None); let state = hvac.tick(0); assert_eq!(state.fan, false); let state = hvac.heat(); assert_eq!(state.fan, true); let state = hvac.idle(); assert_eq!(state.fan, true); let state = hvac.tick(1); assert_eq!(state.fan, false); }
use hvac::prelude::*; #[test] fn new_hvac_is_idle() { let mut hvac = Hvac::default(); let state = hvac.tick(0); assert_eq!(state.service, None); assert_eq!(state.fan, false); } #[test] fn new_hvac_enforces_min_heat_recover_constraints() { let mut hvac = Hvac::default().with_heat(None, Some(100)); let state = hvac.heat(); assert_eq!(state.service, None); assert_eq!(state.fan, false); for i in 0..100 { let state = hvac.tick(i); assert_eq!(state.service, None); assert_eq!(state.fan, false); } let state = hvac.tick(100); assert_eq!(state.service, Some(HvacService::Heat)); assert_eq!(state.fan, true); } #[test] fn new_hvac_enforces_min_cool_recover_constraints() { let mut hvac = Hvac::default().with_cool(None, Some(100)); let state = hvac.cool(); assert_eq!(state.service, None); assert_eq!(state.fan, false); for i in 0..100 { let state = hvac.tick(i); assert_eq!(state.service, None); assert_eq!(state.fan, false); } let state = hvac.tick(100); assert_eq!(state.service, Some(HvacService::Cool)); assert_eq!(state.fan, true); } #[test] fn new_hvac_enforces_min_fan_recover_constraints() { let mut hvac = Hvac::default().with_fan(None, Some(100)); let state = hvac.fan_auto(false); assert_eq!(state.service, None); assert_eq!(state.fan, false); for i in 0..100 { let state = hvac.tick(i); assert_eq!(state.service, None); assert_eq!(state.fan, false); } let state = hvac.tick(100); assert_eq!(state.service, None); assert_eq!(state.fan, true); } #[test] fn hvac_fan_auto_with_heat() { let mut hvac = Hvac::default().with_heat(None,
t state = hvac.idle(); assert_eq!(state.service, None); assert_eq!(state.fan, false); } #[test] fn hvac_fan_auto_with_cool() { let mut hvac = Hvac::default().with_cool(None, None).with_fan(None, None); let state = hvac.cool(); assert_eq!(state.service, Some(HvacService::Cool)); assert_eq!(state.fan, true); let state = hvac.idle(); assert_eq!(state.service, None); assert_eq!(state.fan, false); } #[test] fn hvac_fan_auto_sequence() { let mut hvac = Hvac::default() .with_heat(None, None) .with_cool(None, None) .with_fan(None, None); let state = hvac.idle(); assert_eq!(state.fan, false); let state = hvac.heat(); assert_eq!(state.fan, true); let state = hvac.cool(); assert_eq!(state.fan, true); let state = hvac.idle(); assert_eq!(state.fan, false); let state = hvac.heat(); assert_eq!(state.fan, true); let state = hvac.idle(); assert_eq!(state.fan, false); let state = hvac.cool(); assert_eq!(state.fan, true); let state = hvac.idle(); assert_eq!(state.fan, false); } #[test] fn hvac_fan_manual() { let mut hvac = Hvac::default() .with_heat(None, None) .with_cool(None, None) .with_fan(None, None); let state = hvac.fan_auto(false); assert_eq!(state.service, None); assert_eq!(state.fan, true); let state = hvac.heat(); assert_eq!(state.service, Some(HvacService::Heat)); assert_eq!(state.fan, true); let state = hvac.cool(); assert_eq!(state.service, Some(HvacService::Cool)); assert_eq!(state.fan, true); let state = hvac.idle(); assert_eq!(state.service, None); assert_eq!(state.fan, true); let state = hvac.fan_auto(true); assert_eq!(state.service, None); assert_eq!(state.fan, false); } #[test] fn fan_auto_min_run_carries_past_heat() { let mut hvac = Hvac::default() .with_heat(None, None) .with_fan(Some(1), None); let state = hvac.tick(0); assert_eq!(state.fan, false); let state = hvac.heat(); assert_eq!(state.fan, true); let state = hvac.idle(); assert_eq!(state.fan, true); let state = hvac.tick(1); assert_eq!(state.fan, false); }
None).with_fan(None, None); let state = hvac.heat(); assert_eq!(state.service, Some(HvacService::Heat)); assert_eq!(state.fan, true); le
function_block-random_span
[ { "content": "fn wait_seconds(\n\n last_update: Option<u32>,\n\n min_seconds: Option<u32>,\n\n last_change: Option<u32>,\n\n) -> Option<u32> {\n\n if let Some(last_update) = last_update {\n\n if let Some(min_seconds) = min_seconds {\n\n let delta = last_update - last_change.unwrap_or(0);\n\n if delta < min_seconds {\n\n Some(min_seconds - delta)\n\n } else {\n\n None\n\n }\n\n } else {\n\n None\n\n }\n\n } else {\n\n min_seconds\n\n }\n\n}\n", "file_path": "src/lib.rs", "rank": 10, "score": 20412.260051522535 }, { "content": "# Example\n\n```rust\n\nuse hvac::prelude::*;\n\n\n\n// create a new hvac controller with the\n\n// following constraints:\n\n//\n\n// heat:\n\n// - no min run time\n\n// - min recover of 1 minute (60 sec)\n\n//\n\n// cool:\n\n// - min run time of 5 minutes (300 sec)\n\n// - min recover of 5 minutes (300 sec)\n\n//\n\n// fan:\n\n// - no min run time\n\n// - no min recovery\n\nlet mut hvac_controller = Hvac::default()\n\n .with_heat(None, Some(60))\n\n .with_cool(Some(300), Some(300))\n\n .with_fan(None, None);\n\n\n\n// enable heat as soon as permissible\n\nlet state = hvac_controller.heat();\n\n\n\nfor i in 0..60 {\n\n // advance state machine to `i`\n\n // seconds elapsed\n\n let state = hvac_controller.tick(i);\n\n // even though we have called for\n\n // heat, it will not be enabled\n\n // until we have met our 60 second\n\n // minimum recovery time\n\n assert_eq!(state.service, None);\n\n // and since the fan is set to auto\n\n // by default, it remains disabled\n\n assert_eq!(state.fan, false);\n\n}\n\n\n\n// once the state machine is at\n\n// 60 seconds elappsed...\n\nlet state = hvac_controller.tick(60);\n\n// we have now met our minimum recover\n\n// time and heat is enabled\n\nassert_eq!(state.service, Some(HvacService::Heat));\n\n// along with the fan\n\nassert_eq!(state.fan, true);\n\n\n\n// we can now call for cool\n\nlet state = hvac_controller.cool();\n\n// and heat will be immediately disabled\n\n// since we gave it no min 
run time but\n\n// cool is not immediately enabled due\n\n// to its 300 second recovery time\n\nassert_eq!(state.service, None);\n\n// fan is still set to auto and has no\n\n// minimum run time, it is also disabled\n\nassert_eq!(state.fan, false);\n\n\n\n// advancing to cool's minimum recovery\n\n// time will result in cool starting\n\nlet state = hvac_controller.tick(300);\n\nassert_eq!(state.service, Some(HvacService::Cool));\n\n// fan also starts again\n\nassert_eq!(state.fan, true);\n\n\n\n// we idle the system calls\n\nlet state = hvac_controller.idle();\n\n// which has no immediate effect because\n\n// of cool's min run time\n\nassert_eq!(state.service, Some(HvacService::Cool));\n", "file_path": "README.md", "rank": 11, "score": 10420.551336662307 }, { "content": "assert_eq!(state.fan, true);\n\n\n\n// we disable auto mode for the fan\n\nlet state = hvac_controller.fan_auto(false);\n\n// which still has no immediate effect\n\nassert_eq!(state.service, Some(HvacService::Cool));\n\nassert_eq!(state.fan, true);\n\n\n\n// until we advance another 300 seconds\n\n// elapsed to meet cool's min run time\n\nlet state = hvac_controller.tick(600);\n\n// now cool has stopped but fan\n\n// continues with auto mode disabled\n\nassert_eq!(state.service, None);\n\nassert_eq!(state.fan, true);\n\n\n\n// without a minimum run time, fan will\n\n// immediately shut down when put back\n\n// into auto mode\n\nlet state = hvac_controller.fan_auto(true);\n\nassert_eq!(state.fan, false);\n\n```\n\n\n", "file_path": "README.md", "rank": 12, "score": 10420.035961537524 }, { "content": "# hvac\n\n\n\n[![Build Status](https://travis-ci.org/uber-foo/hvac.svg?branch=master)](https://travis-ci.org/uber-foo/hvac)\n\n[![Latest Version](https://img.shields.io/crates/v/hvac.svg)](https://crates.io/crates/hvac)\n\n[![docs](https://docs.rs/hvac/badge.svg)](https://docs.rs/hvac)\n\n![rustc 1.31+](https://img.shields.io/badge/rustc-1.31+-blue.svg)\n\n\n\nThis crate provides a state machine for 
an\n\n[HVAC controller](https://en.wikipedia.org/wiki/HVAC_control_system).\n\n\n\nHVAC control systems regulate the operation of a heating and/or air conditioning system.\n\nEssentially, they turn on or off the heating, cooling, and air circulation as instructed by some\n\nother system–typically a thermostat.\n\n\n\nThis crate currently supports only single-stage HVAC implementations wherein the heating and\n\ncooling systems can be either on or off with no intermediate states of operation. Optional\n\nconstraints on the minimum run and recovery time are supported for the heat, cool, and fan\n\nservices.\n\n\n\nThis crate has no dependencies on the standard library or any other crates, making it\n\neasily used in standard applications as well as embedded targets leveraging\n\n[`#![no_std]`](https://doc.rust-lang.org/reference/attributes.html?highlight=no_std#crate-only-attributes).\n\n\n\n\n", "file_path": "README.md", "rank": 13, "score": 10415.397561644695 }, { "content": "## License\n\n\n\nLicensed under either of the following, at your option:\n\n\n\n * Apache License, Version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or http://www.apache.org/licenses/LICENSE-2.0)\n\n * MIT license ([LICENSE-MIT](LICENSE-MIT) or http://opensource.org/licenses/MIT)\n\n\n\n### Contribution\n\n\n\nUnless you explicitly state otherwise, any contribution intentionally submitted for\n\ninclusion in the work by you, as defined in the Apache-2.0 license, shall be dual licensed\n", "file_path": "README.md", "rank": 14, "score": 10413.166551812432 }, { "content": "# 0.1.0\n\n- initial release\n\n- single-stage heating and cooling service\n\n- fan with automatic (on during service call) and manual (always on) modes\n\n- optional minimum run time constraint for heat, cool, and fan\n", "file_path": "CHANGELOG.md", "rank": 15, "score": 10412.122268077313 }, { "content": " self.heat_calling = false;\n\n self.cool_calling = true;\n\n self.compute()\n\n }\n\n\n\n /// update state machine setting 
fan to auto (on with service) or manual (on always)\n\n pub fn fan_auto(&mut self, fan_auto: bool) -> HvacState {\n\n self.fan_auto = fan_auto;\n\n self.compute()\n\n }\n\n\n\n /// update state machine disabling any calls for service\n\n pub fn idle(&mut self) -> HvacState {\n\n self.heat_calling = false;\n\n self.cool_calling = false;\n\n self.compute()\n\n }\n\n}\n\n\n\n/// convienence module that re-exports the typical api\n\npub mod prelude {\n\n #[doc(no_inline)]\n\n pub use crate::{Hvac, HvacService, HvacState};\n\n}\n", "file_path": "src/lib.rs", "rank": 16, "score": 7.291569288255816 }, { "content": " };\n\n\n\n self.state()\n\n }\n\n\n\n /// update the state machine with new seconds elappsed value\n\n pub fn tick(&mut self, current_seconds: u32) -> HvacState {\n\n self.last_update = Some(current_seconds);\n\n self.compute()\n\n }\n\n\n\n /// update state machine with a call for heat, disabling call for cool in the process\n\n pub fn heat(&mut self) -> HvacState {\n\n self.heat_calling = true;\n\n self.cool_calling = false;\n\n self.compute()\n\n }\n\n\n\n /// update state machine with call for cool, disabling call for heat in the process\n\n pub fn cool(&mut self) -> HvacState {\n", "file_path": "src/lib.rs", "rank": 17, "score": 6.880503367446252 }, { "content": "//! // now cool has stopped but fan\n\n//! // continues with auto mode disabled\n\n//! assert_eq!(state.service, None);\n\n//! assert_eq!(state.fan, true);\n\n//!\n\n//! // without a minimum run time, fan will\n\n//! // immediately shut down when put back\n\n//! // into auto mode\n\n//! let state = hvac_controller.fan_auto(true);\n\n//! assert_eq!(state.fan, false);\n\n//! 
```\n\n#![no_std]\n\n#![deny(warnings)]\n\n#![deny(bad_style)]\n\n#![deny(future_incompatible)]\n\n#![deny(nonstandard_style)]\n\n#![deny(unused)]\n\n#![deny(rust_2018_compatibility)]\n\n#![deny(rust_2018_idioms)]\n\n#![deny(macro_use_extern_crate)]\n", "file_path": "src/lib.rs", "rank": 18, "score": 5.9132572801491685 }, { "content": "//! assert_eq!(state.service, Some(HvacService::Cool));\n\n//! // fan also starts again\n\n//! assert_eq!(state.fan, true);\n\n//!\n\n//! // we idle the system calls\n\n//! let state = hvac_controller.idle();\n\n//! // which has no immediate effect because\n\n//! // of cool's min run time\n\n//! assert_eq!(state.service, Some(HvacService::Cool));\n\n//! assert_eq!(state.fan, true);\n\n//!\n\n//! // we disable auto mode for the fan\n\n//! let state = hvac_controller.fan_auto(false);\n\n//! // which still has no immediate effect\n\n//! assert_eq!(state.service, Some(HvacService::Cool));\n\n//! assert_eq!(state.fan, true);\n\n//!\n\n//! // until we advance another 300 seconds\n\n//! // elapsed to meet cool's min run time\n\n//! let state = hvac_controller.tick(600);\n", "file_path": "src/lib.rs", "rank": 19, "score": 5.792834524856507 }, { "content": " self.cool_min_recover_seconds = min_recover_seconds;\n\n self\n\n }\n\n\n\n /// use custom fan run and recover time constraints\n\n pub fn with_fan(\n\n mut self,\n\n min_run_seconds: Option<u32>,\n\n min_recover_seconds: Option<u32>,\n\n ) -> Self {\n\n self.fan_min_run_seconds = min_run_seconds;\n\n self.fan_min_recover_seconds = min_recover_seconds;\n\n self\n\n }\n\n\n\n fn state(&self) -> HvacState {\n\n HvacState {\n\n service: self.active_service,\n\n fan: self.fan_active,\n\n }\n", "file_path": "src/lib.rs", "rank": 20, "score": 5.307488444225898 }, { "content": "//! // we have now met our minimum recover\n\n//! // time and heat is enabled\n\n//! assert_eq!(state.service, Some(HvacService::Heat));\n\n//! // along with the fan\n\n//! assert_eq!(state.fan, true);\n\n//!\n\n//! 
// we can now call for cool\n\n//! let state = hvac_controller.cool();\n\n//! // and heat will be immediately disabled\n\n//! // since we gave it no min run time but\n\n//! // cool is not immediately enabled due\n\n//! // to its 300 second recovery time\n\n//! assert_eq!(state.service, None);\n\n//! // fan is still set to auto and has no\n\n//! // minimum run time, it is also disabled\n\n//! assert_eq!(state.fan, false);\n\n//!\n\n//! // advancing to cool's minimum recovery\n\n//! // time will result in cool starting\n\n//! let state = hvac_controller.tick(300);\n", "file_path": "src/lib.rs", "rank": 21, "score": 5.124698991655167 }, { "content": "//!\n\n//! // create a new hvac controller with the\n\n//! // following constraints:\n\n//! //\n\n//! // heat:\n\n//! // - no min run time\n\n//! // - min recover of 1 minute (60 sec)\n\n//! //\n\n//! // cool:\n\n//! // - min run time of 5 minutes (300 sec)\n\n//! // - min recover of 5 minutes (300 sec)\n\n//! //\n\n//! // fan:\n\n//! // - no min run time\n\n//! // - no min recovery\n\n//! let mut hvac_controller = Hvac::default()\n\n//! .with_heat(None, Some(60))\n\n//! .with_cool(Some(300), Some(300))\n\n//! .with_fan(None, None);\n\n//!\n", "file_path": "src/lib.rs", "rank": 22, "score": 5.070834678178179 }, { "content": "//! // enable heat as soon as permissible\n\n//! let state = hvac_controller.heat();\n\n\n\n//! for i in 0..60 {\n\n//! // advance state machine to `i`\n\n//! // seconds elapsed\n\n//! let state = hvac_controller.tick(i);\n\n//! // even though we have called for\n\n//! // heat, it will not be enabled\n\n//! // until we have met our 60 second\n\n//! // minimum recovery time\n\n//! assert_eq!(state.service, None);\n\n//! // and since the fan is set to auto\n\n//! // by default, it remains disabled\n\n//! assert_eq!(state.fan, false);\n\n//! }\n\n//!\n\n//! // once the state machine is at\n\n//! // 60 seconds elappsed...\n\n//! 
let state = hvac_controller.tick(60);\n", "file_path": "src/lib.rs", "rank": 23, "score": 5.027056529457804 }, { "content": " self.heat_last_start_seconds = self.last_update;\n\n self.active_service = Some(HvacService::Heat);\n\n };\n\n } else if self.cool_calling && self.cool_wait_seconds.is_none() {\n\n if !self.fan_active && self.fan_wait_seconds.is_none() {\n\n self.fan_last_start_seconds = self.last_update;\n\n self.fan_active = true;\n\n };\n\n if self.fan_active {\n\n self.cool_last_start_seconds = self.last_update;\n\n self.active_service = Some(HvacService::Cool);\n\n };\n\n };\n\n\n\n if self.fan_active && self.fan_auto {\n\n if self.active_service.is_none() && self.fan_wait_seconds.is_none() {\n\n self.fan_active = false;\n\n };\n\n } else if !self.fan_auto && self.fan_wait_seconds.is_none() {\n\n self.fan_active = true;\n", "file_path": "src/lib.rs", "rank": 24, "score": 4.7353106775882035 }, { "content": " HvacService::Cool => {\n\n if !self.cool_calling && self.cool_wait_seconds.is_none() {\n\n self.cool_last_stop_seconds = self.last_update;\n\n self.active_service = None;\n\n if self.heat_calling && self.heat_wait_seconds.is_none() {\n\n self.heat_last_start_seconds = self.last_update;\n\n self.active_service = Some(HvacService::Heat);\n\n } else if self.fan_auto && self.fan_wait_seconds.is_none() {\n\n self.fan_last_stop_seconds = self.last_update;\n\n self.fan_active = false;\n\n };\n\n };\n\n }\n\n };\n\n } else if self.heat_calling && self.heat_wait_seconds.is_none() {\n\n if !self.fan_active && self.fan_wait_seconds.is_none() {\n\n self.fan_last_start_seconds = self.last_update;\n\n self.fan_active = true;\n\n };\n\n if self.fan_active {\n", "file_path": "src/lib.rs", "rank": 25, "score": 4.2273023890151675 }, { "content": "//! This crate provides a state machine for an\n\n//! [HVAC controller](https://en.wikipedia.org/wiki/HVAC_control_system).\n\n//!\n\n//! 
HVAC control systems regulate the operation of a heating and/or air conditioning system.\n\n//! Essentially, they turn on or off the heating, cooling, and air circulation as instructed by some\n\n//! other system–typically a thermostat.\n\n//!\n\n//! This crate currently supports only single-stage HVAC implementations wherein the heating and\n\n//! cooling systems can be either on or off with no intermediate states of operation. Optional\n\n//! constraints on the minimum run and recovery time are supported for the heat, cool, and fan\n\n//! services.\n\n//!\n\n//! This crate has no dependencies on the standard library or any other crates, making it\n\n//! easily used in standard applications as well as embedded targets leveraging\n\n//! [`#![no_std]`](https://doc.rust-lang.org/reference/attributes.html?highlight=no_std#crate-only-attributes).\n\n\n\n//!\n\n//! # Example\n\n//! ```\n\n//! use hvac::prelude::*;\n", "file_path": "src/lib.rs", "rank": 26, "score": 4.0240252637473475 }, { "content": " }\n\n\n\n fn compute(&mut self) -> HvacState {\n\n self.heat_wait_seconds = if self.active_service == Some(HvacService::Heat) {\n\n wait_seconds(\n\n self.last_update,\n\n self.heat_min_run_seconds,\n\n self.heat_last_start_seconds,\n\n )\n\n } else {\n\n wait_seconds(\n\n self.last_update,\n\n self.heat_min_recover_seconds,\n\n self.heat_last_stop_seconds,\n\n )\n\n };\n\n\n\n self.cool_wait_seconds = if self.active_service == Some(HvacService::Cool) {\n\n wait_seconds(\n\n self.last_update,\n", "file_path": "src/lib.rs", "rank": 27, "score": 3.857031157840799 }, { "content": "\n\nimpl Hvac {\n\n /// use custom heat run and recover time constraints\n\n pub fn with_heat(\n\n mut self,\n\n min_run_seconds: Option<u32>,\n\n min_recover_seconds: Option<u32>,\n\n ) -> Self {\n\n self.heat_min_run_seconds = min_run_seconds;\n\n self.heat_min_recover_seconds = min_recover_seconds;\n\n self\n\n }\n\n\n\n /// use custom cool run and recover time constraints\n\n pub fn 
with_cool(\n\n mut self,\n\n min_run_seconds: Option<u32>,\n\n min_recover_seconds: Option<u32>,\n\n ) -> Self {\n\n self.cool_min_run_seconds = min_run_seconds;\n", "file_path": "src/lib.rs", "rank": 28, "score": 3.7501655533186744 }, { "content": " /// cool\n\n Cool,\n\n}\n\n\n\n/// hvac state\n\n#[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash)]\n\npub struct HvacState {\n\n /// active service, if any\n\n pub service: Option<HvacService>,\n\n /// if fan is active\n\n pub fan: bool,\n\n}\n\n\n\n/// hvac state machine\n\n#[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash)]\n\npub struct Hvac {\n\n active_service: Option<HvacService>,\n\n fan_active: bool,\n\n last_update: Option<u32>,\n\n heat_calling: bool,\n", "file_path": "src/lib.rs", "rank": 29, "score": 3.6709917723470458 }, { "content": " self.fan_min_recover_seconds,\n\n self.fan_last_stop_seconds,\n\n )\n\n };\n\n\n\n if let Some(active_service) = self.active_service {\n\n match active_service {\n\n HvacService::Heat => {\n\n if !self.heat_calling && self.heat_wait_seconds.is_none() {\n\n self.heat_last_stop_seconds = self.last_update;\n\n self.active_service = None;\n\n if self.cool_calling && self.cool_wait_seconds.is_none() {\n\n self.cool_last_start_seconds = self.last_update;\n\n self.active_service = Some(HvacService::Cool);\n\n } else if self.fan_auto && self.fan_wait_seconds.is_none() {\n\n self.fan_last_stop_seconds = self.last_update;\n\n self.fan_active = false;\n\n };\n\n };\n\n }\n", "file_path": "src/lib.rs", "rank": 30, "score": 3.2080815593082272 }, { "content": " fn default() -> Self {\n\n Self {\n\n active_service: None,\n\n fan_active: false,\n\n last_update: None,\n\n heat_calling: false,\n\n heat_min_run_seconds: Some(60),\n\n heat_min_recover_seconds: Some(60),\n\n heat_wait_seconds: Some(60),\n\n heat_last_start_seconds: None,\n\n heat_last_stop_seconds: None,\n\n cool_calling: false,\n\n cool_min_run_seconds: Some(300),\n\n cool_min_recover_seconds: 
Some(300),\n\n cool_wait_seconds: Some(60),\n\n cool_last_start_seconds: None,\n\n cool_last_stop_seconds: None,\n\n fan_auto: true,\n\n fan_min_run_seconds: Some(60),\n\n fan_min_recover_seconds: Some(60),\n\n fan_wait_seconds: Some(60),\n\n fan_last_start_seconds: None,\n\n fan_last_stop_seconds: None,\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 31, "score": 3.12519401229526 }, { "content": "#![deny(missing_copy_implementations)]\n\n#![deny(missing_debug_implementations)]\n\n#![deny(missing_docs)]\n\n#![deny(trivial_casts)]\n\n#![deny(trivial_numeric_casts)]\n\n#![deny(unreachable_pub)]\n\n#![deny(unsafe_code)]\n\n#![deny(unstable_features)]\n\n#![deny(unused_import_braces)]\n\n#![deny(unused_lifetimes)]\n\n#![deny(unused_qualifications)]\n\n#![deny(unused_results)]\n\n#![deny(variant_size_differences)]\n\n#![cfg_attr(feature = \"cargo-clippy\", deny(clippy::all))]\n\n\n\n/// hvac services\n\n#[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash)]\n\npub enum HvacService {\n\n /// heat\n\n Heat,\n", "file_path": "src/lib.rs", "rank": 32, "score": 1.1680283195836472 }, { "content": " heat_min_run_seconds: Option<u32>,\n\n heat_min_recover_seconds: Option<u32>,\n\n heat_wait_seconds: Option<u32>,\n\n heat_last_start_seconds: Option<u32>,\n\n heat_last_stop_seconds: Option<u32>,\n\n cool_calling: bool,\n\n cool_min_run_seconds: Option<u32>,\n\n cool_min_recover_seconds: Option<u32>,\n\n cool_wait_seconds: Option<u32>,\n\n cool_last_start_seconds: Option<u32>,\n\n cool_last_stop_seconds: Option<u32>,\n\n fan_auto: bool,\n\n fan_min_run_seconds: Option<u32>,\n\n fan_min_recover_seconds: Option<u32>,\n\n fan_wait_seconds: Option<u32>,\n\n fan_last_start_seconds: Option<u32>,\n\n fan_last_stop_seconds: Option<u32>,\n\n}\n\n\n\nimpl Default for Hvac {\n", "file_path": "src/lib.rs", "rank": 33, "score": 0.5948478936443315 } ]
Rust
crates/parser/src/lib.rs
fangerm/gelixrs
de7b7b0b53ed8a4bcbe6b6484103b07e16f420e1
mod declaration; mod expression; mod nodes; mod util; use crate::util::{ builder::{Checkpoint, NodeBuilder}, source::Source, }; use common::bench; use error::{Error, ErrorSpan, GErr}; use lexer::Lexer; pub use nodes::*; use syntax::kind::SyntaxKind; pub fn parse(input: &str) -> Result<ParseResult, Vec<Error>> { let lexer = Lexer::new(input); let lexemes = lexer .map(|(tok, lexeme)| Lexeme { kind: tok.into(), lexeme, }) .collect::<Vec<_>>(); let parser = Parser::new(&lexemes); parser.parse() } #[derive(Copy, Clone)] struct Lexeme<'t> { kind: SyntaxKind, lexeme: &'t str, } struct Parser<'p> { source: Source<'p>, builder: NodeBuilder, errors: Vec<Error>, poisoned: bool, modifiers: Vec<SyntaxKind>, } impl<'p> Parser<'p> { fn parse(mut self) -> Result<ParseResult, Vec<Error>> { bench!("parser", { while self.peek() != SyntaxKind::EndOfFile { self.declaration(); if self.poisoned { self.try_depoison(); } } }); if self.errors.is_empty() { Ok(ParseResult { green_node: self.builder.finish(), }) } else { Err(self.errors) } } fn matches(&mut self, kind: SyntaxKind) -> bool { let matches = self.check(kind); if matches { self.advance(); } matches } fn matches_separator(&mut self) -> bool { self.matches(SyntaxKind::Semicolon) } fn consume(&mut self, kind: SyntaxKind, want: &'static str, after: &'static str) { if self.advance_checked() != kind { self.error_at_current(GErr::E001 { want, after }); } } fn consume_either( &mut self, kind1: SyntaxKind, kind2: SyntaxKind, want: &'static str, after: &'static str, ) { if self.peek() != kind1 && self.peek() != kind2 { self.error_at_current(GErr::E001 { want, after }); } else { self.advance(); } } fn error_at_current(&mut self, err: GErr) { if self.poisoned { self.advance_checked(); return; } let err = Error { index: ErrorSpan::Token(self.source.position()), kind: err, }; self.errors.push(err); self.poisoned = true; } fn try_depoison(&mut self) { let recoverable = &[ SyntaxKind::Enum, SyntaxKind::Class, SyntaxKind::Func, SyntaxKind::Import, 
SyntaxKind::Export, SyntaxKind::Impl, SyntaxKind::Interface, SyntaxKind::EndOfFile, ]; while !recoverable.contains(&self.peek()) { self.advance_checked(); } self.poisoned = false; } fn check(&mut self, kind: SyntaxKind) -> bool { self.peek() == kind } fn check_next(&mut self, kind: SyntaxKind) -> bool { self.peek_next() == kind } fn advance(&mut self) -> Lexeme<'p> { self.skip_whitespace(); self.advance_inner() } fn advance_inner(&mut self) -> Lexeme<'p> { let Lexeme { kind, lexeme } = self.source.get_current().unwrap(); self.source.next(); self.builder.token(kind, lexeme.into()); Lexeme { kind, lexeme } } fn advance_checked(&mut self) -> SyntaxKind { if self.is_at_end() { SyntaxKind::EndOfFile } else { self.advance().kind } } fn peek(&mut self) -> SyntaxKind { self.skip_whitespace(); self.peek_raw().unwrap_or(SyntaxKind::EndOfFile) } fn peek_next(&mut self) -> SyntaxKind { self.source.save(); self.skip_whitespace(); self.source.next(); while self.peek_raw().map(|k| k.should_skip()) == Some(true) { self.source.next(); } let ret = self.peek_raw().unwrap_or(SyntaxKind::EndOfFile); self.source.restore(); ret } fn last_was_whitespace(&mut self) -> bool { self.source.get_last().kind.should_skip() } fn peek_raw(&self) -> Option<SyntaxKind> { self.source.get_current().map(|Lexeme { kind, .. 
}| kind) } fn skip_whitespace(&mut self) { while self.peek_raw().map(|k| k.should_skip()) == Some(true) { self.advance_inner(); } } fn is_at_end(&self) -> bool { self.source.get_current().is_none() } fn node_with<T: FnOnce(&mut Self)>(&mut self, kind: SyntaxKind, content: T) { self.start_node(kind); content(self); self.end_node() } fn start_node(&mut self, kind: SyntaxKind) { self.skip_whitespace(); self.builder.start_node(kind); } fn start_node_at(&mut self, checkpoint: Checkpoint, kind: SyntaxKind) { self.builder.start_node_at(kind, checkpoint); self.skip_whitespace(); } fn end_node(&mut self) { self.builder.end_node(); } fn checkpoint(&mut self) -> Checkpoint { self.builder.checkpoint() } fn new(lexemes: &'p [Lexeme<'p>]) -> Self { Self { source: Source::new(lexemes), builder: NodeBuilder::new(), errors: vec![], poisoned: false, modifiers: Vec::with_capacity(4), } } } #[derive(Debug)] pub struct ParseResult { green_node: Node, } impl ParseResult { pub fn root(self) -> Node { self.green_node } }
mod declaration; mod expression; mod nodes; mod util; use crate::util::{ builder::{Checkpoint, NodeBuilder}, source::Source, }; use common::bench; use error::{Error, ErrorSpan, GErr}; use lexer::Lexer; pub use nodes::*; use syntax::kind::SyntaxKind; pub fn parse(input: &str) -> Result<ParseResult, Vec<Error>> { let lexer = Lexer::new(input); let lexemes = lexer .map(|(tok, lexeme)| Lexeme { kind: tok.into(), lexeme, }) .collect::<Vec<_>>(); let parser = Parser::new(&lexemes); parser.parse() } #[derive(Copy, Clone)] struct Lexeme<'t> { kind: SyntaxKind, lexeme: &'t str, } struct Parser<'p> { source: Source<'p>, builder: NodeBuilder, errors: Vec<Error>, poisoned: bool, modifiers: Vec<SyntaxKind>, } impl<'p> Parser<'p> { fn parse(mut self) -> Result<ParseResult, Vec<Error>> { bench!("parser", { while self.peek() != SyntaxKind::EndOfFile { self.declaration(); if self.poisoned { self.try_depoison(); } } }); if self.errors.is_empty() { Ok(ParseResult { green_node: self.builder.finish(), }) } else { Err(self.errors) } } fn matches(&mut self, kind: SyntaxKind) -> bool { let matches = self.check(kind); if matches { self.advance(); } matches } fn matches_separator(&mut self) -> bool { self.matches(SyntaxKind::Semicolon) } fn consume(&mut self, kind: SyntaxKind, want: &'static str, after: &'static str) { if self.advance_checked() != kind { self.error_at_current(GErr::E001 { want, after }); } } fn consume_either( &mut self, kind1: SyntaxKind, kind2: SyntaxKind, want: &'static str, after: &'static str, ) { if self.peek() != kind1 && self.peek() != kind2 { self.error_at_current(GErr::E001 { want, after }); } else { self.advance(); } } fn error_at_current(&mut self, err: GErr) { if self.poisoned { self.advance_checked(); return; } let err = Error { index: ErrorSpan::Token(self.source.position()), kind: err, }; self.errors.push(err); self.poisoned = true; } fn try_depoison(&mut self) { let recoverable = &[ SyntaxKind::Enum, SyntaxKind::Class, SyntaxKind::Func, SyntaxKind::Import, 
SyntaxKind::Export, SyntaxKind::Impl, SyntaxKind::Interface, SyntaxKind::EndOfFile, ]; while !recoverable.contains(&self.peek()) { self.advance_checked(); } self.poisoned = false; } fn check(&mut self, kind: SyntaxKind) -> bool { self.peek() == kind } fn check_next(&mut self, kind: SyntaxKind) -> bool { self.peek_next() == kind } fn advance(&mut self) -> Lexeme<'p> { self.skip_whitespace(); self.advance_inner() } fn advance_inner(&mut self) -> Lexeme<'p> { let Lexeme { kind, lexeme } = self.source.get_current().unwrap(); self.source.next(); self.builder.token(kind, lexeme.into()); Lexeme { kind, lexeme } } fn advance_checked(&mut self) -> SyntaxKind { if self.is_at_end() { SyntaxKind::EndOfFile } else { self.advance().kind } } fn peek(&mut self) -> SyntaxKind { self.skip_whitespace(); self.peek_raw().unwrap_or(SyntaxKind::EndOfFile) } fn peek_next(&mut self) -> SyntaxKind { self.source.save(); self.skip_whitespace(); self.source.next(); while self.peek_raw().map(|k| k.should_skip()) == Some(true) { self.source.next(); } let ret = self.peek_raw().unwrap_or(SyntaxKind::EndOfFile); self.source.restore(); ret } fn last_was_whitespace(&mut self) -> bool { self.source.get_last().kind.should_skip() } fn peek_raw(&self) -> Option<SyntaxKind> { self.source.get_current().map(|Lexeme { kind, .. }| kind) } fn skip_whitespace(&mut self) { while self.peek_raw().map(|k| k.should_skip()) == Some(true) { self.advance_inner(); } } fn is_at_end(&self) -> bool { self.source.get_current().is_none() } fn node_with<T: FnOnce(&mut Self)>(&mut self, kind: SyntaxKind, content: T) { self.start_node(k
errors: vec![], poisoned: false, modifiers: Vec::with_capacity(4), } } } #[derive(Debug)] pub struct ParseResult { green_node: Node, } impl ParseResult { pub fn root(self) -> Node { self.green_node } }
ind); content(self); self.end_node() } fn start_node(&mut self, kind: SyntaxKind) { self.skip_whitespace(); self.builder.start_node(kind); } fn start_node_at(&mut self, checkpoint: Checkpoint, kind: SyntaxKind) { self.builder.start_node_at(kind, checkpoint); self.skip_whitespace(); } fn end_node(&mut self) { self.builder.end_node(); } fn checkpoint(&mut self) -> Checkpoint { self.builder.checkpoint() } fn new(lexemes: &'p [Lexeme<'p>]) -> Self { Self { source: Source::new(lexemes), builder: NodeBuilder::new(),
random
[ { "content": "/// Produces a new error for the GIR.\n\npub fn gir_err(cst: CSTNode, err: GErr) -> Error {\n\n Error {\n\n index: ErrorSpan::Span(cst.text_range()),\n\n kind: err,\n\n }\n\n}\n", "file_path": "crates/gir-nodes/src/lib.rs", "rank": 0, "score": 299536.0737118246 }, { "content": "#[derive(Debug)]\n\nstruct WorkNode {\n\n children: NodeVec,\n\n kind: SyntaxKind,\n\n start: u32,\n\n end: u32,\n\n}\n\n\n\nimpl WorkNode {\n\n pub fn into_node(self) -> Node {\n\n Node::new(Rc::new(self.children), self.kind, self.start..self.end)\n\n }\n\n}\n\n\n\n#[derive(Copy, Clone, Debug)]\n\npub struct Checkpoint {\n\n node: usize,\n\n child_count: usize,\n\n start: u32,\n\n}\n", "file_path": "crates/parser/src/util/builder.rs", "rank": 2, "score": 235070.80287183923 }, { "content": "pub fn find_std_module() -> Result<PathBuf, &'static str> {\n\n let mut local_std = env::current_dir().expect(\"Failed to get current directory!\");\n\n local_std.push(\"std\");\n\n if local_std.exists() {\n\n return Ok(local_std);\n\n }\n\n\n\n let mut user_std = dirs::data_dir().expect(\"Failed to get home directory!\");\n\n user_std.push(\"gelix\");\n\n user_std.push(\"std\");\n\n if user_std.exists() {\n\n return Ok(user_std);\n\n }\n\n\n\n let system_std = PathBuf::from(\"/usr/local/lib/gelix/std\");\n\n if system_std.exists() {\n\n return Ok(system_std);\n\n }\n\n\n\n Err(\"Failed to find standard library. 
Please make sure to follow the installation instructions.\")\n\n}\n", "file_path": "crates/gelixrs/src/parse_stage.rs", "rank": 3, "score": 225209.64146156795 }, { "content": "pub fn print_type_args(f: &mut Formatter, args: &[Type]) -> fmt::Result {\n\n if !args.is_empty() {\n\n let mut args = args.iter();\n\n args.next().map(|arg| write!(f, \"[{}\", arg));\n\n for arg in args {\n\n write!(f, \", {}\", arg)?;\n\n }\n\n write!(f, \"]\")?;\n\n }\n\n Ok(())\n\n}\n\n\n\nimpl<T> Clone for Instance<T> {\n\n /// Clone this instance; does 2 Rc clones\n\n fn clone(&self) -> Self {\n\n Self {\n\n ty: Rc::clone(&self.ty),\n\n args: Rc::clone(&self.args),\n\n }\n\n }\n", "file_path": "crates/gir-nodes/src/types.rs", "rank": 4, "score": 193694.34066512165 }, { "content": "fn run(args: Opt) -> Result<(), &'static str> {\n\n if !args.file.exists() {\n\n return Err(\"Given path does not exist.\");\n\n }\n\n\n\n let modules = if !args.no_std {\n\n let std_mod = gelixrs::find_std_module()?;\n\n vec![args.file.clone(), std_mod]\n\n } else {\n\n vec![args.file.clone()]\n\n };\n\n\n\n let code = gelixrs::parse_source(modules).map_err(|errors| {\n\n for file in errors {\n\n println!(\"{} error(s):\\n{}\", file.errors.len(), file);\n\n println!();\n\n }\n\n \"Parser encountered errors. 
Exiting.\"\n\n })?;\n\n\n", "file_path": "crates/gelixrs-cli/src/main.rs", "rank": 5, "score": 174259.07482268038 }, { "content": "fn parse_module(input: PathBuf, path: &mut ModPath) -> Result<Module, Errors> {\n\n let code = Rc::new(fs::read_to_string(&input).expect(\"Failed to read file.\"));\n\n let parse = parser::parse(&code);\n\n let cst = parse.map_err(|errors| Errors {\n\n errors,\n\n src: Some(Rc::clone(&code)),\n\n origin: format!(\"{}\", path),\n\n })?;\n\n Ok(Module::new(&path, &code, cst))\n\n}\n\n\n", "file_path": "crates/gelixrs/src/parse_stage.rs", "rank": 6, "score": 173653.15290409766 }, { "content": "pub fn parse_source(input: Vec<PathBuf>) -> Result<ParsedModules, Vec<Errors>> {\n\n let mut modules = Vec::new();\n\n for path in input {\n\n make_modules(path, &mut ModPath::new(), &mut modules)?;\n\n }\n\n Ok(modules)\n\n}\n\n\n", "file_path": "crates/gelixrs/src/parse_stage.rs", "rank": 7, "score": 173142.08656893694 }, { "content": "struct ADTConfig {\n\n name: &'static str,\n\n modifiers: &'static [SyntaxKind],\n\n has_members: bool,\n\n has_constructors: bool,\n\n has_cases: bool,\n\n force_extern: bool,\n\n}\n\n\n\nconst CLASS_CONF: ADTConfig = ADTConfig {\n\n name: \"class\",\n\n modifiers: &[SyntaxKind::Extern, SyntaxKind::Value],\n\n has_members: true,\n\n has_constructors: true,\n\n has_cases: false,\n\n force_extern: false,\n\n};\n\n\n\nconst IFACE_CONF: ADTConfig = ADTConfig {\n\n name: \"interface\",\n", "file_path": "crates/parser/src/declaration.rs", "rank": 9, "score": 153879.10244898195 }, { "content": "pub fn stem_to_smol(path: &PathBuf) -> SmolStr {\n\n SmolStr::new(path.file_stem().unwrap().to_str().unwrap())\n\n}\n\n\n", "file_path": "crates/gelixrs/src/parse_stage.rs", "rank": 10, "score": 150762.38425540848 }, { "content": "pub fn mutrc_new<T>(value: T) -> MutRc<T> {\n\n Rc::new(RefCell::new(value))\n\n}\n", "file_path": "crates/common/src/mutrc.rs", "rank": 11, "score": 147301.98214168975 }, { "content": "fn 
span_to_info(src: &str, span: Span) -> (usize, usize, usize) {\n\n let (line, line_offset) = src[0..span.start]\n\n .lines()\n\n .rev()\n\n .skip(1)\n\n .fold((0, 0), |(lc, offs), line| (lc + 1, offs + line.len() + 1));\n\n (\n\n line + 1,\n\n span.start - line_offset + 1,\n\n span.end - span.start,\n\n )\n\n}\n\n\n\n#[derive(Debug)]\n\npub enum ErrorSpan {\n\n Token(usize),\n\n Span(Range<u32>),\n\n None,\n\n}\n\n\n", "file_path": "crates/error/src/lib.rs", "rank": 12, "score": 146603.1856963492 }, { "content": "#[derive(Deserialize)]\n\nstruct Node {\n\n #[serde(default = \"default_template\")]\n\n template: String,\n\n #[serde(default)]\n\n context: Ctx,\n\n}\n\n\n", "file_path": "crates/ast-generator/src/main.rs", "rank": 14, "score": 137287.5484776997 }, { "content": "pub fn compile_gir(ast: ParsedModules, flags: GIRFlags) -> Result<CompiledGIR, Vec<Errors>> {\n\n GIRGenerator::new(ast, flags).consume()\n\n}\n\n\n", "file_path": "crates/gelixrs/src/lib.rs", "rank": 15, "score": 134390.4390041306 }, { "content": "\n\n pub fn next(&mut self) {\n\n self.current += 1;\n\n }\n\n\n\n pub fn save(&mut self) {\n\n self.saved = self.current;\n\n }\n\n\n\n pub fn restore(&mut self) {\n\n self.current = self.saved;\n\n }\n\n\n\n pub fn new(lexemes: &'s [Lexeme<'s>]) -> Self {\n\n Self {\n\n lexemes,\n\n current: 0,\n\n saved: 0,\n\n }\n\n }\n\n}\n", "file_path": "crates/parser/src/util/source.rs", "rank": 16, "score": 131274.27910854956 }, { "content": "use crate::Lexeme;\n\n\n\npub(crate) struct Source<'s> {\n\n lexemes: &'s [Lexeme<'s>],\n\n current: usize,\n\n saved: usize,\n\n}\n\n\n\nimpl<'s> Source<'s> {\n\n pub fn get_current(&self) -> Option<Lexeme<'s>> {\n\n self.lexemes.get(self.current).copied()\n\n }\n\n\n\n pub fn get_last(&self) -> Lexeme<'s> {\n\n self.lexemes.get(self.current - 1).copied().unwrap()\n\n }\n\n\n\n pub fn position(&self) -> usize {\n\n self.current\n\n }\n", "file_path": "crates/parser/src/util/source.rs", "rank": 17, "score": 
131273.25980895048 }, { "content": "use crate::{Node, NodeOrToken, NodeVec, Token};\n\nuse smallvec::SmallVec;\n\nuse smol_str::SmolStr;\n\nuse std::rc::Rc;\n\nuse syntax::kind::SyntaxKind;\n\n\n\n#[repr(transparent)]\n\npub struct NodeBuilder {\n\n nodes: Vec<WorkNode>,\n\n}\n\n\n\nimpl NodeBuilder {\n\n pub fn start_node(&mut self, kind: SyntaxKind) {\n\n let pos = self.nodes.last().map(|n| n.end).unwrap_or(0);\n\n self.nodes.push(WorkNode {\n\n children: SmallVec::new(),\n\n kind,\n\n start: pos,\n\n end: pos,\n\n })\n", "file_path": "crates/parser/src/util/builder.rs", "rank": 18, "score": 131257.61419447738 }, { "content": " kind,\n\n start: loc.start,\n\n end,\n\n },\n\n )\n\n }\n\n\n\n pub fn token(&mut self, kind: SyntaxKind, text: SmolStr) {\n\n let mut current = self.current();\n\n current.end += text.len() as u32;\n\n current\n\n .children\n\n .push(NodeOrToken::Token(Token::new(kind, text)))\n\n }\n\n\n\n fn current(&mut self) -> &mut WorkNode {\n\n self.nodes.last_mut().unwrap()\n\n }\n\n\n\n pub fn finish(mut self) -> Node {\n", "file_path": "crates/parser/src/util/builder.rs", "rank": 19, "score": 131249.02654480285 }, { "content": " }\n\n\n\n pub fn end_node(&mut self) {\n\n let node = self.nodes.pop().expect(\"No node?\");\n\n let mut current = self.current();\n\n current.end = node.end;\n\n current.children.push(NodeOrToken::Node(node.into_node()));\n\n }\n\n\n\n pub fn checkpoint(&self) -> Checkpoint {\n\n Checkpoint {\n\n node: self.nodes.len(),\n\n child_count: self.nodes.last().unwrap().children.len(),\n\n start: self.nodes.last().unwrap().end,\n\n }\n\n }\n\n\n\n pub fn start_node_at(&mut self, kind: SyntaxKind, loc: Checkpoint) {\n\n let parent = &mut self.nodes[loc.node - 1];\n\n let mut children: NodeVec =\n", "file_path": "crates/parser/src/util/builder.rs", "rank": 20, "score": 131245.06595094164 }, { "content": " self.nodes.pop().expect(\"Missing root node?\").into_node()\n\n }\n\n\n\n pub fn new() -> Self {\n\n Self {\n\n nodes: 
vec![WorkNode {\n\n children: SmallVec::new(),\n\n kind: SyntaxKind::Root,\n\n start: 0,\n\n end: 0,\n\n }],\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n", "file_path": "crates/parser/src/util/builder.rs", "rank": 21, "score": 131244.35631686906 }, { "content": " SmallVec::with_capacity(parent.children.len() - loc.child_count);\n\n\n\n let mut last_node_end = loc.start;\n\n let mut tokens_end = 0;\n\n for pop in self.nodes[loc.node - 1].children.drain(loc.child_count..) {\n\n match &pop {\n\n NodeOrToken::Node(node) => {\n\n last_node_end = node.text_range().end;\n\n tokens_end = 0;\n\n }\n\n NodeOrToken::Token(tok) => tokens_end += tok.text().len() as u32,\n\n }\n\n children.push(pop);\n\n }\n\n\n\n let end = last_node_end + tokens_end;\n\n self.nodes.insert(\n\n loc.node,\n\n WorkNode {\n\n children,\n", "file_path": "crates/parser/src/util/builder.rs", "rank": 22, "score": 131238.06394972117 }, { "content": "pub mod builder;\n\npub mod source;\n", "file_path": "crates/parser/src/util/mod.rs", "rank": 23, "score": 131065.81391080814 }, { "content": "pub fn produce_binary(\n\n module: Module,\n\n location: &OsStr,\n\n optimize_level: usize,\n\n) -> Result<(), Box<dyn Error>> {\n\n let mut tmp_dir = env::temp_dir();\n\n tmp_dir.push(\"gelixrs\");\n\n if !tmp_dir.exists() {\n\n fs::create_dir(&tmp_dir)?;\n\n }\n\n\n\n let mut module_file = tmp_dir;\n\n module_file.push(\"out.bc\");\n\n module.write_bitcode_to_path(&module_file);\n\n\n\n if optimize_level > 3 {\n\n return Err(\"Invalid optimize level.\".to_string().into());\n\n }\n\n let status = process::Command::new(\"clang\")\n\n .arg(\"-o\")\n", "file_path": "crates/ir/src/lib.rs", "rank": 24, "score": 127589.02717424833 }, { "content": "pub fn compile_gir_cached_std(\n\n ast: ParsedModules,\n\n std: &CompiledGIR,\n\n flags: GIRFlags,\n\n) -> Result<CompiledGIR, Vec<Errors>> {\n\n GIRGenerator::with_cached_std(ast, std, flags).consume()\n\n}\n\n\n", "file_path": "crates/gelixrs/src/lib.rs", "rank": 25, "score": 
124639.50900711246 }, { "content": "pub fn ir_context() -> Context {\n\n Context(context::Context::create())\n\n}\n\n\n", "file_path": "crates/ir/src/lib.rs", "rank": 26, "score": 123571.10500294968 }, { "content": "fn process_context(ctx: &mut Ctx) {\n\n if ctx.kind.is_empty() {\n\n ctx.kind = ctx.name.clone();\n\n }\n\n\n\n for item in &mut ctx.items {\n\n if item.kind.is_empty() {\n\n item.kind = item.r#type.clone();\n\n }\n\n\n\n let strat = item.strategy.clone();\n\n item.strategy = match &item.strategy[..] {\n\n \"single\" => format!(\"children().find_map({}::cast).unwrap()\", item.r#type),\n\n \"nested_single\" => format!(\"children().find(|i| i.kind() == SyntaxKind::{}).unwrap().children().find_map({}::cast).unwrap()\", item.kind, item.r#type),\n\n\n\n \"opt_single\" => format!(\"children().find_map({}::cast)\", item.r#type),\n\n \"nested_opt_single\" => format!(\n\n \"children().find(|i| i.kind() == SyntaxKind::{}).map(|i| i.children().find_map({}::cast)).flatten()\",\n\n item.kind, item.r#type\n\n ),\n", "file_path": "crates/ast-generator/src/main.rs", "rank": 27, "score": 119344.40344266189 }, { "content": "fn get_predicate(tok: SyntaxKind) -> IntPredicate {\n\n match tok {\n\n SyntaxKind::Greater => IntPredicate::SGT,\n\n SyntaxKind::GreaterEqual => IntPredicate::SGE,\n\n SyntaxKind::Less => IntPredicate::SLT,\n\n SyntaxKind::LessEqual => IntPredicate::SLE,\n\n SyntaxKind::EqualEqual => IntPredicate::EQ,\n\n SyntaxKind::BangEqual => IntPredicate::NE,\n\n _ => panic!(\"invalid tok\"),\n\n }\n\n}\n\n\n", "file_path": "crates/ir/src/generator/expr.rs", "rank": 28, "score": 117016.35804865815 }, { "content": "fn get_float_predicate(tok: SyntaxKind) -> FloatPredicate {\n\n match tok {\n\n SyntaxKind::Greater => FloatPredicate::OGT,\n\n SyntaxKind::GreaterEqual => FloatPredicate::OGE,\n\n SyntaxKind::Less => FloatPredicate::OLT,\n\n SyntaxKind::LessEqual => FloatPredicate::OLE,\n\n SyntaxKind::EqualEqual => FloatPredicate::OEQ,\n\n SyntaxKind::BangEqual => 
FloatPredicate::ONE,\n\n _ => panic!(\"invalid tok\"),\n\n }\n\n}\n", "file_path": "crates/ir/src/generator/expr.rs", "rank": 29, "score": 115754.79004840867 }, { "content": "fn get_expected_result(mut path: PathBuf) -> TestRes {\n\n // If the test is a directory, the wanted result is in a file 'expected' in the dir\n\n if path.is_dir() {\n\n path.push(\"expected\");\n\n }\n\n\n\n let code = read_to_string(path).expect(\"Couldn't get wanted result\");\n\n if code.starts_with(\"// P-ERR\") {\n\n Err(Failure::Parse(vec![]))\n\n } else if code.starts_with(\"// C-ERR\") {\n\n Err(Failure::Compile(vec![]))\n\n } else if code.starts_with(\"// LEAK\") {\n\n Err(Failure::Leak(1))\n\n } else {\n\n let split = code.split(\"*/\").next().unwrap();\n\n Ok(split[3..].to_string())\n\n }\n\n}\n\n\n", "file_path": "crates/e2etest/src/test.rs", "rank": 30, "score": 113371.384835284 }, { "content": "fn maybe_compile_stdlib(run: &mut TestRun) -> Result<(), Failure> {\n\n if run.gir_stdlib.is_none() {\n\n let code = gelixrs::parse_source(vec![std_mod()]).map_err(Failure::Parse)?;\n\n let flags = GIRFlags {\n\n library: true,\n\n ..GIRFlags::default()\n\n };\n\n let gir = gelixrs::compile_gir(code, flags).map_err(Failure::Compile)?;\n\n run.gir_stdlib = Some(gir);\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "crates/e2etest/src/test.rs", "rank": 31, "score": 111525.96194326313 }, { "content": "fn run_test(path: PathBuf, run: &mut TestRun) {\n\n run.total += 1;\n\n if run.options.verbose {\n\n println!(\"Running test: {}\", relative_path(&path))\n\n }\n\n\n\n let expected = get_expected_result(path.clone());\n\n let result = catch_unwind_silent(|| exec(path.clone(), run)).unwrap_or(Err(Failure::Panic));\n\n\n\n if result == expected {\n\n print!(\"{}\", GREEN_BOLD.paint(\".\"));\n\n } else {\n\n let rel_path = relative_path(&path);\n\n run.failed.push(FailedTest {\n\n rel_path,\n\n count: run.total,\n\n result,\n\n expected,\n\n });\n\n print!(\"{}\", RED_BOLD.paint(\"F\"));\n\n };\n\n 
io::stdout().flush().unwrap();\n\n}\n\n\n", "file_path": "crates/e2etest/src/test.rs", "rank": 32, "score": 111525.96194326313 }, { "content": "fn exec(path: PathBuf, run: &mut TestRun) -> TestRes {\n\n clear_state();\n\n\n\n let gir = if run.options.no_cache {\n\n let code = gelixrs::parse_source(vec![path, std_mod()]).map_err(Failure::Parse)?;\n\n gelixrs::compile_gir(code, GIRFlags::default())\n\n } else {\n\n maybe_compile_stdlib(run)?;\n\n let std = run.gir_stdlib.as_ref().unwrap();\n\n\n\n let code = gelixrs::parse_source(vec![path]).map_err(Failure::Parse)?;\n\n gelixrs::compile_gir_cached_std(code, std, GIRFlags::default())\n\n }\n\n .map_err(Failure::Compile)?;\n\n let module = gelixrs::compile_ir(run.ir_context.clone(), gir);\n\n\n\n if !run.options.no_jit {\n\n bench!(\"jit\", exec_jit(module))\n\n } else {\n\n bench!(\"bin\", exec_bin(module))\n\n }\n\n}\n\n\n", "file_path": "crates/e2etest/src/test.rs", "rank": 33, "score": 108630.79576731738 }, { "content": "pub fn compile_ir(context: Context, gir: CompiledGIR) -> CompiledIR {\n\n IRGenerator::new(context, gir).generate()\n\n}\n", "file_path": "crates/gelixrs/src/lib.rs", "rank": 34, "score": 107888.2206585214 }, { "content": "pub trait ToInstance<T> {\n\n fn to_inst(&self) -> Instance<T>;\n\n fn to_type(&self) -> Type;\n\n}\n\n\n\nimpl ToInstance<ADT> for MutRc<ADT> {\n\n fn to_inst(&self) -> Instance<ADT> {\n\n Instance::new_(Rc::clone(self))\n\n }\n\n\n\n fn to_type(&self) -> Type {\n\n Type::Adt(self.to_inst())\n\n }\n\n}\n\n\n\nimpl ToInstance<Function> for MutRc<Function> {\n\n fn to_inst(&self) -> Instance<Function> {\n\n Instance::new_(Rc::clone(self))\n\n }\n\n\n", "file_path": "crates/gir-nodes/src/types.rs", "rank": 35, "score": 104005.82127342082 }, { "content": "pub trait EmitGIRError<T> {\n\n fn or_err(self, cst: &CSTNode, err: GErr) -> Res<T>;\n\n fn or_error<F: FnOnce() -> GErr>(self, cst: &CSTNode, err: F) -> Res<T>;\n\n}\n\n\n\nimpl<T> EmitGIRError<T> for Option<T> {\n\n 
#[inline(always)]\n\n fn or_err(self, cst: &CSTNode, err: GErr) -> Res<T> {\n\n if let Some(i) = self {\n\n Ok(i)\n\n } else {\n\n Err(gir_err(cst.clone(), err))\n\n }\n\n }\n\n\n\n #[inline(always)]\n\n fn or_error<F: FnOnce() -> GErr>(self, cst: &CSTNode, err: F) -> Res<T> {\n\n if let Some(i) = self {\n\n Ok(i)\n\n } else {\n", "file_path": "crates/gir-generator/src/result.rs", "rank": 36, "score": 102525.04282628608 }, { "content": "type R = Result<(), fmt::Error>;\n\n\n\nimpl Display for Module {\n\n fn fmt(&self, f: &mut Formatter) -> R {\n\n writeln!(f, \"--> {}:\", self.path)?;\n\n writeln!(f, \"Used names: \")?;\n\n for name in &self.used_names {\n\n writeln!(f, \"{} \", name)?;\n\n }\n\n writeln!(f, \"\\n\\n\")?;\n\n for ty in self.declarations.values() {\n\n ty.display(f)?;\n\n writeln!(f)?;\n\n }\n\n Ok(())\n\n }\n\n}\n\n\n\nimpl Declaration {\n\n fn display(&self, f: &mut Formatter) -> R {\n", "file_path": "crates/gir-nodes/src/printer.rs", "rank": 37, "score": 102414.34692874158 }, { "content": "fn std_mod() -> PathBuf {\n\n let mut std_mod = PathBuf::from(\n\n env::current_dir()\n\n .unwrap()\n\n .parent()\n\n .unwrap()\n\n .parent()\n\n .unwrap(),\n\n );\n\n std_mod.push(\"std\");\n\n std_mod\n\n}\n\n\n", "file_path": "crates/e2etest/src/test.rs", "rank": 38, "score": 102133.82361104284 }, { "content": "use smol_str::SmolStr;\n\nuse strum_macros::*;\n\n\n\nuse GErr::*;\n\n\n\n#[derive(Debug, AsRefStr)]\n\npub enum GErr {\n\n // Unexpected parser token\n\n E001 {\n\n want: &'static str,\n\n after: &'static str,\n\n },\n\n // Invalid top-level declaration\n\n E002,\n\n // Expected type\n\n E003,\n\n // Expected ADT decl\n\n E004,\n\n // Expected ADT member info\n\n E005,\n", "file_path": "crates/error/src/kinds.rs", "rank": 39, "score": 89130.3740361261 }, { "content": " E317,\n\n // Cannot have multiple visibilities\n\n E318,\n\n // Method with same name already defined\n\n E319,\n\n // Cannot use data cases with enums that have fields\n\n E320,\n\n 
// Incorrect amount of type parameters\n\n E321,\n\n}\n\n\n\nimpl GErr {\n\n pub fn fmt(&self) -> String {\n\n match self {\n\n E001 { want, after } => format!(\"Expected {} after {}.\", want, after),\n\n E006 { modifier, on } => format!(\"Cannot have '{:?}' modifier on {}.\", modifier, on),\n\n\n\n E100(name) => format!(\"Name {} already defined in this module\", name),\n\n\n\n E200(name) => format!(\"Cannot assign to {}\", name),\n", "file_path": "crates/error/src/kinds.rs", "rank": 40, "score": 89119.50136462448 }, { "content": " // Invalid modifier\n\n E006 {\n\n modifier: String,\n\n on: &'static str,\n\n },\n\n // Multiple else on when\n\n E007,\n\n // Expected expression\n\n E008,\n\n\n\n // Already defined name\n\n E100(SmolStr),\n\n // Could not find main function\n\n E101,\n\n // Unknown module\n\n E102,\n\n // Unknown declaration\n\n E103,\n\n\n\n // Cannot assign to\n", "file_path": "crates/error/src/kinds.rs", "rank": 41, "score": 89117.47151118494 }, { "content": " }\n\n }\n\n\n\n fn fmt_list(&self, start: &str, list: &[SmolStr]) -> String {\n\n let mut buf = start.to_string();\n\n buf.push_str(&list[0]);\n\n for name in list.iter().skip(1) {\n\n buf.push_str(&format!(\", {}\", name));\n\n }\n\n buf\n\n }\n\n\n\n fn msg(&self) -> &str {\n\n match self {\n\n E002 => \"Expected top-level declaration.\",\n\n E003 => \"Expected type.\",\n\n E004 => \"Encountered invalid declaration inside declaration.\",\n\n E005 => \"Expected ':' or '=' after ADT member name.\",\n\n E007 => \"'when' expression can only have 1 'else' branch.\",\n\n E008 => \"Expected expression.\",\n", "file_path": "crates/error/src/kinds.rs", "rank": 42, "score": 89116.96277869421 }, { "content": " E236(name) => format!(\"Cannot have member and method '{}' with same name.\", name),\n\n E239 {\n\n index,\n\n argument,\n\n bound,\n\n } => format!(\n\n \"Type argument at position {} ({}) does not fulfill required bound ({}).\",\n\n index + 1,\n\n argument,\n\n bound\n\n ),\n\n\n\n 
E300(name) => format!(\"Unknown type '{}'.\", name),\n\n E309(names) => {\n\n let mut str = self.fmt_list(\n\n \"Cannot have uninitialized fields after constructor (Missing: \",\n\n names,\n\n );\n\n str.push_str(\").\");\n\n str\n", "file_path": "crates/error/src/kinds.rs", "rank": 43, "score": 89108.81962802261 }, { "content": " E236(SmolStr),\n\n // Not an iterator (must implement Iter or ToIter)\n\n E237,\n\n // Cannot use string literals with no_std enabled\n\n E238,\n\n // Type argument does not fulfill required bound\n\n E239 {\n\n index: usize,\n\n argument: String,\n\n bound: String,\n\n },\n\n // Field is not visible\n\n E240,\n\n // '?.' can only be used with nullable values\n\n E241,\n\n\n\n // Unknown type\n\n E300(String),\n\n // Cannot use function as type\n\n E301,\n", "file_path": "crates/error/src/kinds.rs", "rank": 44, "score": 89107.82624420885 }, { "content": " }\n\n E310 { expected, was } => format!(\n\n \"Body type does not match function return type (Expected {}, was {}).\",\n\n expected, was\n\n ),\n\n E314(names) => {\n\n let mut str = self.fmt_list(\"Missing methods in interface impl: \", names);\n\n str.push('.');\n\n str\n\n }\n\n E315 { expected, was } => format!(\n\n \"Incorrect return type on interface method (Expected {}, was {}).\",\n\n expected, was\n\n ),\n\n E316 { expected, was } => format!(\n\n \"Incorrect parameter type on interface method (Expected {}, was {}).\",\n\n expected, was\n\n ),\n\n\n\n _ => self.msg().to_string(),\n", "file_path": "crates/error/src/kinds.rs", "rank": 45, "score": 89107.64851462893 }, { "content": " E224 => \"Static access is only supported on enum types.\",\n\n E225 => \"Static access is not supported on values.\",\n\n E227 => \"'!' 
can only be used on boolean values.\",\n\n E228 => \"'-' can only be used on signed integers and floats.\",\n\n E229 => \"Branches of when must be of same type as the value compared.\",\n\n E231 => \"String escape sequence is unfinished.\",\n\n E232 => \"Unknown string escape sequence.\",\n\n E233 => \"Numeric literal does not fit into target type.\",\n\n E234 => \"ADT member may not be a weak reference.\",\n\n E235 => \"ADT member cannot be defined twice.\",\n\n E237 => \"Not an iterator (must implement Iter or ToIter).\",\n\n E238 => \"Cannot use string literals with no_std enabled.\",\n\n E240 => \"Field is not visible.\",\n\n E241 => \"'?.' can only be used with nullable values.\",\n\n\n\n E301 => \"Functions cannot be used as types.\",\n\n E302 => \"Nullable cannot be applied multiple times.\",\n\n E303 => \"'value' modifier cannot be applied to interfaces.\",\n\n E304 => \"Type does not take type arguments.\",\n\n E305 => \"Can't define main multiple times.\",\n", "file_path": "crates/error/src/kinds.rs", "rank": 46, "score": 89107.31799043245 }, { "content": " // Nullable cannot be applied multiple times\n\n E302,\n\n // 'value' modifier cannot be applied to interfaces\n\n E303,\n\n // Type does not take type arguments\n\n E304,\n\n // Can't define main multiple times\n\n E305,\n\n // Interface already defined for type\n\n E306,\n\n // Only interfaces can be implemented\n\n E307,\n\n // Cannot return a weak reference\n\n E308,\n\n // Cannot have uninitialized members after constructor\n\n E309(Vec<SmolStr>),\n\n // Body type does not match function return type\n\n E310 {\n\n expected: String,\n\n was: String,\n", "file_path": "crates/error/src/kinds.rs", "rank": 47, "score": 89106.23978432492 }, { "content": " E200(&'static str),\n\n // Mismatched types on assignment\n\n E201,\n\n // No implementation of operators\n\n E202,\n\n // Cannot call methods in constructors until all ADT members are initialized\n\n E203,\n\n // Fields cannot be called\n\n E204,\n\n 
// This variable may not be captured\n\n E205,\n\n // Undefined variable\n\n E206(SmolStr),\n\n // Break is only allowed in loops\n\n E207,\n\n // Cannot redefine variable in same scope\n\n E208(SmolStr),\n\n // Break and for must have same type\n\n E209 {\n\n expected: String,\n", "file_path": "crates/error/src/kinds.rs", "rank": 48, "score": 89104.08365982727 }, { "content": " was: usize,\n\n },\n\n // Call argument was the wrong type\n\n E218 {\n\n expected: String,\n\n was: String,\n\n },\n\n // No matching constructor found for arguments\n\n E219,\n\n // (If, For) condition must be a boolean\n\n E220,\n\n // Cannot get ADT method\n\n E221,\n\n // Cannot get uninitialized ADT member\n\n E222,\n\n // Unknown enum case\n\n E223,\n\n // Static access is only supported on enum types\n\n E224,\n\n // Static access is not supported on values\n", "file_path": "crates/error/src/kinds.rs", "rank": 49, "score": 89103.83343151766 }, { "content": " E206(name) => format!(\"Variable '{}' is not defined\", name),\n\n E208(name) => format!(\"Cannot redefine variable '{}' in the same scope.\", name),\n\n E209 { expected, was } => format!(\n\n \"Break expressions and for body must have same type (Expected {}, was {}).\",\n\n expected, was\n\n ),\n\n E212 { expected, was } => format!(\n\n \"Return expression in function has wrong type (Expected {}, was {}).\",\n\n expected, was\n\n ),\n\n E215(thing) => format!(\"'{}' cannot be called.\", thing),\n\n E216 { expected, was } => format!(\n\n \"Incorrect amount of function arguments. 
(Expected {}; got {}).\",\n\n expected, was\n\n ),\n\n E218 { expected, was } => format!(\n\n \"Call argument is the wrong type (Expected {}, was {}).\",\n\n expected, was\n\n ),\n\n E230(ty) => format!(\"Cannot assign type '{}' to a variable.\", ty),\n", "file_path": "crates/error/src/kinds.rs", "rank": 50, "score": 89103.04126503743 }, { "content": " was: String,\n\n },\n\n // Unknown field or method\n\n E210,\n\n // Can only call generic methods directly\n\n E211,\n\n // Return expr was different than function ret type\n\n E212 {\n\n expected: String,\n\n was: String,\n\n },\n\n // Cannot have type args on local variable\n\n E213,\n\n // Cannot infer types\n\n E214,\n\n // Cannot call\n\n E215(String),\n\n // Incorrect function parameter count\n\n E216 {\n\n expected: usize,\n", "file_path": "crates/error/src/kinds.rs", "rank": 51, "score": 89103.02643037315 }, { "content": "\n\n E101 => \"Could not find main function.\",\n\n E102 => \"Unknown module.\",\n\n E103 => \"Unresolved import.\",\n\n\n\n E201 => \"Value is a different type than assignment target.\",\n\n E202 => \"No implementation of operator found for types.\",\n\n E203 => \"Cannot call methods in constructors until all ADT members are initialized.\",\n\n E204 => \"Fields cannot be called.\",\n\n E205 => \"This variable may not be captured (weak reference)\",\n\n E207 => \"Break is only allowed in loops.\",\n\n E210 => \"Unknown field or method.\",\n\n E211 => \"Can only call generic methods directly.\",\n\n E213 => \"Cannot use type arguments on local variables.\",\n\n E214 => \"Cannot infer types (please specify explicitly).\",\n\n E219 => \"No matching constructor found for arguments.\",\n\n E220 => \"Condition must be a boolean.\",\n\n E221 => \"Cannot get ADT method (must be called).\",\n\n E222 => \"Cannot get uninitialized ADT member.\",\n\n E223 => \"Unknown enum case.\",\n", "file_path": "crates/error/src/kinds.rs", "rank": 52, "score": 89101.26789016198 }, { "content": " E225,\n\n // '!' 
can only be used on boolean values\n\n E227,\n\n // '-' can only be used on signed integers and floats\n\n E228,\n\n // Branches of when must be of same type as the value compared\n\n E229,\n\n // Cannot assign type to a variable\n\n E230(String),\n\n // Unfinished string escape sequence\n\n E231,\n\n // Unknown string escape sequence\n\n E232,\n\n // Numeric literal does not fit into target type.\n\n E233,\n\n // This type cannot be assigned to a field.\n\n E234,\n\n // ADT member cannot be defined twice\n\n E235,\n\n // Cannot have member and method with same name\n", "file_path": "crates/error/src/kinds.rs", "rank": 53, "score": 89099.73460260296 }, { "content": " },\n\n // Cannot infer type of member with default value (specify type explicitly)\n\n E311,\n\n // ADT contains constructors with duplicate signatures\n\n E312,\n\n // Method is not defined in interface\n\n E313,\n\n // Missing methods in interface impl\n\n E314(Vec<SmolStr>),\n\n // Incorrect return type on interface method\n\n E315 {\n\n expected: String,\n\n was: String,\n\n },\n\n // Incorrect parameter type on interface method\n\n E316 {\n\n expected: String,\n\n was: String,\n\n },\n\n // Unknown ADT field for constructor setter\n", "file_path": "crates/error/src/kinds.rs", "rank": 54, "score": 89098.899699959 }, { "content": " E306 => \"Interface already defined for type.\",\n\n E307 => \"Only interfaces can be implemented.\",\n\n E308 => \"Cannot return a weak reference.\",\n\n E311 => \"Cannot infer type of member with default value (specify type explicitly).\",\n\n E312 => \"ADT contains constructors with duplicate signatures.\",\n\n E313 => \"Method is not defined in interface.\",\n\n E317 => \"Unknown ADT field for constructor setter.\",\n\n E318 => \"Cannot have multiple visibilities.\",\n\n E319 => \"Method with same name already defined.\",\n\n E320 => \"Cannot use data cases with enums that have fields.\",\n\n E321 => \"Incorrect amount of type parameters.\",\n\n\n\n _ => 
unreachable!(),\n\n }\n\n }\n\n}\n", "file_path": "crates/error/src/kinds.rs", "rank": 55, "score": 89098.7042404938 }, { "content": " self.node_with(SyntaxKind::Initializer, Self::expression);\n\n self.end_node();\n\n }\n\n\n\n pub fn expression(&mut self) {\n\n match self.peek() {\n\n SyntaxKind::LeftBrace => self.block(),\n\n SyntaxKind::If => self.if_expression(),\n\n SyntaxKind::For => self.for_expression(),\n\n SyntaxKind::Return => self.ret_or_break_expr(SyntaxKind::ReturnExpr),\n\n SyntaxKind::Break => self.ret_or_break_expr(SyntaxKind::BreakExpr),\n\n SyntaxKind::When => self.when_expression(),\n\n _ => self.binary(0),\n\n }\n\n }\n\n\n\n fn block(&mut self) {\n\n self.start_node(SyntaxKind::Block);\n\n self.advance(); // Consume '{'\n\n while !self.check(SyntaxKind::RightBrace) && !self.is_at_end() {\n", "file_path": "crates/parser/src/expression.rs", "rank": 56, "score": 88692.65447092906 }, { "content": "use crate::Parser;\n\nuse error::GErr;\n\nuse syntax::kind::SyntaxKind;\n\n\n\nimpl<'p> Parser<'p> {\n\n /// A 'higher' expression is an expression that is only allowed to appear\n\n /// as top-level inside a block.\n\n /// This function can also produce a top-level non-higher expression.\n\n fn higher_expression(&mut self) {\n\n match self.peek() {\n\n SyntaxKind::Var | SyntaxKind::Val => self.variable(),\n\n _ => self.expression(),\n\n }\n\n }\n\n\n\n fn variable(&mut self) {\n\n self.start_node(SyntaxKind::Variable);\n\n self.advance(); // Consume 'var' or 'val'\n\n self.consume(SyntaxKind::Identifier, \"variable name\", \"var/val\");\n\n self.consume(SyntaxKind::Equal, \"'='\", \"variable name\");\n", "file_path": "crates/parser/src/expression.rs", "rank": 57, "score": 88690.83715180366 }, { "content": " self.advance(); // Consume 'when'\n\n self.consume(SyntaxKind::LeftParen, \"'('\", \"'when'\");\n\n self.node_with(SyntaxKind::ExprCondition, Self::expression);\n\n self.consume(SyntaxKind::RightParen, \"')'\", \"when value\");\n\n 
self.consume(SyntaxKind::LeftBrace, \"'{'\", \"when value\");\n\n\n\n let mut else_branch_found = false;\n\n while !self.matches(SyntaxKind::RightBrace) {\n\n if self.matches(SyntaxKind::Else) {\n\n if else_branch_found {\n\n self.error_at_current(GErr::E007);\n\n }\n\n self.consume(SyntaxKind::Arrow, \"'->'\", \"when condition\");\n\n self.start_node(SyntaxKind::ExprElse);\n\n self.expression();\n\n else_branch_found = true;\n\n } else {\n\n self.start_node(SyntaxKind::WhenBranch);\n\n self.node_with(SyntaxKind::ExprCondition, Self::expression);\n\n self.consume(SyntaxKind::Arrow, \"'->'\", \"when condition\");\n", "file_path": "crates/parser/src/expression.rs", "rank": 58, "score": 88689.77589444116 }, { "content": " self.consume(SyntaxKind::RightParen, \"')'\", \"for\");\n\n self.node_with(SyntaxKind::ExprBody, Self::expression);\n\n if self.matches(SyntaxKind::Else) {\n\n self.node_with(SyntaxKind::ExprElse, Self::expression);\n\n }\n\n\n\n self.end_node();\n\n }\n\n\n\n fn ret_or_break_expr(&mut self, kind: SyntaxKind) {\n\n self.start_node(kind);\n\n self.advance(); // Consume name\n\n if !self.matches_separator() {\n\n self.expression()\n\n }\n\n self.end_node();\n\n }\n\n\n\n fn when_expression(&mut self) {\n\n self.start_node(SyntaxKind::WhenExpr);\n", "file_path": "crates/parser/src/expression.rs", "rank": 59, "score": 88687.36800972182 }, { "content": " }\n\n }\n\n\n\n fn primary(&mut self) {\n\n match self.peek() {\n\n SyntaxKind::False\n\n | SyntaxKind::True\n\n | SyntaxKind::Int\n\n | SyntaxKind::Float\n\n | SyntaxKind::String\n\n | SyntaxKind::Null => {\n\n self.start_node(SyntaxKind::Literal);\n\n self.advance();\n\n self.end_node();\n\n }\n\n SyntaxKind::LeftParen => self.grouping_or_closure(),\n\n SyntaxKind::Identifier => self.identifier(),\n\n _ => self.error_at_current(GErr::E008),\n\n }\n\n }\n", "file_path": "crates/parser/src/expression.rs", "rank": 60, "score": 88682.33738612807 }, { "content": " self.higher_expression();\n\n }\n\n 
self.consume(SyntaxKind::RightBrace, \"'}'\", \"block\");\n\n self.end_node();\n\n }\n\n\n\n fn if_expression(&mut self) {\n\n self.start_node(SyntaxKind::IfExpr);\n\n self.advance(); // Consume 'if'\n\n self.consume(SyntaxKind::LeftParen, \"'('\", \"'if'\");\n\n\n\n self.node_with(SyntaxKind::ExprCondition, Self::expression);\n\n\n\n self.consume(SyntaxKind::RightParen, \"')'\", \"if condition\");\n\n self.node_with(SyntaxKind::ExprBody, Self::expression);\n\n\n\n if self.matches(SyntaxKind::Else) {\n\n self.node_with(SyntaxKind::ExprElse, Self::expression);\n\n }\n\n\n", "file_path": "crates/parser/src/expression.rs", "rank": 61, "score": 88681.85779554266 }, { "content": " self.end_node();\n\n }\n\n\n\n fn for_expression(&mut self) {\n\n self.start_node(SyntaxKind::ForExpr);\n\n self.advance(); // Consume 'for'\n\n self.consume(SyntaxKind::LeftParen, \"'('\", \"'for'\");\n\n\n\n if self.check_next(SyntaxKind::In) {\n\n // for (item in iterator)\n\n self.start_node(SyntaxKind::ForIterCond);\n\n self.consume(SyntaxKind::Identifier, \"item name\", \"'('\");\n\n self.advance(); // Consume the `in`\n\n self.expression();\n\n self.end_node();\n\n } else {\n\n // for (condition)\n\n self.node_with(SyntaxKind::ExprCondition, Self::expression);\n\n }\n\n\n", "file_path": "crates/parser/src/expression.rs", "rank": 62, "score": 88676.85699305992 }, { "content": " self.start_node(SyntaxKind::FunctionSignature);\n\n self.func_parameters();\n\n if self.matches(SyntaxKind::Colon) {\n\n self.type_()\n\n }\n\n self.end_node();\n\n\n\n self.consume(SyntaxKind::Arrow, \"'->'\", \"closure signature\");\n\n self.node_with(SyntaxKind::FunctionBody, Self::expression);\n\n\n\n self.end_node();\n\n }\n\n}\n", "file_path": "crates/parser/src/expression.rs", "rank": 63, "score": 88676.84786803352 }, { "content": " if !self.check(SyntaxKind::RightParen) {\n\n loop {\n\n self.node_with(SyntaxKind::CallArgument, Self::expression);\n\n if !self.matches(SyntaxKind::Comma) {\n\n break;\n\n 
}\n\n }\n\n }\n\n\n\n self.consume(SyntaxKind::RightParen, \"')'\", \"call arguments\");\n\n self.end_node();\n\n }\n\n\n\n SyntaxKind::Dot | SyntaxKind::QuestionDot => {\n\n self.start_node_at(checkpoint, SyntaxKind::Callee);\n\n let kind = match self.peek() {\n\n SyntaxKind::Dot => SyntaxKind::GetExpr,\n\n SyntaxKind::QuestionDot => SyntaxKind::GetNullableExpr,\n\n _ => unreachable!(),\n\n };\n", "file_path": "crates/parser/src/expression.rs", "rank": 64, "score": 88675.53798176783 }, { "content": " self.node_with(SyntaxKind::ExprBody, Self::expression);\n\n }\n\n self.end_node()\n\n }\n\n\n\n self.end_node();\n\n }\n\n\n\n fn binary(&mut self, minimum_binding_power: u8) {\n\n let checkpoint = self.checkpoint();\n\n self.unary();\n\n\n\n while let Some((lbp, rbp)) = self.peek().infix_binding_power() {\n\n if lbp < minimum_binding_power {\n\n return;\n\n }\n\n\n\n self.node_with(SyntaxKind::Operator, |this| {\n\n this.advance();\n\n });\n", "file_path": "crates/parser/src/expression.rs", "rank": 65, "score": 88675.36569719626 }, { "content": "\n\n if (self.check(SyntaxKind::Identifier)\n\n && (self.check_next(SyntaxKind::Colon) || self.check_next(SyntaxKind::Comma)))\n\n || self.check(SyntaxKind::RightParen)\n\n {\n\n self.start_node_at(checkpoint, SyntaxKind::ClosureLiteral);\n\n self.closure()\n\n } else {\n\n self.start_node_at(checkpoint, SyntaxKind::Grouping);\n\n self.grouping()\n\n }\n\n }\n\n\n\n fn grouping(&mut self) {\n\n self.expression();\n\n self.consume(SyntaxKind::RightParen, \"')'\", \"expression\");\n\n self.end_node();\n\n }\n\n\n\n fn closure(&mut self) {\n", "file_path": "crates/parser/src/expression.rs", "rank": 66, "score": 88673.21935258065 }, { "content": "\n\n fn identifier(&mut self) {\n\n self.start_node(SyntaxKind::Ident);\n\n self.advance();\n\n\n\n if self.matches(SyntaxKind::LeftBracket) {\n\n loop {\n\n self.type_();\n\n if !self.matches(SyntaxKind::Comma) {\n\n break;\n\n }\n\n }\n\n self.consume(SyntaxKind::RightBracket, \"']'\", 
\"type parameters\");\n\n }\n\n self.end_node()\n\n }\n\n\n\n fn grouping_or_closure(&mut self) {\n\n let checkpoint = self.checkpoint();\n\n self.advance(); // Consume '('\n", "file_path": "crates/parser/src/expression.rs", "rank": 67, "score": 88673.09832326054 }, { "content": " fn call(&mut self) {\n\n let checkpoint = self.checkpoint();\n\n self.primary();\n\n\n\n loop {\n\n match self.peek() {\n\n // Do not allow whitespace before a call's parenthesis -\n\n // this is to prevent groupings or closure literals on the next\n\n // line being incorrectly parsed as a call, for example:\n\n //\n\n // val a = \"hello\"\n\n // (\"in parens\")\n\n //\n\n // This would get parsed as a call `\"hello\"(\"in parens\")` when it should not.\n\n SyntaxKind::LeftParen if !self.last_was_whitespace() => {\n\n self.start_node_at(checkpoint, SyntaxKind::Callee);\n\n self.start_node_at(checkpoint, SyntaxKind::CallExpr);\n\n self.end_node();\n\n\n\n self.advance(); // Consume '('\n", "file_path": "crates/parser/src/expression.rs", "rank": 68, "score": 88670.30192285581 }, { "content": " self.start_node_at(checkpoint, kind);\n\n self.end_node();\n\n\n\n self.advance(); // Consume '.'\n\n self.identifier();\n\n self.end_node();\n\n }\n\n\n\n SyntaxKind::Colon => {\n\n self.start_node_at(checkpoint, SyntaxKind::Callee);\n\n self.start_node_at(checkpoint, SyntaxKind::GetStaticExpr);\n\n self.end_node();\n\n\n\n self.advance(); // Consume ':'\n\n self.consume(SyntaxKind::Identifier, \"property name\", \"':'\");\n\n self.end_node();\n\n }\n\n\n\n _ => break,\n\n }\n", "file_path": "crates/parser/src/expression.rs", "rank": 69, "score": 88668.74247755537 }, { "content": "\n\n self.start_node_at(checkpoint, SyntaxKind::BinaryExpr);\n\n self.binary(rbp);\n\n self.end_node();\n\n }\n\n }\n\n\n\n fn unary(&mut self) {\n\n if let Some(rbp) = self.peek().prefix_binding_power() {\n\n self.start_node(SyntaxKind::PrefixExpr);\n\n self.node_with(SyntaxKind::Operator, |this| {\n\n this.advance();\n\n 
});\n\n self.binary(rbp);\n\n self.end_node();\n\n } else {\n\n self.call();\n\n }\n\n }\n\n\n", "file_path": "crates/parser/src/expression.rs", "rank": 70, "score": 88668.03893563745 }, { "content": "use crate::Parser;\n\nuse error::GErr;\n\nuse syntax::kind::SyntaxKind;\n\n\n\n// All tokens that indicate that a function has a body (bodies are optional in enum and interface definitions).\n\nstatic START_OF_FN_BODY: [SyntaxKind; 2] = [SyntaxKind::LeftBrace, SyntaxKind::Equal];\n\n\n\n// All tokens that can be modifiers at all.\n\nstatic MODIFIERS: [SyntaxKind; 5] = [\n\n SyntaxKind::Mod,\n\n SyntaxKind::Priv,\n\n SyntaxKind::Extern,\n\n SyntaxKind::Variadic,\n\n SyntaxKind::Value,\n\n];\n\n\n\n// All tokens that can be modifiers on any declaration.\n\nstatic GLOBAL_MODIFIERS: [SyntaxKind; 2] = [SyntaxKind::Mod, SyntaxKind::Priv];\n\n\n\n// All tokens that can be modifiers on a class member.\n", "file_path": "crates/parser/src/declaration.rs", "rank": 71, "score": 88534.61999153015 }, { "content": " self.start_node(SyntaxKind::AdtMember);\n\n self.consume_modifiers();\n\n self.check_mods(&MEMBER_MODIFIERS, \"class member\");\n\n\n\n self.advance(); // Consume 'var' or 'val'\n\n self.consume(SyntaxKind::Identifier, \"variable name\", \"var/val\");\n\n\n\n match self.advance_checked() {\n\n SyntaxKind::Equal => self.node_with(SyntaxKind::Initializer, Self::expression),\n\n\n\n SyntaxKind::Colon => {\n\n self.type_();\n\n if self.matches(SyntaxKind::Equal) {\n\n self.node_with(SyntaxKind::Initializer, Self::expression);\n\n }\n\n }\n\n\n\n _ => self.error_at_current(GErr::E005),\n\n }\n\n\n", "file_path": "crates/parser/src/declaration.rs", "rank": 72, "score": 88532.59956231747 }, { "content": " _ => SyntaxKind::AdtDecl,\n\n };\n\n self.start_node_at(checkpoint, ty);\n\n\n\n match self.advance_checked() {\n\n SyntaxKind::Func => self.function(&FUNC_MODIFIERS),\n\n SyntaxKind::Class => self.generic_adt(CLASS_CONF),\n\n SyntaxKind::Export => 
self.import_declaration(),\n\n SyntaxKind::Import => self.import_declaration(),\n\n SyntaxKind::Interface => self.generic_adt(IFACE_CONF),\n\n SyntaxKind::Impl => self.iface_impl(),\n\n SyntaxKind::Enum => self.generic_adt(ENUM_CONF),\n\n _ => self.error_at_current(GErr::E002),\n\n }\n\n self.end_node();\n\n }\n\n\n\n fn function(&mut self, mods: &'static [SyntaxKind]) {\n\n self.function_(mods, false)\n\n }\n", "file_path": "crates/parser/src/declaration.rs", "rank": 73, "score": 88531.47763807008 }, { "content": " self.error_at_current(GErr::E006 {\n\n modifier: format!(\"{:?}\", mod_),\n\n on: name,\n\n })\n\n }\n\n }\n\n\n\n /// Reads a type name.\n\n pub fn type_(&mut self) {\n\n let check = self.checkpoint();\n\n self.start_node(SyntaxKind::Type);\n\n let token = self.advance();\n\n match token.kind {\n\n SyntaxKind::Identifier => {\n\n if self.matches(SyntaxKind::LeftBracket) {\n\n loop {\n\n self.type_();\n\n if !self.matches(SyntaxKind::Comma) {\n\n break;\n\n }\n", "file_path": "crates/parser/src/declaration.rs", "rank": 74, "score": 88530.16095513519 }, { "content": " // Peeks past modifiers by advancing until not looking at a\n\n // modifier, then restoring state and returning the first non-modifier.\n\n fn peek_past_modifiers(&mut self) -> SyntaxKind {\n\n self.modifiers.clear();\n\n self.source.save();\n\n while MODIFIERS.contains(&self.peek()) {\n\n self.source.next();\n\n }\n\n let res = self.peek();\n\n self.source.restore();\n\n res\n\n }\n\n\n\n fn check_mods(&mut self, allowed: &'static [SyntaxKind], name: &'static str) {\n\n for mod_ in self\n\n .modifiers\n\n .clone()\n\n .iter()\n\n .filter(|m| !allowed.contains(&m) && !GLOBAL_MODIFIERS.contains(&m))\n\n {\n", "file_path": "crates/parser/src/declaration.rs", "rank": 75, "score": 88529.15691057163 }, { "content": "\n\n fn function_(&mut self, mods: &'static [SyntaxKind], force_ext: bool) {\n\n let is_extern = force_ext\n\n || self\n\n .modifiers\n\n .iter()\n\n .any(|kind| *kind == 
SyntaxKind::Extern);\n\n self.func_signature(mods);\n\n\n\n if !is_extern {\n\n self.start_node(SyntaxKind::FunctionBody);\n\n if !self.check(SyntaxKind::LeftBrace) {\n\n self.consume(\n\n SyntaxKind::Equal,\n\n \"start of block or '='\",\n\n \"function signature\",\n\n );\n\n }\n\n self.expression();\n\n self.end_node();\n", "file_path": "crates/parser/src/declaration.rs", "rank": 76, "score": 88526.77599633983 }, { "content": "static MEMBER_MODIFIERS: [SyntaxKind; 0] = [];\n\n// All tokens that can be modifiers on a method.\n\nstatic METHOD_MODIFIERS: [SyntaxKind; 0] = [];\n\n// All tokens that can be modifiers on a constructor.\n\nstatic CONSTRUCTOR_MODIFIERS: [SyntaxKind; 0] = [];\n\n\n\n// All tokens that can be modifiers on a function.\n\nstatic FUNC_MODIFIERS: [SyntaxKind; 2] = [SyntaxKind::Extern, SyntaxKind::Variadic];\n\n// All tokens that can be modifiers on an import declaration.\n\nstatic IMPORT_MODIFIERS: [SyntaxKind; 0] = [];\n\n\n\nimpl<'p> Parser<'p> {\n\n pub fn declaration(&mut self) {\n\n let checkpoint = self.checkpoint();\n\n self.consume_modifiers();\n\n\n\n let ty = match self.peek() {\n\n SyntaxKind::Func => SyntaxKind::FunctionDecl,\n\n SyntaxKind::Import | SyntaxKind::Export => SyntaxKind::ImportDecl,\n\n SyntaxKind::Impl => SyntaxKind::ImplDecl,\n", "file_path": "crates/parser/src/declaration.rs", "rank": 77, "score": 88526.14367555265 }, { "content": " match self.peek() {\n\n SyntaxKind::Func => self.method(false),\n\n _ => self.error_at_current(GErr::E004),\n\n }\n\n }\n\n self.consume(SyntaxKind::RightBrace, \"'}'\", \"impl body\");\n\n }\n\n\n\n // Reads an identifier followed by optional generic type parameters.\n\n fn generic_ident(&mut self, after: &'static str) {\n\n self.start_node(SyntaxKind::Ident);\n\n self.consume(SyntaxKind::Identifier, \"a name\", after);\n\n if self.matches(SyntaxKind::LeftBracket) {\n\n while self.check(SyntaxKind::Identifier) {\n\n self.start_node(SyntaxKind::TypeParameter);\n\n self.advance();\n\n if 
self.matches(SyntaxKind::Colon) {\n\n self.type_();\n\n }\n\n self.end_node();\n", "file_path": "crates/parser/src/declaration.rs", "rank": 78, "score": 88525.3422825022 }, { "content": " }\n\n self.node_with(SyntaxKind::FunctionBody, Self::expression);\n\n }\n\n }\n\n\n\n fn enum_case(&mut self) {\n\n self.start_node(SyntaxKind::EnumCase);\n\n\n\n if self.peek_next() == SyntaxKind::LeftBrace {\n\n self.generic_adt(CASE_CONF);\n\n } else {\n\n self.generic_ident(\"<internal error>\");\n\n\n\n if self.matches(SyntaxKind::LeftParen) {\n\n while !self.check(SyntaxKind::RightParen) && !self.is_at_end() {\n\n self.consume_modifiers();\n\n\n\n self.start_node(SyntaxKind::Variable);\n\n self.consume_either(\n\n SyntaxKind::Val,\n", "file_path": "crates/parser/src/declaration.rs", "rank": 79, "score": 88524.86580466597 }, { "content": " };\n\n }\n\n\n\n fn func_signature(&mut self, mods: &'static [SyntaxKind]) {\n\n self.start_node(SyntaxKind::FunctionSignature);\n\n self.check_mods(&mods, \"function\");\n\n self.generic_ident(\"'func'\");\n\n self.consume(SyntaxKind::LeftParen, \"'('\", \"function name\");\n\n self.func_parameters();\n\n if self.matches(SyntaxKind::Arrow) {\n\n self.type_()\n\n }\n\n self.end_node();\n\n }\n\n\n\n pub fn func_parameters(&mut self) {\n\n if !self.check(SyntaxKind::RightParen) {\n\n loop {\n\n self.start_node(SyntaxKind::Parameter);\n\n self.consume(SyntaxKind::Identifier, \"parameter name\", \"left parenthesis\");\n", "file_path": "crates/parser/src/declaration.rs", "rank": 80, "score": 88523.48833080917 }, { "content": " SyntaxKind::Construct if conf.has_constructors => self.constructor(),\n\n SyntaxKind::Func => self.method(conf.force_extern),\n\n SyntaxKind::Identifier if conf.has_cases => self.enum_case(),\n\n _ => self.error_at_current(GErr::E004),\n\n }\n\n }\n\n\n\n self.consume(SyntaxKind::RightBrace, \"'}'\", \"body\");\n\n }\n\n\n\n fn method(&mut self, force_extern: bool) {\n\n self.start_node(SyntaxKind::Method);\n\n 
self.consume_modifiers();\n\n\n\n self.advance(); // Consume 'func'\n\n self.function_(&METHOD_MODIFIERS, force_extern);\n\n self.end_node();\n\n }\n\n\n\n fn adt_member(&mut self) {\n", "file_path": "crates/parser/src/declaration.rs", "rank": 81, "score": 88522.52250820397 }, { "content": " fn import_declaration(&mut self) {\n\n self.check_mods(&IMPORT_MODIFIERS, \"import/export\");\n\n self.consume(SyntaxKind::Identifier, \"path\", \"import/export\");\n\n while self.matches(SyntaxKind::Slash) {\n\n self.consume_either(\n\n SyntaxKind::Identifier,\n\n SyntaxKind::Plus,\n\n \"'+' or path\",\n\n \"'/'\",\n\n );\n\n }\n\n }\n\n\n\n fn iface_impl(&mut self) {\n\n self.node_with(SyntaxKind::Implementing, |this| this.type_());\n\n self.consume(SyntaxKind::For, \"'for'\", \"interface name\");\n\n self.node_with(SyntaxKind::Implementor, |this| this.type_());\n\n self.consume(SyntaxKind::LeftBrace, \"'{'\", \"impl body\");\n\n\n\n while !self.check(SyntaxKind::RightBrace) && !self.is_at_end() {\n", "file_path": "crates/parser/src/declaration.rs", "rank": 82, "score": 88521.63769475209 }, { "content": " self.end_node();\n\n }\n\n\n\n fn constructor(&mut self) {\n\n self.start_node(SyntaxKind::Constructor);\n\n self.consume_modifiers();\n\n self.check_mods(&CONSTRUCTOR_MODIFIERS, \"constructor\");\n\n\n\n self.start_node(SyntaxKind::FunctionSignature);\n\n self.advance(); // Consume 'construct'\n\n self.consume(SyntaxKind::LeftParen, \"'('\", \"'construct'\");\n\n\n\n if !self.check(SyntaxKind::RightParen) {\n\n loop {\n\n self.start_node(SyntaxKind::Parameter);\n\n self.consume(SyntaxKind::Identifier, \"parameter name\", \"'construct'\");\n\n if self.matches(SyntaxKind::Colon) {\n\n self.type_()\n\n }\n\n self.end_node();\n", "file_path": "crates/parser/src/declaration.rs", "rank": 83, "score": 88519.12728957288 }, { "content": " self.consume(SyntaxKind::Colon, \"':'\", \"parameter name\");\n\n self.type_();\n\n self.end_node();\n\n if !self.matches(SyntaxKind::Comma) {\n\n 
break;\n\n }\n\n }\n\n }\n\n self.consume(SyntaxKind::RightParen, \"')'\", \"parameters\");\n\n }\n\n\n\n fn generic_adt(&mut self, conf: ADTConfig) {\n\n self.check_mods(conf.modifiers, conf.name);\n\n self.generic_ident(\"ADT identifier\");\n\n\n\n self.consume(SyntaxKind::LeftBrace, \"'{'\", \"before body\");\n\n\n\n while !self.check(SyntaxKind::RightBrace) && !self.is_at_end() {\n\n match self.peek_past_modifiers() {\n\n SyntaxKind::Var | SyntaxKind::Val if conf.has_members => self.adt_member(),\n", "file_path": "crates/parser/src/declaration.rs", "rank": 84, "score": 88517.59785572544 }, { "content": " if !self.matches(SyntaxKind::Comma) {\n\n break;\n\n }\n\n }\n\n self.consume(SyntaxKind::RightBracket, \"']'\", \"type parameters\");\n\n }\n\n self.end_node();\n\n }\n\n\n\n fn consume_modifiers(&mut self) {\n\n self.modifiers.clear();\n\n while MODIFIERS.contains(&self.peek()) {\n\n let modifier = self.peek();\n\n self.modifiers.push(modifier);\n\n self.node_with(SyntaxKind::Modifier, |this| {\n\n this.advance();\n\n });\n\n }\n\n }\n\n\n", "file_path": "crates/parser/src/declaration.rs", "rank": 85, "score": 88516.83484800173 }, { "content": " if !self.matches(SyntaxKind::Comma) {\n\n break;\n\n }\n\n }\n\n }\n\n self.consume(SyntaxKind::RightParen, \"')'\", \"parameters\");\n\n self.end_node();\n\n\n\n self.maybe_fn_body();\n\n self.end_node();\n\n }\n\n\n\n fn maybe_fn_body(&mut self) {\n\n if START_OF_FN_BODY.contains(&self.peek()) {\n\n if !self.check(SyntaxKind::LeftBrace) {\n\n self.consume(\n\n SyntaxKind::Equal,\n\n \"start of block or '='\",\n\n \"function signature\",\n\n );\n", "file_path": "crates/parser/src/declaration.rs", "rank": 86, "score": 88515.83925506168 }, { "content": " self.consume(SyntaxKind::RightParen, \"')'\", \"closure parameters\");\n\n if self.matches(SyntaxKind::Colon) {\n\n self.type_()\n\n }\n\n }\n\n\n\n _ => self.error_at_current(GErr::E003),\n\n }\n\n\n\n if self.check(SyntaxKind::QuestionMark) {\n\n 
self.start_node_at(check, SyntaxKind::Type);\n\n self.end_node();\n\n self.advance();\n\n }\n\n self.end_node();\n\n }\n\n}\n\n\n", "file_path": "crates/parser/src/declaration.rs", "rank": 87, "score": 88515.82133868012 }, { "content": " modifiers: &[],\n\n has_members: false,\n\n has_constructors: false,\n\n has_cases: false,\n\n force_extern: true,\n\n};\n\n\n\nconst ENUM_CONF: ADTConfig = ADTConfig {\n\n name: \"enum\",\n\n modifiers: &[SyntaxKind::Value],\n\n has_members: true,\n\n has_constructors: false,\n\n has_cases: true,\n\n force_extern: false,\n\n};\n\n\n\nconst CASE_CONF: ADTConfig = ADTConfig {\n\n name: \"enum case\",\n\n modifiers: &[],\n\n has_members: true,\n\n has_constructors: true,\n\n has_cases: false,\n\n force_extern: false,\n\n};\n", "file_path": "crates/parser/src/declaration.rs", "rank": 88, "score": 88507.41662104276 }, { "content": " SyntaxKind::Var,\n\n \"'var' or 'val'\",\n\n \"left parenthesis\",\n\n );\n\n self.consume(SyntaxKind::Identifier, \"member name\", \"var/val\");\n\n self.consume(SyntaxKind::Colon, \"':'\", \"member name\");\n\n self.type_();\n\n self.end_node();\n\n\n\n if !self.matches(SyntaxKind::Comma) {\n\n break;\n\n }\n\n }\n\n self.consume(SyntaxKind::RightParen, \"')'\", \"members\");\n\n }\n\n }\n\n\n\n self.end_node();\n\n }\n\n\n", "file_path": "crates/parser/src/declaration.rs", "rank": 89, "score": 88507.3817445199 }, { "content": " }\n\n self.consume(SyntaxKind::RightBracket, \"']'\", \"type parameters\");\n\n } else if self.matches(SyntaxKind::Colon) {\n\n self.consume(SyntaxKind::Identifier, \"case name\", \"':'\");\n\n }\n\n }\n\n\n\n // Read inner\n\n SyntaxKind::Tilde | SyntaxKind::Star => self.type_(),\n\n\n\n SyntaxKind::LeftParen => {\n\n if !self.check(SyntaxKind::RightParen) {\n\n loop {\n\n self.type_();\n\n if !self.matches(SyntaxKind::Comma) {\n\n break;\n\n }\n\n }\n\n }\n\n\n", "file_path": "crates/parser/src/declaration.rs", "rank": 90, "score": 88503.51492666916 }, { "content": "use 
smallvec::SmallVec;\n\nuse smol_str::SmolStr;\n\nuse std::{fmt, fmt::Formatter, iter, ops::Range, rc::Rc};\n\nuse syntax::kind::SyntaxKind;\n\n\n\n#[derive(Clone, Hash, PartialEq, Eq)]\n\npub enum NodeOrToken {\n\n Node(Node),\n\n Token(Token),\n\n}\n\n\n\nimpl NodeOrToken {\n\n pub fn into_node(self) -> Option<Node> {\n\n match self {\n\n Self::Node(n) => Some(n),\n\n _ => None,\n\n }\n\n }\n\n\n\n pub fn into_token(self) -> Option<Token> {\n", "file_path": "crates/parser/src/nodes.rs", "rank": 91, "score": 87285.00733796174 }, { "content": " NodeOrToken::Node(self.clone()).debug_fmt(f, 0)\n\n }\n\n}\n\n\n\nimpl PartialEq for Node {\n\n fn eq(&self, other: &Self) -> bool {\n\n Rc::ptr_eq(&self.children, &other.children)\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, Hash, PartialEq, Eq)]\n\npub struct Token {\n\n text: SmolStr,\n\n kind: SyntaxKind,\n\n}\n\n\n\nimpl Token {\n\n pub fn text(&self) -> &SmolStr {\n\n &self.text\n\n }\n", "file_path": "crates/parser/src/nodes.rs", "rank": 92, "score": 87280.99753877234 }, { "content": "\n\n pub fn dummy() -> Self {\n\n Self {\n\n children: Rc::new(NodeVec::new()),\n\n kind: SyntaxKind::EndOfFile,\n\n span: 0..0,\n\n }\n\n }\n\n\n\n pub fn new(children: Rc<NodeVec>, kind: SyntaxKind, span: Range<u32>) -> Self {\n\n Self {\n\n children,\n\n kind,\n\n span,\n\n }\n\n }\n\n}\n\n\n\nimpl fmt::Debug for Node {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {\n", "file_path": "crates/parser/src/nodes.rs", "rank": 93, "score": 87273.71121411127 }, { "content": " match self.children_with_tokens().next()? 
{\n\n NodeOrToken::Token(t) => Some(t),\n\n NodeOrToken::Node(n) => n.first_token_nest(),\n\n }\n\n }\n\n\n\n pub fn last_token(&self) -> Option<Token> {\n\n self.children_with_tokens()\n\n .rev()\n\n .filter_map(NodeOrToken::into_token)\n\n .next()\n\n }\n\n\n\n pub fn kind(&self) -> SyntaxKind {\n\n self.kind\n\n }\n\n\n\n pub fn text_range(&self) -> Range<u32> {\n\n self.span.clone()\n\n }\n", "file_path": "crates/parser/src/nodes.rs", "rank": 94, "score": 87272.60341933105 }, { "content": " pub fn kind(&self) -> SyntaxKind {\n\n match self {\n\n NodeOrToken::Node(n) => n.kind,\n\n NodeOrToken::Token(t) => t.kind,\n\n }\n\n }\n\n\n\n fn debug_fmt(&self, f: &mut Formatter<'_>, indent_size: usize) -> fmt::Result {\n\n let indent = iter::repeat(' ').take(indent_size).collect::<String>();\n\n write!(f, \"{}{:?}\", indent, self.kind())?;\n\n match self {\n\n NodeOrToken::Node(n) => {\n\n writeln!(f, \" @ {:?}\", n.span)?;\n\n for child in n.children.iter() {\n\n child.debug_fmt(f, indent_size + 2)?;\n\n }\n\n Ok(())\n\n }\n\n NodeOrToken::Token(t) => writeln!(f, \" {:?}\", t.text),\n\n }\n", "file_path": "crates/parser/src/nodes.rs", "rank": 95, "score": 87271.80260428572 }, { "content": " }\n\n}\n\n\n\nimpl fmt::Debug for NodeOrToken {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {\n\n self.debug_fmt(f, 0)\n\n }\n\n}\n\n\n\npub(crate) type NodeVec = SmallVec<[NodeOrToken; 5]>;\n\n\n\n#[derive(Clone, Hash, Eq)]\n\n#[allow(clippy::derive_hash_xor_eq)]\n\npub struct Node {\n\n children: Rc<NodeVec>,\n\n kind: SyntaxKind,\n\n span: Range<u32>,\n\n}\n\n\n\nimpl Node {\n", "file_path": "crates/parser/src/nodes.rs", "rank": 96, "score": 87271.44437739771 }, { "content": " match self {\n\n Self::Token(t) => Some(t),\n\n _ => None,\n\n }\n\n }\n\n\n\n pub fn as_node(&self) -> Option<&Node> {\n\n match self {\n\n Self::Node(n) => Some(n),\n\n _ => None,\n\n }\n\n }\n\n\n\n pub fn as_token(&self) -> Option<&Token> {\n\n match self {\n\n Self::Token(t) => 
Some(t),\n\n _ => None,\n\n }\n\n }\n\n\n", "file_path": "crates/parser/src/nodes.rs", "rank": 97, "score": 87270.8191700788 }, { "content": "\n\n pub fn kind(&self) -> SyntaxKind {\n\n self.kind\n\n }\n\n\n\n pub fn new(kind: SyntaxKind, text: SmolStr) -> Self {\n\n Self { text, kind }\n\n }\n\n}\n", "file_path": "crates/parser/src/nodes.rs", "rank": 98, "score": 87269.93246461169 }, { "content": " pub fn children(&self) -> impl Iterator<Item = Node> + '_ {\n\n self.children_with_tokens()\n\n .filter_map(NodeOrToken::into_node)\n\n }\n\n\n\n pub fn children_with_tokens(&self) -> impl DoubleEndedIterator<Item = NodeOrToken> + '_ {\n\n self.children.iter().cloned()\n\n }\n\n\n\n pub fn first_child(&self) -> Option<Node> {\n\n self.children().next()\n\n }\n\n\n\n pub fn first_token(&self) -> Option<Token> {\n\n self.children_with_tokens()\n\n .filter_map(NodeOrToken::into_token)\n\n .next()\n\n }\n\n\n\n pub fn first_token_nest(&self) -> Option<Token> {\n", "file_path": "crates/parser/src/nodes.rs", "rank": 99, "score": 87263.92440852687 } ]
Rust
src/api.rs
sinsoku/miteras-cli
2dfca1619353bfb35992de7e9ae8e9878a89062c
use crate::config::Config; use chrono::prelude::*; #[cfg(test)] use mockito; use reqwest::blocking::{Client, Response}; use reqwest::header; use scraper::{Html, Selector}; use serde::{Deserialize, Serialize}; use std::collections::HashMap; static APP_USER_AGENT: &str = "miteras-cli"; pub struct Api { config: Config, client: Client, } #[derive(Debug, Serialize, Deserialize)] #[serde(rename_all = "camelCase")] struct ClockInParams { clock_in_condition: HashMap<String, i32>, daily_place_evidence: HashMap<String, i32>, work_date_string: String, enable_break_time: bool, } #[derive(Debug, Serialize, Deserialize)] #[serde(rename_all = "camelCase")] struct ClockOutParams { clock_out_condition: HashMap<String, i32>, daily_place_evidence: HashMap<String, i32>, work_date_string: String, stamp_break_start: String, stamp_break_end: String, updated_date_string: String, } fn condition_value(condition: &str) -> i32 { match condition { "best" => 1, "good" => 2, "normal" => 3, "bad" => 4, _ => -1, } } fn work_date_string() -> String { let today = Local::today(); format!("{}-{}-{}", today.year(), today.month(), today.day()) } fn parse_csrf(body: String) -> String { let fragment = Html::parse_fragment(&body); let selector = Selector::parse("meta[name='_csrf'], input[name='_csrf']").unwrap(); let tag = fragment.select(&selector).next().unwrap().value(); let attr = if tag.name() == "meta" { "content" } else { "value" }; tag.attr(attr).unwrap().to_string() } fn parse_updated_date_string(body: String) -> String { let fragment = Html::parse_fragment(&body); let selector = Selector::parse("#daily-attendance").unwrap(); let tag = fragment.select(&selector).next().unwrap().value(); tag.attr("data-updated-date").unwrap().to_string() } impl Api { pub fn new(config: &Config) -> Api { let conf = Config::new( (*config.org).to_string(), (*config.username).to_string(), (*config.password).to_string(), ); let client = Client::builder() .cookie_store(true) .user_agent(APP_USER_AGENT) .build() 
.unwrap(); Api { config: conf, client: client, } } pub fn login(&self) -> Result<Response, reqwest::Error> { let login_url = self.build_url("login"); let login_res = self.client.get(&login_url).send().unwrap(); let csrf = parse_csrf(login_res.text().unwrap()); let auth_url = self.build_url("auth"); let mut params: HashMap<&str, &str> = HashMap::new(); params.insert("username", &self.config.username); params.insert("password", &self.config.password); params.insert("_csrf", &csrf); self.client .post(&auth_url) .form(&params) .header(header::REFERER, login_url) .send() } pub fn clock_in(&self, condition: &str) -> Result<Response, reqwest::Error> { let auth_res = self.login().unwrap(); let csrf = parse_csrf(auth_res.text().unwrap()); let cico_url = self.build_url("cico"); let url = self.build_url("submitClockIn"); let mut clock_in_condition = HashMap::new(); clock_in_condition.insert("condition".to_string(), condition_value(condition)); let params = ClockInParams { clock_in_condition: clock_in_condition, daily_place_evidence: HashMap::new(), work_date_string: work_date_string(), enable_break_time: false, }; self.client .post(&url) .json(&params) .header("X-CSRF-TOKEN", csrf) .header(header::REFERER, cico_url) .send() } pub fn clock_out(&self, condition: &str) -> Result<Response, reqwest::Error> { let auth_res = self.login().unwrap(); let auth_body = auth_res.text().unwrap(); let csrf = parse_csrf(auth_body.clone()); let updated_date_string = parse_updated_date_string(auth_body); let cico_url = self.build_url("cico"); let url = self.build_url("submitClockOut"); let mut clock_out_condition = HashMap::new(); clock_out_condition.insert("condition".to_string(), condition_value(condition)); let params = ClockOutParams { clock_out_condition: clock_out_condition, daily_place_evidence: HashMap::new(), work_date_string: work_date_string(), stamp_break_start: "".to_string(), stamp_break_end: "".to_string(), updated_date_string: updated_date_string, }; self.client .post(&url) 
.json(&params) .header("X-CSRF-TOKEN", csrf) .header(header::REFERER, cico_url) .send() } fn build_url(&self, path: &str) -> String { #[cfg(not(test))] let endpoint = "https://kintai.miteras.jp"; #[cfg(test)] let endpoint = &mockito::server_url(); format!("{}/{}/{}", endpoint, self.config.org, path) } } #[cfg(test)] mod tests { use super::condition_value; #[test] fn str_to_num() { assert_eq!(1, condition_value("best")); assert_eq!(2, condition_value("good")); assert_eq!(3, condition_value("normal")); assert_eq!(4, condition_value("bad")); } }
use crate::config::Config; use chrono::prelude::*; #[cfg(test)] use mockito; use reqwest::blocking::{Client, Response}; use reqwest::header; use scraper::{Html, Selector}; use serde::{Deserialize, Serialize}; use std::collections::HashMap; static APP_USER_AGENT: &str = "miteras-cli"; pub struct Api { config: Config, client: Client, } #[derive(Debug, Serialize, Deserialize)] #[serde(rename_all = "camelCase")] struct ClockInParams { clock_in_condition: HashMap<String, i32>, daily_place_evidence: HashMap<String, i32>, work_date_string: String, enable_break_time: bool, } #[derive(Debug, Serialize, Deserialize)] #[serde(rename_all = "camelCase")] struct ClockOutParams { clock_out_condition: HashMap<String, i32>, daily_place_evidence: HashMap<String, i32>, work_date_string: String, stamp_break_start: String, stamp_break_end: String, updated_date_string: String, } fn condition_value(condition: &str) -> i32 { match condition { "best" => 1, "good" => 2, "normal" => 3, "bad" => 4, _ => -1, } } fn work_date_string() -> String { let today = Local::today(); format!("{}-{}-{}", today.year(), today.month(), today.day()) } fn parse_csrf(body: String) -> String { let fragment = Html::parse_fragment(&body); let selector = Selector::parse("meta[name='_csrf'], input[name='_csrf']").unwrap(); let tag = fragment.select(&selector).next().unwrap().value(); let attr = if tag.name() == "meta" { "content" } else { "value" }; tag.attr(attr).unwrap().to_string() } fn parse_updated_date_string(body: String) -> String { let fragment = Html::parse_fragment(&body); let selector = Selector::parse("#daily-attendance").unwrap(); let tag = fragment.select(&selector).next().unwrap().value(); tag.attr("data-updated-date").unwrap().to_string() } impl Api { pub fn new(config: &Config) -> Api { let conf = Config::new( (*config.org).to_string(), (*config.username).to_string(), (*config.password).to_string(), ); let client = Client::builder() .cookie_store(true) .user_agent(APP_USER_AGENT) .build() 
.unwrap(); Api { config: conf, client: client, } }
pub fn clock_in(&self, condition: &str) -> Result<Response, reqwest::Error> { let auth_res = self.login().unwrap(); let csrf = parse_csrf(auth_res.text().unwrap()); let cico_url = self.build_url("cico"); let url = self.build_url("submitClockIn"); let mut clock_in_condition = HashMap::new(); clock_in_condition.insert("condition".to_string(), condition_value(condition)); let params = ClockInParams { clock_in_condition: clock_in_condition, daily_place_evidence: HashMap::new(), work_date_string: work_date_string(), enable_break_time: false, }; self.client .post(&url) .json(&params) .header("X-CSRF-TOKEN", csrf) .header(header::REFERER, cico_url) .send() } pub fn clock_out(&self, condition: &str) -> Result<Response, reqwest::Error> { let auth_res = self.login().unwrap(); let auth_body = auth_res.text().unwrap(); let csrf = parse_csrf(auth_body.clone()); let updated_date_string = parse_updated_date_string(auth_body); let cico_url = self.build_url("cico"); let url = self.build_url("submitClockOut"); let mut clock_out_condition = HashMap::new(); clock_out_condition.insert("condition".to_string(), condition_value(condition)); let params = ClockOutParams { clock_out_condition: clock_out_condition, daily_place_evidence: HashMap::new(), work_date_string: work_date_string(), stamp_break_start: "".to_string(), stamp_break_end: "".to_string(), updated_date_string: updated_date_string, }; self.client .post(&url) .json(&params) .header("X-CSRF-TOKEN", csrf) .header(header::REFERER, cico_url) .send() } fn build_url(&self, path: &str) -> String { #[cfg(not(test))] let endpoint = "https://kintai.miteras.jp"; #[cfg(test)] let endpoint = &mockito::server_url(); format!("{}/{}/{}", endpoint, self.config.org, path) } } #[cfg(test)] mod tests { use super::condition_value; #[test] fn str_to_num() { assert_eq!(1, condition_value("best")); assert_eq!(2, condition_value("good")); assert_eq!(3, condition_value("normal")); assert_eq!(4, condition_value("bad")); } }
pub fn login(&self) -> Result<Response, reqwest::Error> { let login_url = self.build_url("login"); let login_res = self.client.get(&login_url).send().unwrap(); let csrf = parse_csrf(login_res.text().unwrap()); let auth_url = self.build_url("auth"); let mut params: HashMap<&str, &str> = HashMap::new(); params.insert("username", &self.config.username); params.insert("password", &self.config.password); params.insert("_csrf", &csrf); self.client .post(&auth_url) .form(&params) .header(header::REFERER, login_url) .send() }
function_block-full_function
[ { "content": "pub fn build_cli() -> App<'static, 'static> {\n\n app_from_crate!()\n\n .setting(AppSettings::SubcommandRequiredElseHelp)\n\n .subcommand(\n\n SubCommand::with_name(\"login\").about(\"Authenticate to MITERAS and save credentials\"),\n\n )\n\n .subcommand(\n\n SubCommand::with_name(\"clock-in\")\n\n .about(\"Clock in with today's condition\")\n\n .arg(\n\n Arg::from_usage(\"[condition] 'Specify your condition'\")\n\n .takes_value(true)\n\n .possible_values(&[\"best\", \"good\", \"normal\", \"bad\"])\n\n .default_value(\"good\"),\n\n ),\n\n )\n\n .subcommand(\n\n SubCommand::with_name(\"clock-out\")\n\n .about(\"Clock out with today's condition\")\n\n .arg(\n\n Arg::from_usage(\"[condition] 'Specify your condition'\")\n\n .takes_value(true)\n\n .possible_values(&[\"best\", \"good\", \"normal\", \"bad\"])\n\n .default_value(\"good\"),\n\n ),\n\n )\n\n}\n", "file_path": "src/cli.rs", "rank": 1, "score": 102780.16932900688 }, { "content": "pub fn build_app() -> App<'static, 'static> {\n\n cli::build_cli()\n\n}\n\n\n", "file_path": "src/app.rs", "rank": 2, "score": 102780.16932900688 }, { "content": "pub fn clock_in<W: Write>(matches: &ArgMatches, mut writer: W) {\n\n let condition = matches.value_of(\"condition\").unwrap();\n\n let config = Config::load().unwrap();\n\n let api = Api::new(&config);\n\n\n\n let res = api.clock_in(condition).unwrap();\n\n let json: Value = serde_json::from_str(&res.text().unwrap()).unwrap();\n\n if json[\"returnValue\"] == \"Success\" {\n\n let clock_time = json[\"clockTime\"].as_str().unwrap();\n\n write!(writer, \"clock-in at {}\\n\", clock_time).unwrap();\n\n } else {\n\n write!(writer, \"clock-in failed.\\n\").unwrap();\n\n }\n\n}\n\n\n", "file_path": "src/app.rs", "rank": 6, "score": 58215.12086658311 }, { "content": "pub fn run<W: Write>(matches: ArgMatches, mut writer: W) {\n\n if let Some(_matches) = matches.subcommand_matches(\"login\") {\n\n login(None::<Empty>, &mut writer);\n\n }\n\n\n\n if let Some(matches) = 
matches.subcommand_matches(\"clock-in\") {\n\n clock_in(matches, &mut writer);\n\n }\n\n\n\n if let Some(matches) = matches.subcommand_matches(\"clock-out\") {\n\n clock_out(matches, &mut writer);\n\n }\n\n}\n\n\n", "file_path": "src/app.rs", "rank": 7, "score": 58215.12086658311 }, { "content": "pub fn clock_out<W: Write>(matches: &ArgMatches, mut writer: W) {\n\n let condition = matches.value_of(\"condition\").unwrap();\n\n let config = Config::load().unwrap();\n\n let api = Api::new(&config);\n\n\n\n let res = api.clock_out(condition).unwrap();\n\n let json: Value = serde_json::from_str(&res.text().unwrap()).unwrap();\n\n if json[\"returnValue\"] == \"Success\" {\n\n let clock_time = json[\"clockTime\"].as_str().unwrap();\n\n write!(writer, \"clock-out at {}\\n\", clock_time).unwrap();\n\n } else {\n\n write!(writer, \"clock-out failed.\\n\").unwrap();\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::{build_app, login, run, Config};\n\n use chrono::prelude::*;\n\n use mockito::{mock, Matcher, Mock};\n", "file_path": "src/app.rs", "rank": 8, "score": 58215.12086658311 }, { "content": "fn main() {\n\n let outdir = match env::var_os(\"OUT_DIR\") {\n\n None => return,\n\n Some(outdir) => outdir,\n\n };\n\n let mut app = build_cli();\n\n app.gen_completions(crate_name!(), Shell::Bash, &outdir);\n\n app.gen_completions(crate_name!(), Shell::Zsh, &outdir);\n\n}\n", "file_path": "build.rs", "rank": 11, "score": 49435.26891687152 }, { "content": "fn config_path() -> PathBuf {\n\n let base_path = if cfg!(test) {\n\n std::env::current_dir().unwrap().join(\"tmp\")\n\n } else {\n\n dirs::home_dir().unwrap().join(\".config\")\n\n };\n\n base_path.join(\"miteras.toml\")\n\n}\n\n\n\nimpl Config {\n\n pub fn new(org: String, username: String, password: String) -> Config {\n\n Config {\n\n org: org,\n\n username: username,\n\n password: password,\n\n }\n\n }\n\n\n\n pub fn load() -> Option<Config> {\n\n let config_path = config_path();\n", "file_path": 
"src/config.rs", "rank": 12, "score": 46623.56411007064 }, { "content": "pub fn login<R: BufRead, W: Write>(mut source: Option<R>, mut writer: W) {\n\n write!(&mut writer, \"Try logging in to MITERAS.\\n\").unwrap();\n\n\n\n let org = read_input(\"Org\", false, source.as_mut(), &mut writer);\n\n let username = read_input(\"Username\", false, source.as_mut(), &mut writer);\n\n let password = read_input(\"Password\", true, source.as_mut(), &mut writer);\n\n let config = Config::new(org, username, password);\n\n\n\n let api = Api::new(&config);\n\n let res = api.login().unwrap();\n\n\n\n if res.url().path().ends_with(\"/cico\") {\n\n config.save().ok();\n\n write!(&mut writer, \"\\nLogin successful.\\n\").unwrap();\n\n } else {\n\n write!(&mut writer, \"\\nLogin failed.\\n\").unwrap();\n\n }\n\n}\n\n\n", "file_path": "src/app.rs", "rank": 13, "score": 32651.531107091316 }, { "content": "#[test]\n\nfn no_args() {\n\n let mut cmd = Command::cargo_bin(\"miteras\").unwrap();\n\n cmd.assert()\n\n .failure()\n\n .stderr(predicate::str::contains(CLI_DESCRIBE));\n\n}\n\n\n", "file_path": "tests/main.rs", "rank": 14, "score": 26724.517036705714 }, { "content": "fn main() {\n\n let matches = app::build_app().get_matches();\n\n app::run(matches, io::stdout());\n\n}\n", "file_path": "src/main.rs", "rank": 15, "score": 26724.517036705714 }, { "content": "#[test]\n\nfn with_version_args() {\n\n let mut cmd = Command::cargo_bin(\"miteras\").unwrap();\n\n cmd.arg(\"--version\")\n\n .assert()\n\n .success()\n\n .stdout(format!(\"miteras {}\\n\", crate_version!()));\n\n}\n", "file_path": "tests/main.rs", "rank": 16, "score": 25707.58748623737 }, { "content": "#[test]\n\nfn with_help_args() {\n\n let mut cmd = Command::cargo_bin(\"miteras\").unwrap();\n\n cmd.arg(\"--help\")\n\n .assert()\n\n .success()\n\n .stdout(predicate::str::contains(CLI_DESCRIBE));\n\n}\n\n\n", "file_path": "tests/main.rs", "rank": 17, "score": 25707.58748623737 }, { "content": "#[macro_use]\n\nextern crate 
clap;\n\n\n\nuse clap::Shell;\n\nuse std::env;\n\n\n\ninclude!(\"src/cli.rs\");\n\n\n", "file_path": "build.rs", "rank": 18, "score": 23009.27258157582 }, { "content": "use serde::{Deserialize, Serialize};\n\nuse std::fs::{self, File};\n\nuse std::io::Write;\n\nuse std::path::PathBuf;\n\n\n\n#[derive(Deserialize, Serialize)]\n\npub struct Config {\n\n pub org: String,\n\n pub username: String,\n\n pub password: String,\n\n}\n\n\n", "file_path": "src/config.rs", "rank": 19, "score": 21437.736883667778 }, { "content": " let content: String = fs::read_to_string(config_path).unwrap();\n\n let config = toml::from_str(&content).ok()?;\n\n\n\n Some(config)\n\n }\n\n\n\n pub fn save(&self) -> Result<(), Box<dyn std::error::Error>> {\n\n let config_path = config_path();\n\n let toml = toml::to_string(&self).unwrap();\n\n let mut file = File::create(config_path)?;\n\n write!(file, \"{}\", toml)?;\n\n file.flush()?;\n\n\n\n Ok(())\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::{config_path, Config};\n", "file_path": "src/config.rs", "rank": 20, "score": 21436.439666732847 }, { "content": " use std::fs;\n\n\n\n #[test]\n\n fn test_config_path() {\n\n let path = std::env::current_dir().unwrap().join(\"tmp/miteras.toml\");\n\n assert_eq!(path, config_path());\n\n }\n\n\n\n #[test]\n\n fn test_save_and_load() {\n\n let path = config_path();\n\n if path.exists() {\n\n fs::remove_file(path).ok();\n\n }\n\n\n\n let config = Config::new(\n\n \"A123456\".to_string(),\n\n \"sinsoku\".to_string(),\n\n \"pass1234\".to_string(),\n\n );\n", "file_path": "src/config.rs", "rank": 21, "score": 21431.799410314317 }, { "content": "\n\n config.save().ok();\n\n assert_eq!(true, config_path().exists());\n\n\n\n let loaded = Config::load().unwrap();\n\n assert_eq!(\"A123456\", loaded.org);\n\n assert_eq!(\"sinsoku\", loaded.username);\n\n assert_eq!(\"pass1234\", loaded.password);\n\n }\n\n}\n", "file_path": "src/config.rs", "rank": 22, "score": 21428.82653487061 }, { "content": "fn 
read_input<R: BufRead, W: Write>(\n\n label: &str,\n\n hidden: bool,\n\n source: Option<R>,\n\n mut writer: W,\n\n) -> String {\n\n write!(&mut writer, \"{}: \", label).unwrap();\n\n writer.flush().unwrap();\n\n\n\n if hidden {\n\n match source {\n\n Some(reader) => read_password_with_reader(Some(reader)).unwrap(),\n\n None => read_password().unwrap(),\n\n }\n\n } else {\n\n let mut input = String::new();\n\n match source {\n\n Some(mut reader) => {\n\n reader.read_line(&mut input).unwrap();\n\n }\n\n None => {\n\n io::stdin().read_line(&mut input).unwrap();\n\n }\n\n }\n\n input.trim().to_string()\n\n }\n\n}\n\n\n", "file_path": "src/app.rs", "rank": 29, "score": 19436.62117884318 }, { "content": "use crate::api::Api;\n\nuse crate::cli;\n\nuse crate::config::Config;\n\nuse clap::{App, ArgMatches};\n\nuse rpassword::read_password;\n\nuse rpassword::read_password_with_reader;\n\nuse serde_json::Value;\n\nuse std::io::{self, BufRead, Empty, Write};\n\n\n", "file_path": "src/app.rs", "rank": 30, "score": 9.300928029618095 }, { "content": " let params = json!({\n\n \"clockOutCondition\": {\n\n \"condition\": 2\n\n },\n\n \"dailyPlaceEvidence\": {},\n\n \"workDateString\": work_date_string,\n\n \"stampBreakStart\": \"\",\n\n \"stampBreakEnd\": \"\",\n\n \"updatedDateString\": \"2021-07-01 10:00:00.000\"\n\n });\n\n let _m4 = mock(\"POST\", \"/A123456/submitClockOut\")\n\n .match_header(\"content-type\", \"application/json\")\n\n .match_body(Matcher::Json(params))\n\n .with_body(\"{\\\"returnValue\\\":\\\"Success\\\",\\\"atmessage\\\":\\\"Your Attendance request has been sent\\\",\\\"filePath\\\":\\\"../../common/images/ico_condi02.svg\\\",\\\"clockTime\\\":\\\"19:00\\\"}\")\n\n .create();\n\n\n\n let matches = build_app().get_matches_from(vec![\"miteras\", \"clock-out\"]);\n\n let mut writer = Vec::<u8>::new();\n\n run(matches, &mut writer);\n\n\n\n _m1.assert();\n\n _m2.assert();\n\n _m3.assert();\n\n _m4.assert();\n\n assert_eq!(String::from_utf8(writer).unwrap(), 
\"clock-out at 19:00\\n\");\n\n }\n\n}\n", "file_path": "src/app.rs", "rank": 31, "score": 8.763582780879823 }, { "content": " let params = json!({\n\n \"clockInCondition\": {\n\n \"condition\": 2\n\n },\n\n \"dailyPlaceEvidence\": {},\n\n \"workDateString\": work_date_string,\n\n \"enableBreakTime\": false\n\n });\n\n let _m4 = mock(\"POST\", \"/A123456/submitClockIn\")\n\n .match_header(\"content-type\", \"application/json\")\n\n .match_body(Matcher::Json(params))\n\n .with_body(\"{\\\"returnValue\\\":\\\"Success\\\",\\\"filePath\\\":\\\"../../common/images/ico_condi02.svg\\\",\\\"clockTime\\\":\\\"10:00\\\"}\")\n\n .create();\n\n\n\n let matches = build_app().get_matches_from(vec![\"miteras\", \"clock-in\"]);\n\n let mut writer = Vec::<u8>::new();\n\n run(matches, &mut writer);\n\n\n\n _m1.assert();\n\n _m2.assert();\n", "file_path": "src/app.rs", "rank": 32, "score": 8.40827051391953 }, { "content": " _m3.assert();\n\n _m4.assert();\n\n assert_eq!(String::from_utf8(writer).unwrap(), \"clock-in at 10:00\\n\");\n\n }\n\n\n\n #[test]\n\n fn clock_out_no_args() {\n\n let config = Config::new(\n\n \"A123456\".to_string(),\n\n \"sinsoku\".to_string(),\n\n \"pass1234\".to_string(),\n\n );\n\n config.save().ok();\n\n\n\n let _m1 = mock_login();\n\n let _m2 = mock_auth(true);\n\n let _m3 = mock_cico();\n\n\n\n let today = Local::today();\n\n let work_date_string = format!(\"{}-{}-{}\", today.year(), today.month(), today.day());\n", "file_path": "src/app.rs", "rank": 33, "score": 7.985505457958162 }, { "content": " String::from_utf8(writer).unwrap(),\n\n \"Try logging in to MITERAS.\\nOrg: Username: Password: \\nLogin failed.\\n\"\n\n );\n\n }\n\n\n\n #[test]\n\n fn clock_in_no_args() {\n\n let config = Config::new(\n\n \"A123456\".to_string(),\n\n \"sinsoku\".to_string(),\n\n \"pass1234\".to_string(),\n\n );\n\n config.save().ok();\n\n\n\n let _m1 = mock_login();\n\n let _m2 = mock_auth(true);\n\n let _m3 = mock_cico();\n\n\n\n let today = Local::today();\n\n let 
work_date_string = format!(\"{}-{}-{}\", today.year(), today.month(), today.day());\n", "file_path": "src/app.rs", "rank": 34, "score": 7.940510735024154 }, { "content": "#[macro_use]\n\nextern crate clap;\n\n\n\nextern crate chrono;\n\nextern crate rpassword;\n\nextern crate serde;\n\n#[cfg(test)]\n\n#[macro_use]\n\nextern crate serde_json;\n\nextern crate toml;\n\n\n\npub mod api;\n\npub mod app;\n\npub mod cli;\n\npub mod config;\n", "file_path": "src/lib.rs", "rank": 35, "score": 6.93551631844891 }, { "content": "extern crate assert_cmd;\n\n#[macro_use]\n\nextern crate clap;\n\nextern crate predicates;\n\n\n\nuse assert_cmd::prelude::*;\n\nuse predicates::prelude::*;\n\nuse std::process::Command;\n\n\n\nstatic CLI_DESCRIBE: &str = \"A command-line tool for MITERAS.\";\n\n\n\n#[test]\n", "file_path": "tests/main.rs", "rank": 36, "score": 5.288259748480623 }, { "content": " use std::io::Cursor;\n\n\n\n fn mock_login() -> Mock {\n\n mock(\"GET\", \"/A123456/login\")\n\n .with_body_from_file(\"tests/files/login.html\")\n\n .create()\n\n }\n\n\n\n fn mock_auth(success: bool) -> Mock {\n\n let location = if success {\n\n \"/A123456/cico\"\n\n } else {\n\n \"/A123456/login\"\n\n };\n\n mock(\"POST\", \"/A123456/auth\")\n\n .with_status(302)\n\n .with_header(\"Location\", location)\n\n .create()\n\n }\n\n\n", "file_path": "src/app.rs", "rank": 37, "score": 2.6987219061889824 }, { "content": "extern crate miteras;\n\nuse miteras::app;\n\nuse std::io;\n\n\n", "file_path": "src/main.rs", "rank": 38, "score": 2.341640849561399 }, { "content": " String::from_utf8(writer).unwrap(),\n\n \"Try logging in to MITERAS.\\nOrg: Username: Password: \\nLogin successful.\\n\"\n\n );\n\n }\n\n\n\n #[test]\n\n fn login_with_invalid_args() {\n\n let _m1 = mock(\"GET\", \"/A123456/login\")\n\n .with_body_from_file(\"tests/files/login.html\")\n\n .expect(2)\n\n .create();\n\n let _m2 = mock_auth(false);\n\n\n\n let source = Cursor::new(b\"A123456\\nsinsoku\\npassXXX\\n\");\n\n let mut 
writer = Vec::<u8>::new();\n\n login(Some(source), &mut writer);\n\n\n\n _m1.assert();\n\n _m2.assert();\n\n assert_eq!(\n", "file_path": "src/app.rs", "rank": 39, "score": 2.15564017633341 }, { "content": "![build](https://github.com/sinsoku/miteras-cli/workflows/build/badge.svg)\n\n[![codecov](https://codecov.io/gh/sinsoku/miteras-cli/branch/main/graph/badge.svg)](https://codecov.io/gh/sinsoku/miteras-cli)\n\n\n\n# MITERAS CLI\n\n\n\nAn (**unofficial**) command-line tool for [MITERAS](https://www.persol-pt.co.jp/miteras/).\n\n\n\n## Installation\n\n\n\n### Install from GitHub Releases\n\n\n\nDownload the latest version from [GitHub Releases](https://github.com/sinsoku/miteras-cli/releases).\n\n\n\n### Homebrew on macOS\n\n\n\nIf you are using MacOS, you can install with [Homebrew](https://brew.sh/).\n\n\n\n```console\n\n$ brew tap sinsoku/tap\n\n$ brew install miteras-cli\n\n```\n\n\n\n## Usage\n\n\n\n### login\n\n\n\nSave credentials using the `login` command.\n\n\n\n```console\n\n$ miteras login\n\nTry logging in to MITERAS.\n\n\n\nOrg: A123456\n\nUsername: sinsoku\n\nPassword: ********\n\n\n\nLogin successful.\n\n```\n\n\n\n### clock-in / clock-out\n\n\n\n```console\n\n$ miteras clock-in\n\n$ miteras clock-out\n\n```\n\n\n\n## Contributing\n\n\n\nBug reports and pull requests are welcome on GitHub at https://github.com/sinsoku/miteras-cli/.\n\n\n\n## License\n\n\n\nThe tool is available as open source under the terms of the [MIT License](https://opensource.org/licenses/MIT).\n", "file_path": "README.md", "rank": 40, "score": 1.9897870180164743 }, { "content": "use clap::{App, AppSettings, Arg, SubCommand};\n\n\n", "file_path": "src/cli.rs", "rank": 41, "score": 1.9575085780165118 } ]
Rust
01-running/src/main.rs
davidhollis/rust-gba-scratchpad
2d02fd4839bca33f6ef4167e5f83af3db7c517ff
#![no_std] #![no_main] #![feature(asm)] use core::cmp::{ max, min }; use gba::prelude::*; use gbainputs::{ Key, KeyMonitor }; use gbamath::fixed::{ UFixed8, SFixed8 }; use gbamath::geometry::BoundingBox; use gbamath::Vec2D; #[panic_handler] fn panic(_info: &core::panic::PanicInfo) -> ! { mode3::dma3_clear_to(Color::from_rgb(31,0,0)); loop {} } enum PlayerState { Standing, Walking, } struct Player { old_position: Vec2D<UFixed8>, current_position: Vec2D<UFixed8>, old_velocity: Vec2D<SFixed8>, current_velocity: Vec2D<SFixed8>, collision_box: BoundingBox, box_offset: Vec2D<SFixed8>, inputs: KeyMonitor, state: PlayerState, collision_state: u8, } impl Player { const COLOR: Color = Color::from_rgb(0, 0, 31); const CENTER_COLOR: Color = Color::from_rgb(0, 31, 0); const ANCHOR_COLOR: Color = Color::from_rgb(31, 31, 0); const WALK_SPEED: SFixed8 = SFixed8::constant(2i16); const TOP_COLLISION_LAST_FRAME: u8 = 0b1000_0000; const TOP_COLLISION_THIS_FRAME: u8 = 0b0100_0000; const BOTTOM_COLLISION_LAST_FRAME: u8 = 0b0010_0000; const BOTTOM_COLLISION_THIS_FRAME: u8 = 0b0001_0000; const LEFT_COLLISION_LAST_FRAME: u8 = 0b0000_1000; const LEFT_COLLISION_THIS_FRAME: u8 = 0b0000_0100; const RIGHT_COLLISION_LAST_FRAME: u8 = 0b0000_0010; const RIGHT_COLLISION_THIS_FRAME: u8 = 0b0000_0001; const LAST_FRAME_STATES: u8 = Player::TOP_COLLISION_LAST_FRAME | Player::BOTTOM_COLLISION_LAST_FRAME | Player::LEFT_COLLISION_LAST_FRAME | Player::RIGHT_COLLISION_LAST_FRAME; fn update(&mut self) { self.inputs.update(); match self.state { PlayerState::Standing => { self.current_velocity = VEC2D_ZERO; if self.inputs.is_pressed(Key::LEFT) != self.inputs.is_pressed(Key::RIGHT) { self.state = PlayerState::Walking; } }, PlayerState::Walking => { if self.inputs.is_pressed(Key::LEFT) == self.inputs.is_pressed(Key::RIGHT) { self.state = PlayerState::Standing; self.current_velocity = VEC2D_ZERO; } else if self.inputs.is_pressed(Key::RIGHT) { if self.collision_state & Player::RIGHT_COLLISION_THIS_FRAME > 0 { 
self.current_velocity.x = SFixed8::ZERO; } else { self.current_velocity.x = Player::WALK_SPEED; } } else if self.inputs.is_pressed(Key::LEFT) { if self.collision_state & Player::LEFT_COLLISION_THIS_FRAME > 0 { self.current_velocity.x = SFixed8::ZERO; } else { self.current_velocity.x = -Player::WALK_SPEED; } } }, }; self.update_physics(); } fn update_physics(&mut self) { self.old_position = self.current_position; self.old_velocity = self.current_velocity; self.collision_state = (self.collision_state << 1) & Player::LAST_FRAME_STATES; self.current_position.saturating_add_signed_assign(self.current_velocity); self.collision_box.center = self.current_position.saturating_add_signed(self.box_offset); if self.collision_box.left() < LEFT_WALL_X { let new_collision_box_center_x = LEFT_WALL_X + self.collision_box.half_size.x; self.current_position.x = new_collision_box_center_x.saturating_add_signed(-self.box_offset.x); self.collision_box.center.x = new_collision_box_center_x; self.current_velocity.x = max(self.current_velocity.x, SFixed8::ZERO); self.collision_state |= Player::LEFT_COLLISION_THIS_FRAME; } else if self.collision_box.right() > RIGHT_WALL_X { let new_collision_box_center_x = RIGHT_WALL_X - self.collision_box.half_size.x; self.current_position.x = new_collision_box_center_x.saturating_add_signed(-self.box_offset.x); self.collision_box.center.x = new_collision_box_center_x; self.current_velocity.x = min(self.current_velocity.x, SFixed8::ZERO); self.collision_state |= Player::RIGHT_COLLISION_THIS_FRAME; } } #[inline] fn draw(&self) { let left: u16 = self.collision_box.left().into(); let right: u16 = self.collision_box.right().into(); let top: u16 = self.collision_box.top().into(); let bottom: u16 = self.collision_box.bottom().into(); fill_rect(left, right, top, bottom, Player::COLOR); let center_x: u16 = self.collision_box.center.x.into(); let center_y: u16 = self.collision_box.center.y.into(); mode3::bitmap_xy(center_x.into(), 
center_y.into()).write(Player::CENTER_COLOR); let player_x: u16 = self.current_position.x.into(); let player_y: u16 = self.current_position.y.into(); mode3::bitmap_xy(player_x.into(), player_y.into()).write(Player::ANCHOR_COLOR); } } const BACKGROUND_COLOR: Color = Color::from_rgb(15, 15, 15); const WALL_COLOR: Color = Color::from_rgb(0, 0, 0); const VEC2D_ZERO: Vec2D<SFixed8> = Vec2D { x: SFixed8::ZERO, y: SFixed8::ZERO }; const LEFT_WALL_X: UFixed8 = UFixed8::constant(20u16); const RIGHT_WALL_X: UFixed8 = UFixed8::constant(219u16); const FLOOR_Y: u16 = 130u16; #[no_mangle] fn main() -> ! { const SETUP_DISPLAY: DisplayControl = DisplayControl::new().with_display_mode(3).with_display_bg2(true); DISPCNT.write(SETUP_DISPLAY); let mut player: Player = Player { old_position: Vec2D { x: UFixed8::from(100u16), y: FLOOR_Y.into() }, current_position: Vec2D { x: UFixed8::from(100u16), y: FLOOR_Y.into() }, old_velocity: Vec2D { x: SFixed8::ZERO, y: SFixed8::ZERO }, current_velocity: Vec2D { x: SFixed8::ZERO, y: SFixed8::ZERO }, collision_box: BoundingBox { center: Vec2D { x: UFixed8::ZERO, y: UFixed8::ZERO }, half_size: Vec2D { x: 8u16.into(), y: 16u16.into() }, }, box_offset: Vec2D { x: 8i16.into(), y: (-16i16).into() }, inputs: KeyMonitor::new(), state: PlayerState::Standing, collision_state: 0, }; mode3::dma3_clear_to(WALL_COLOR); loop { spin_until_vdraw(); player.update(); spin_until_vblank(); fill_rect( u16::from(LEFT_WALL_X), u16::from(RIGHT_WALL_X), 0u16, FLOOR_Y, BACKGROUND_COLOR ); player.draw(); } } #[inline] fn fill_rect(left: u16, right: u16, top: u16, bottom: u16, color: Color) { let raw_color: u16 = color.0; let word_count: u16 = right - left + 1; for y in top..=bottom { unsafe { DMA3SAD.write(&raw_color as *const _ as usize); DMA3DAD.write(0x0600_0000usize + ((mode3::WIDTH * (y as usize)) + (left as usize)) * 2usize); DMA3CNT_L.write(word_count); const CTRL: DmaControl = DmaControl::new() .with_dest_addr(DestAddrControl::Increment) 
.with_src_addr(SrcAddrControl::Fixed) .with_transfer_u32(false) .with_start_time(DmaStartTiming::Immediately) .with_enabled(true); DMA3CNT_H.write(CTRL); asm!( " nop nop ", options(nostack), ); } } } #[inline] fn spin_until_vblank() { while VCOUNT.read() < 160 {} } #[inline] fn spin_until_vdraw() { while VCOUNT.read() >= 160 {} }
#![no_std] #![no_main] #![feature(asm)] use core::cmp::{ max, min }; use gba::prelude::*; use gbainputs::{ Key, KeyMonitor }; use gbamath::fixed::{ UFixed8, SFixed8 }; use gbamath::geometry::BoundingBox; use gbamath::Vec2D; #[panic_handler] fn panic(_info: &core::panic::PanicInfo) -> ! { mode3::dma3_clear_to(Color::from_rgb(31,0,0)); loop {} } enum PlayerState { Standing, Walking, } struct Player { old_position: Vec2D<UFixed8>, current_position: Vec2D<UFixed8>, old_velocity: Vec2D<SFixed8>, current_velocity: Vec2D<SFixed8>, collision_box: BoundingBox, box_offset: Vec2D<SFixed8>, inputs: KeyMonitor, state: PlayerState, collision_state: u8, } impl Player { const COLOR: Color = Color::from_rgb(0, 0, 31); const CENTER_COLOR: Color = Color::from_rgb(0, 31, 0); const ANCHOR_COLOR: Color = Color::from_rgb(31, 31, 0); const WALK_SPEED: SFixed8 = SFixed8::constant(2i16); const TOP_COLLISION_LAST_FRAME: u8 = 0b1000_0000; const TOP_COLLISION_THIS_FRAME: u8 = 0b0100_0000; const BOTTOM_COLLISION_LAST_FRAME: u8 = 0b0010_0000; const BOTTOM_COLLISION_THIS_FRAME: u8 = 0b0001_0000; const LEFT_COLLISION_LAST_FRAME: u8 = 0b0000_1000; const LEFT_COLLISION_THIS_FRAME: u8 = 0b0000_0100; const RIGHT_COLLISION_LAST_FRAME: u8 = 0b0000_0010; const RIGHT_COLLISION_THIS_FRAME: u8 = 0b0000_0001; const LAST_FRAME_STATES: u8 = Player::TOP_COLLISION_LAST_FRAME | Player::BOTTOM_COLLISION_LAST_FRAME | Player::LEFT_COLLISION_LAST_FRAME | Player::RIGHT_COLLISION_LAST_FRAME; fn update(&mut self) { self.inputs.update(); match self.state { PlayerState::Standing => { self.current_velocity = VEC2D_ZERO; if self.inputs.is_pressed(Key::LEFT) != self.inputs.is_pressed(Key::RIGHT) { self.state = PlayerState::Walking; } }, PlayerState::Walking => { if self.inputs.is_pressed(Key::LEFT) == self.inputs.is_pressed(Key::RIGHT) { self.state = PlayerState::Standing;
ode3::WIDTH * (y as usize)) + (left as usize)) * 2usize); DMA3CNT_L.write(word_count); const CTRL: DmaControl = DmaControl::new() .with_dest_addr(DestAddrControl::Increment) .with_src_addr(SrcAddrControl::Fixed) .with_transfer_u32(false) .with_start_time(DmaStartTiming::Immediately) .with_enabled(true); DMA3CNT_H.write(CTRL); asm!( " nop nop ", options(nostack), ); } } } #[inline] fn spin_until_vblank() { while VCOUNT.read() < 160 {} } #[inline] fn spin_until_vdraw() { while VCOUNT.read() >= 160 {} }
self.current_velocity = VEC2D_ZERO; } else if self.inputs.is_pressed(Key::RIGHT) { if self.collision_state & Player::RIGHT_COLLISION_THIS_FRAME > 0 { self.current_velocity.x = SFixed8::ZERO; } else { self.current_velocity.x = Player::WALK_SPEED; } } else if self.inputs.is_pressed(Key::LEFT) { if self.collision_state & Player::LEFT_COLLISION_THIS_FRAME > 0 { self.current_velocity.x = SFixed8::ZERO; } else { self.current_velocity.x = -Player::WALK_SPEED; } } }, }; self.update_physics(); } fn update_physics(&mut self) { self.old_position = self.current_position; self.old_velocity = self.current_velocity; self.collision_state = (self.collision_state << 1) & Player::LAST_FRAME_STATES; self.current_position.saturating_add_signed_assign(self.current_velocity); self.collision_box.center = self.current_position.saturating_add_signed(self.box_offset); if self.collision_box.left() < LEFT_WALL_X { let new_collision_box_center_x = LEFT_WALL_X + self.collision_box.half_size.x; self.current_position.x = new_collision_box_center_x.saturating_add_signed(-self.box_offset.x); self.collision_box.center.x = new_collision_box_center_x; self.current_velocity.x = max(self.current_velocity.x, SFixed8::ZERO); self.collision_state |= Player::LEFT_COLLISION_THIS_FRAME; } else if self.collision_box.right() > RIGHT_WALL_X { let new_collision_box_center_x = RIGHT_WALL_X - self.collision_box.half_size.x; self.current_position.x = new_collision_box_center_x.saturating_add_signed(-self.box_offset.x); self.collision_box.center.x = new_collision_box_center_x; self.current_velocity.x = min(self.current_velocity.x, SFixed8::ZERO); self.collision_state |= Player::RIGHT_COLLISION_THIS_FRAME; } } #[inline] fn draw(&self) { let left: u16 = self.collision_box.left().into(); let right: u16 = self.collision_box.right().into(); let top: u16 = self.collision_box.top().into(); let bottom: u16 = self.collision_box.bottom().into(); fill_rect(left, right, top, bottom, Player::COLOR); let center_x: u16 = 
self.collision_box.center.x.into(); let center_y: u16 = self.collision_box.center.y.into(); mode3::bitmap_xy(center_x.into(), center_y.into()).write(Player::CENTER_COLOR); let player_x: u16 = self.current_position.x.into(); let player_y: u16 = self.current_position.y.into(); mode3::bitmap_xy(player_x.into(), player_y.into()).write(Player::ANCHOR_COLOR); } } const BACKGROUND_COLOR: Color = Color::from_rgb(15, 15, 15); const WALL_COLOR: Color = Color::from_rgb(0, 0, 0); const VEC2D_ZERO: Vec2D<SFixed8> = Vec2D { x: SFixed8::ZERO, y: SFixed8::ZERO }; const LEFT_WALL_X: UFixed8 = UFixed8::constant(20u16); const RIGHT_WALL_X: UFixed8 = UFixed8::constant(219u16); const FLOOR_Y: u16 = 130u16; #[no_mangle] fn main() -> ! { const SETUP_DISPLAY: DisplayControl = DisplayControl::new().with_display_mode(3).with_display_bg2(true); DISPCNT.write(SETUP_DISPLAY); let mut player: Player = Player { old_position: Vec2D { x: UFixed8::from(100u16), y: FLOOR_Y.into() }, current_position: Vec2D { x: UFixed8::from(100u16), y: FLOOR_Y.into() }, old_velocity: Vec2D { x: SFixed8::ZERO, y: SFixed8::ZERO }, current_velocity: Vec2D { x: SFixed8::ZERO, y: SFixed8::ZERO }, collision_box: BoundingBox { center: Vec2D { x: UFixed8::ZERO, y: UFixed8::ZERO }, half_size: Vec2D { x: 8u16.into(), y: 16u16.into() }, }, box_offset: Vec2D { x: 8i16.into(), y: (-16i16).into() }, inputs: KeyMonitor::new(), state: PlayerState::Standing, collision_state: 0, }; mode3::dma3_clear_to(WALL_COLOR); loop { spin_until_vdraw(); player.update(); spin_until_vblank(); fill_rect( u16::from(LEFT_WALL_X), u16::from(RIGHT_WALL_X), 0u16, FLOOR_Y, BACKGROUND_COLOR ); player.draw(); } } #[inline] fn fill_rect(left: u16, right: u16, top: u16, bottom: u16, color: Color) { let raw_color: u16 = color.0; let word_count: u16 = right - left + 1; for y in top..=bottom { unsafe { DMA3SAD.write(&raw_color as *const _ as usize); DMA3DAD.write(0x0600_0000usize + ((m
random
[ { "content": "#[inline]\n\nfn draw_icon(x: usize, y: usize, width: usize, icon: &[Color], enabled: bool) {\n\n for (idx, color) in icon.iter().enumerate() {\n\n let dx = idx % width;\n\n let dy = idx / width;\n\n let modified_color =\n\n if *color == U && !enabled {\n\n W\n\n } else {\n\n *color\n\n };\n\n mode3::bitmap_xy(x + dx, y + dy).write(modified_color);\n\n }\n\n}\n\n\n", "file_path": "00-buttons/src/main.rs", "rank": 3, "score": 33896.93866168019 }, { "content": "#[no_mangle]\n\nfn main() -> ! {\n\n const SETUP_DISPLAY: DisplayControl = DisplayControl::new().with_display_mode(3).with_display_bg2(true);\n\n DISPCNT.write(SETUP_DISPLAY);\n\n\n\n mode3::dma3_clear_to(W);\n\n\n\n let mut keys: Keys = KEYINPUT.read().into();\n\n\n\n loop {\n\n spin_until_vdraw();\n\n spin_until_vblank();\n\n \n\n draw_icon(20, 20, 20, &L_BUTTON_ICON, keys.l());\n\n draw_icon(200, 20, 20, &R_BUTTON_ICON, keys.r());\n\n draw_icon(170, 60, 20, &A_BUTTON_ICON, keys.a());\n\n draw_icon(150, 80, 20, &B_BUTTON_ICON, keys.b());\n\n draw_icon(60, 50, 20, &UP_BUTTON_ICON, keys.up());\n\n draw_icon(40, 70, 20, &LEFT_BUTTON_ICON, keys.left());\n\n draw_icon(80, 70, 20, &RIGHT_BUTTON_ICON, keys.right());\n\n draw_icon(60, 90, 20, &DOWN_BUTTON_ICON, keys.down());\n\n draw_icon(60, 114, 20, &START_BUTTON_ICON, keys.start());\n\n draw_icon(60, 132, 20, &SELECT_BUTTON_ICON, keys.select());\n\n \n\n // read our keys for this frame\n\n keys = KEYINPUT.read().into();\n\n }\n\n}\n", "file_path": "00-buttons/src/main.rs", "rank": 5, "score": 26310.762740938226 }, { "content": "#[inline]\n\nfn spin_until_vdraw() {\n\n while VCOUNT.read() >= 160 {}\n\n}\n\n\n", "file_path": "00-buttons/src/main.rs", "rank": 7, "score": 25289.326001704805 }, { "content": "#[inline]\n\nfn spin_until_vblank() {\n\n while VCOUNT.read() < 160 {}\n\n}\n\n\n", "file_path": "00-buttons/src/main.rs", "rank": 8, "score": 25289.326001704805 }, { "content": "#[panic_handler]\n\nfn panic(_info: &core::panic::PanicInfo) -> ! 
{\n\n loop {}\n\n}\n\n\n\n// spin_until_* copied from examples. In a real game we'd use interrupts\n", "file_path": "00-buttons/src/main.rs", "rank": 10, "score": 19328.764881713563 }, { "content": "}\n\n\n\npub struct KeyMonitor {\n\n previous: u16,\n\n current: u16,\n\n}\n\n\n\nimpl KeyMonitor {\n\n pub fn new() -> KeyMonitor {\n\n KeyMonitor {\n\n previous: 0,\n\n current: 0,\n\n }\n\n }\n\n\n\n #[inline]\n\n pub fn is_pressed(&self, key: Key) -> bool {\n\n self.current & (key as u16) != 0\n\n }\n\n\n", "file_path": "lib/gbainputs/src/lib.rs", "rank": 12, "score": 19323.95828240622 }, { "content": " #[inline]\n\n pub fn was_pressed(&self, key: Key) -> bool {\n\n self.previous & (key as u16) != 0\n\n }\n\n\n\n #[inline]\n\n pub fn is_released(&self, key: Key) -> bool {\n\n self.current & (key as u16) == 0\n\n }\n\n\n\n #[inline]\n\n pub fn was_released(&self, key: Key) -> bool {\n\n self.previous & (key as u16) == 0\n\n }\n\n\n\n #[inline]\n\n pub fn just_pressed(&self, key: Key) -> bool {\n\n self.was_released(key) && self.is_pressed(key)\n\n }\n\n\n", "file_path": "lib/gbainputs/src/lib.rs", "rank": 13, "score": 19322.839069663718 }, { "content": " #[inline]\n\n pub fn just_released(&self, key: Key) -> bool {\n\n self.was_pressed(key) && self.is_released(key)\n\n }\n\n\n\n #[inline]\n\n pub fn held(&self, key: Key) -> bool {\n\n self.was_pressed(key) && self.is_pressed(key)\n\n }\n\n\n\n #[inline]\n\n pub fn update(&mut self) {\n\n self.previous = self.current;\n\n self.current = KEY_REGISTER.read() ^ KEY_MASK;\n\n }\n\n}", "file_path": "lib/gbainputs/src/lib.rs", "rank": 14, "score": 19322.766623156815 }, { "content": "#![no_std]\n\n\n\nuse voladdress::{ Safe, VolAddress };\n\n\n\nconst KEY_REGISTER: VolAddress<u16, Safe, ()> = unsafe { VolAddress::new(0x0400_0130) };\n\nconst KEY_MASK: u16 = 0b11_1111_1111u16;\n\n\n\n#[derive(PartialEq, Eq, Clone, Copy)]\n\n#[repr(u16)]\n\npub enum Key {\n\n A = 0b00_0000_0001u16,\n\n B = 0b00_0000_0010u16,\n\n SELECT = 
0b00_0000_0100u16,\n\n START = 0b00_0000_1000u16,\n\n RIGHT = 0b00_0001_0000u16,\n\n LEFT = 0b00_0010_0000u16,\n\n UP = 0b00_0100_0000u16,\n\n DOWN = 0b00_1000_0000u16,\n\n R = 0b01_0000_0000u16,\n\n L = 0b10_0000_0000u16,\n", "file_path": "lib/gbainputs/src/lib.rs", "rank": 15, "score": 19322.211272351647 }, { "content": "\n\nimpl Sub for UFixed8 {\n\n type Output = Self;\n\n fn sub(self, rhs: Self) -> Self {\n\n UFixed8(self.0 - rhs.0)\n\n }\n\n}\n\n\n\nimpl Mul for UFixed8 {\n\n type Output = Self;\n\n fn mul(self, rhs: Self) -> Self {\n\n UFixed8((self.0 * rhs.0) >> UFixed8::FRACTION_BITS)\n\n }\n\n}\n\n\n\nimpl Mul<u16> for UFixed8 {\n\n type Output = Self;\n\n fn mul(self, rhs: u16) -> Self {\n\n UFixed8(self.0 * rhs)\n\n }\n", "file_path": "lib/gbamath/src/fixed.rs", "rank": 20, "score": 4.671412820488503 }, { "content": " u16::MAX\n\n } else {\n\n 0u16\n\n }\n\n )\n\n }\n\n \n\n\n\n #[inline]\n\n pub fn wrapping_add_signed(self, rhs: SFixed8) -> UFixed8 {\n\n UFixed8(self.0.wrapping_add(rhs.0 as u16))\n\n }\n\n}\n\n\n\nimpl Add for UFixed8 {\n\n type Output = Self;\n\n fn add(self, rhs: Self) -> Self {\n\n UFixed8(self.0 + rhs.0)\n\n }\n\n}\n", "file_path": "lib/gbamath/src/fixed.rs", "rank": 21, "score": 4.642173095534904 }, { "content": " SFixed8(self.0 - rhs.0)\n\n }\n\n}\n\n\n\nimpl Mul for SFixed8 {\n\n type Output = Self;\n\n fn mul(self, rhs: Self) -> Self {\n\n SFixed8((self.0 * rhs.0) >> SFixed8::FRACTION_BITS)\n\n }\n\n}\n\n\n\nimpl Mul<i16> for SFixed8 {\n\n type Output = Self;\n\n fn mul(self, rhs: i16) -> Self {\n\n SFixed8(self.0 * rhs)\n\n }\n\n}\n\n\n\nimpl Neg for SFixed8 {\n\n type Output = Self;\n", "file_path": "lib/gbamath/src/fixed.rs", "rank": 22, "score": 4.5972812486984695 }, { "content": " pub const SCALE: u16 = 1 << SFixed8::FRACTION_BITS;\n\n\n\n pub const ZERO: SFixed8 = SFixed8(0i16);\n\n\n\n #[inline]\n\n pub const fn constant(v: i16) -> SFixed8 {\n\n SFixed8(v << SFixed8::FRACTION_BITS)\n\n }\n\n}\n\n\n\nimpl Add for SFixed8 
{\n\n type Output = Self;\n\n fn add(self, rhs: Self) -> Self {\n\n SFixed8(self.0 + rhs.0)\n\n }\n\n}\n\n\n\nimpl Sub for SFixed8 {\n\n type Output = Self;\n\n fn sub(self, rhs: Self) -> Self {\n", "file_path": "lib/gbamath/src/fixed.rs", "rank": 24, "score": 3.92921845084447 }, { "content": " fn add_assign(&mut self, rhs: Self) {\n\n self.x = self.x + rhs.x;\n\n self.y = self.y + rhs.y;\n\n }\n\n}\n\n\n\nimpl<T> Mul<T> for Vec2D<T> where T: Mul<T> + Mul<Output = T> + Copy {\n\n type Output = Self;\n\n fn mul(self, rhs: T) -> Self {\n\n Vec2D {\n\n x: self.x * rhs,\n\n y: self.y * rhs,\n\n }\n\n }\n\n}", "file_path": "lib/gbamath/src/lib.rs", "rank": 26, "score": 3.433432999784305 }, { "content": " fn neg(self) -> Self {\n\n SFixed8(-self.0)\n\n }\n\n}\n\n\n\nimpl From<i16> for SFixed8 {\n\n #[inline]\n\n fn from(v: i16) -> SFixed8 {\n\n SFixed8::constant(v)\n\n }\n\n}\n\n\n\nimpl From<SFixed8> for i16 {\n\n fn from(v: SFixed8) -> i16 {\n\n v.0 >> SFixed8::FRACTION_BITS\n\n }\n\n}", "file_path": "lib/gbamath/src/fixed.rs", "rank": 27, "score": 3.3022442424392704 }, { "content": " }\n\n\n\n #[inline]\n\n pub fn saturating_add_signed_assign(&mut self, rhs: Vec2D<fixed::SFixed8>) {\n\n self.x = self.x.saturating_add_signed(rhs.x);\n\n self.y = self.y.saturating_add_signed(rhs.y);\n\n }\n\n}\n\n\n\nimpl<T> Add for Vec2D<T> where T: Add<T> + Add<Output = T> + Copy {\n\n type Output = Self;\n\n fn add(self, rhs: Self) -> Self {\n\n Vec2D {\n\n x: self.x + rhs.x,\n\n y: self.y + rhs.y,\n\n }\n\n }\n\n}\n\n\n\nimpl<T> AddAssign for Vec2D<T> where T: Add<T> + Add<Output = T> + Copy {\n", "file_path": "lib/gbamath/src/lib.rs", "rank": 28, "score": 3.0383117800198836 }, { "content": "#![no_std]\n\n\n\nuse core::ops::{ Add, AddAssign, Mul };\n\n\n\npub mod fixed;\n\npub mod geometry;\n\n\n\n#[derive(PartialEq, Eq, Clone, Copy)]\n\npub struct Vec2D<T> {\n\n pub x: T,\n\n pub y: T,\n\n}\n\n\n\nimpl Vec2D<fixed::UFixed8> {\n\n #[inline]\n\n pub fn saturating_add_signed(self, 
rhs: Vec2D<fixed::SFixed8>) -> Self {\n\n Vec2D {\n\n x: self.x.saturating_add_signed(rhs.x),\n\n y: self.y.saturating_add_signed(rhs.y),\n\n }\n", "file_path": "lib/gbamath/src/lib.rs", "rank": 30, "score": 2.5322954890171423 }, { "content": "use crate::fixed::UFixed8;\n\nuse crate::Vec2D;\n\n\n\npub struct BoundingBox {\n\n pub center: Vec2D<UFixed8>,\n\n pub half_size: Vec2D<UFixed8>,\n\n}\n\n\n\nimpl BoundingBox {\n\n pub fn intersects(&self, other: &BoundingBox) -> bool {\n\n ! (\n\n (self.center.x.abs_diff(other.center.x) > self.half_size.x + other.half_size.x) ||\n\n (self.center.y.abs_diff(other.center.y) > self.half_size.y + other.half_size.y)\n\n )\n\n }\n\n\n\n #[inline]\n\n pub fn left(&self) -> UFixed8 {\n\n self.center.x - self.half_size.x\n\n }\n", "file_path": "lib/gbamath/src/geometry.rs", "rank": 31, "score": 2.1948539179354047 }, { "content": "}\n\n\n\nimpl From<u16> for UFixed8 {\n\n #[inline]\n\n fn from(v: u16) -> UFixed8 {\n\n UFixed8::constant(v)\n\n }\n\n}\n\n\n\nimpl From<UFixed8> for u16 {\n\n fn from(v: UFixed8) -> u16 {\n\n v.0 >> UFixed8::FRACTION_BITS\n\n }\n\n}\n\n\n\n#[derive(PartialEq, Eq, PartialOrd, Ord, Clone, Copy)]\n\npub struct SFixed8(i16);\n\n\n\nimpl SFixed8 {\n\n pub const FRACTION_BITS: u16 = 8;\n", "file_path": "lib/gbamath/src/fixed.rs", "rank": 32, "score": 1.9341225515799452 }, { "content": "use core::ops::{ Add, Sub, Mul, Neg };\n\n\n\n#[derive(PartialEq, Eq, PartialOrd, Ord, Clone, Copy)]\n\npub struct UFixed8(u16);\n\n\n\nimpl UFixed8 {\n\n pub const FRACTION_BITS: u16 = 8;\n\n\n\n pub const ZERO: UFixed8 = UFixed8(0u16);\n\n\n\n #[inline]\n\n pub const fn constant(v: u16) -> UFixed8 {\n\n UFixed8(v << UFixed8::FRACTION_BITS)\n\n }\n\n\n\n pub fn abs_diff(self, other: UFixed8) -> UFixed8 {\n\n if self < other {\n\n other - self\n\n } else {\n\n self - other\n", "file_path": "lib/gbamath/src/fixed.rs", "rank": 33, "score": 1.7479747157872612 }, { "content": "#![no_std]\n\n#![no_main]\n\n\n\nuse 
gba::prelude::*;\n\n\n\nconst B: Color = Color::from_rgb(0, 0, 0);\n\nconst W: Color = Color::from_rgb(31, 31, 31);\n\nconst U: Color = Color::from_rgb(0, 0, 31);\n\n\n\n// Icons\n\nconst A_BUTTON_ICON: [Color; 400] = [\n\n W, W, W, W, W, W, W, B, B, B, B, B, B, W, W, W, W, W, W, W,\n\n W, W, W, W, W, B, B, U, U, U, U, U, U, B, B, W, W, W, W, W,\n\n W, W, W, B, B, U, U, U, U, U, U, U, U, U, U, B, B, W, W, W,\n\n W, W, B, U, U, U, U, U, U, U, U, U, U, U, U, U, U, B, W, W,\n\n W, W, B, U, U, U, U, U, U, U, U, U, U, U, U, U, U, B, W, W,\n\n W, B, U, U, U, U, U, U, B, B, B, B, U, U, U, U, U, U, B, W,\n\n W, B, U, U, U, U, U, U, B, B, B, B, U, U, U, U, U, U, B, W,\n\n B, U, U, U, U, U, B, B, U, U, U, U, B, B, U, U, U, U, U, B,\n\n B, U, U, U, U, U, B, B, U, U, U, U, B, B, U, U, U, U, U, B,\n", "file_path": "00-buttons/src/main.rs", "rank": 34, "score": 1.6732230560875223 }, { "content": "# rust-gba-scratchpad\n\nPlaying around with targeting the gba from rust\n\n\n\n## Milestones\n\n\n\nMy goal with this exercise is to eventually produce a barebones 2d platformer, and in doing so refresh my memory on some aspects of GBA programming and learn others for the first time. 
The milestones I've set for myself are:\n\n\n\n- [x] `00`: button inputs\n\n - source: [`00-buttons`](00-buttons)\n\n - build artifact: `target/gba/buttons.gba`\n\n- [x] `01`: horizontal running, collision with walls, in a bitmapped graphics mode\n\n - source: [`01-running`](01-running)\n\n - build artifact: `target/gba/running.gba`\n\n- [ ] `02`: use vblank/vcount interrupts to schedule drawing and physics calculations, respectively\n\n- [ ] `03`: add acceleration, jumping, and vertical collisions\n\n- [ ] `04`: switch to a tiled graphics mode with a sprite for the player character\n\n- [ ] `05`: add collisions with tiles\n\n- [ ] `06`: add collisions with sprites\n\n- [ ] `07`: expand the tilemap with a scolling camera\n\n- [ ] `08`: add support for multiple rooms with loading zones\n\n- [ ] `09`: add background music\n\n- [ ] `10`: add sfx\n\n\n\n### Getting binaries\n\n\n\nI'll be creating github releases of each milestone as they're completed\n\n\n\n### Building from source\n\n\n\nIf you'd like to build these examples from source, you'll need to install a specific version of nightly rust (this is due to a bug in the underlying gba library that'll be fixed in the next release):\n\n\n\n```\n\nrustup install nightly-2021-07-13\n\nrustup +nightly-2021-07-13 component add rust-src\n\n```\n\n\n\nThen install the ARM binutils and `gbafix`:\n\n\n\n```\n\n# on OS X\n\nbrew install --cask gcc-arm-embedded\n\ncargo install gbafix\n\n```\n\n\n\nFinally, to build all of the examples in this repo:\n\n\n\n```\n\n./build.sh\n\n```\n\n\n", "file_path": "README.md", "rank": 35, "score": 1.1049441799741797 }, { "content": " B, U, U, U, U, U, U, U, U, U, U, U, U, U, U, U, U, U, U, B,\n\n B, U, U, U, U, U, U, U, U, U, U, U, U, U, U, U, U, U, U, B,\n\n B, B, B, B, B, B, B, B, B, B, B, B, B, B, B, B, B, B, B, B,\n\n];\n\n\n\nconst DOWN_BUTTON_ICON: [Color; 400] = [\n\n B, B, B, B, B, B, B, B, B, B, B, B, B, B, B, B, B, B, B, B,\n\n B, U, U, U, U, U, U, U, U, U, U, U, U, U, U, U, U, 
U, U, B,\n\n B, U, U, U, U, U, U, U, U, U, U, U, U, U, U, U, U, U, U, B,\n\n B, U, U, B, B, B, B, B, B, B, B, B, B, B, B, B, B, U, U, B,\n\n B, U, U, B, B, B, B, B, B, B, B, B, B, B, B, B, B, U, U, B,\n\n B, U, U, U, B, B, B, B, B, B, B, B, B, B, B, B, U, U, U, B,\n\n B, U, U, U, B, B, B, B, B, B, B, B, B, B, B, B, U, U, U, B,\n\n B, U, U, U, U, B, B, B, B, B, B, B, B, B, B, U, U, U, U, B,\n\n B, U, U, U, U, B, B, B, B, B, B, B, B, B, B, U, U, U, U, B,\n\n B, U, U, U, U, U, B, B, B, B, B, B, B, B, U, U, U, U, U, B,\n\n B, U, U, U, U, U, B, B, B, B, B, B, B, B, U, U, U, U, U, B,\n\n B, U, U, U, U, U, U, B, B, B, B, B, B, U, U, U, U, U, U, B,\n\n B, U, U, U, U, U, U, B, B, B, B, B, B, U, U, U, U, U, U, B,\n\n B, U, U, U, U, U, U, U, B, B, B, B, U, U, U, U, U, U, U, B,\n", "file_path": "00-buttons/src/main.rs", "rank": 36, "score": 0.17829965953485472 }, { "content": "];\n\n\n\nconst UP_BUTTON_ICON: [Color; 400] = [\n\n B, B, B, B, B, B, B, B, B, B, B, B, B, B, B, B, B, B, B, B,\n\n B, U, U, U, U, U, U, U, U, U, U, U, U, U, U, U, U, U, U, B,\n\n B, U, U, U, U, U, U, U, U, U, U, U, U, U, U, U, U, U, U, B,\n\n B, U, U, U, U, U, U, U, U, B, B, U, U, U, U, U, U, U, U, B,\n\n B, U, U, U, U, U, U, U, U, B, B, U, U, U, U, U, U, U, U, B,\n\n B, U, U, U, U, U, U, U, B, B, B, B, U, U, U, U, U, U, U, B,\n\n B, U, U, U, U, U, U, U, B, B, B, B, U, U, U, U, U, U, U, B,\n\n B, U, U, U, U, U, U, B, B, B, B, B, B, U, U, U, U, U, U, B,\n\n B, U, U, U, U, U, U, B, B, B, B, B, B, U, U, U, U, U, U, B,\n\n B, U, U, U, U, U, B, B, B, B, B, B, B, B, U, U, U, U, U, B,\n\n B, U, U, U, U, U, B, B, B, B, B, B, B, B, U, U, U, U, U, B,\n\n B, U, U, U, U, B, B, B, B, B, B, B, B, B, B, U, U, U, U, B,\n\n B, U, U, U, U, B, B, B, B, B, B, B, B, B, B, U, U, U, U, B,\n\n B, U, U, U, B, B, B, B, B, B, B, B, B, B, B, B, U, U, U, B,\n\n B, U, U, U, B, B, B, B, B, B, B, B, B, B, B, B, U, U, U, B,\n\n B, U, U, B, B, B, B, B, B, B, B, B, B, B, B, B, B, U, U, B,\n\n B, U, U, B, B, B, B, B, B, B, B, B, B, B, B, 
B, B, U, U, B,\n", "file_path": "00-buttons/src/main.rs", "rank": 37, "score": 0.17829965953485472 }, { "content": " W, W, B, U, U, U, U, U, U, U, U, U, U, U, U, U, U, B, W, W,\n\n W, B, U, U, U, B, B, B, U, U, U, B, B, B, B, B, U, U, B, W,\n\n W, B, U, U, B, U, U, U, B, U, U, U, U, B, U, U, U, U, B, W,\n\n B, U, U, U, B, U, U, U, U, U, U, U, U, B, U, U, U, U, U, B,\n\n B, U, U, U, U, B, B, B, U, U, U, U, U, B, U, U, U, U, U, B,\n\n B, U, U, U, U, U, U, U, B, U, U, U, U, B, U, U, U, U, U, B,\n\n B, U, U, U, U, U, U, U, B, U, U, U, U, B, U, U, U, U, U, B,\n\n W, B, U, U, B, U, U, U, B, U, U, U, U, B, U, U, U, U, B, W,\n\n W, B, U, U, U, B, B, B, U, U, U, U, U, B, U, U, U, U, B, W,\n\n W, W, B, U, U, U, U, U, U, U, U, U, U, U, U, U, U, B, W, W,\n\n W, W, W, B, B, B, B, B, B, B, B, B, B, B, B, B, B, W, W, W,\n\n W, W, W, W, W, W, W, W, W, W, W, W, W, W, W, W, W, W, W, W,\n\n W, W, W, W, W, W, W, W, W, W, W, W, W, W, W, W, W, W, W, W,\n\n W, W, W, W, W, W, W, W, W, W, W, W, W, W, W, W, W, W, W, W,\n\n W, W, W, W, W, W, W, W, W, W, W, W, W, W, W, W, W, W, W, W,\n\n];\n\n\n\nconst SELECT_BUTTON_ICON: [Color; 400] = [\n\n W, W, W, W, W, W, W, W, W, W, W, W, W, W, W, W, W, W, W, W,\n\n W, W, W, W, W, W, W, W, W, W, W, W, W, W, W, W, W, W, W, W,\n", "file_path": "00-buttons/src/main.rs", "rank": 38, "score": 0.17781477138161028 }, { "content": " W, B, U, U, U, U, B, B, B, B, B, B, U, U, U, U, U, U, B, W,\n\n B, U, U, U, U, U, B, B, U, U, U, U, B, B, U, U, U, U, U, B,\n\n B, U, U, U, U, U, B, B, U, U, U, U, B, B, U, U, U, U, U, B,\n\n B, U, U, U, U, U, B, B, B, B, B, B, U, U, U, U, U, U, U, B,\n\n B, U, U, U, U, U, B, B, B, B, B, B, U, U, U, U, U, U, U, B,\n\n B, U, U, U, U, U, B, B, U, U, U, U, B, B, U, U, U, U, U, B,\n\n B, U, U, U, U, U, B, B, U, U, U, U, B, B, U, U, U, U, U, B,\n\n W, B, U, U, U, U, B, B, B, B, B, B, U, U, U, U, U, U, B, W,\n\n W, B, U, U, U, U, B, B, B, B, B, B, U, U, U, U, U, U, B, W,\n\n W, W, B, U, U, U, U, U, U, U, U, U, U, U, U, U, U, B, W, W,\n\n 
W, W, B, U, U, U, U, U, U, U, U, U, U, U, U, U, U, B, W, W,\n\n W, W, W, B, B, U, U, U, U, U, U, U, U, U, U, B, B, W, W, W,\n\n W, W, W, W, W, B, B, U, U, U, U, U, U, B, B, W, W, W, W, W,\n\n W, W, W, W, W, W, W, B, B, B, B, B, B, W, W, W, W, W, W, W,\n\n];\n\n\n\nconst L_BUTTON_ICON: [Color; 400] = [\n\n W, W, W, W, W, W, W, B, B, B, B, B, B, B, B, B, B, B, B, B,\n\n W, W, W, W, W, B, B, U, U, U, U, U, U, U, U, U, U, U, U, B,\n\n W, W, W, B, B, U, U, U, U, U, U, U, U, U, U, U, U, U, U, B,\n", "file_path": "00-buttons/src/main.rs", "rank": 39, "score": 0.17781477138161028 }, { "content": " B, U, U, U, U, U, U, U, B, B, B, B, U, U, U, U, U, U, U, B,\n\n B, U, U, U, U, U, U, U, U, B, B, U, U, U, U, U, U, U, U, B,\n\n B, U, U, U, U, U, U, U, U, B, B, U, U, U, U, U, U, U, U, B,\n\n B, U, U, U, U, U, U, U, U, U, U, U, U, U, U, U, U, U, U, B,\n\n B, U, U, U, U, U, U, U, U, U, U, U, U, U, U, U, U, U, U, B,\n\n B, B, B, B, B, B, B, B, B, B, B, B, B, B, B, B, B, B, B, B,\n\n];\n\n\n\nconst LEFT_BUTTON_ICON: [Color; 400] = [\n\n B, B, B, B, B, B, B, B, B, B, B, B, B, B, B, B, B, B, B, B,\n\n B, U, U, U, U, U, U, U, U, U, U, U, U, U, U, U, U, U, U, B,\n\n B, U, U, U, U, U, U, U, U, U, U, U, U, U, U, U, U, U, U, B,\n\n B, U, U, U, U, U, U, U, U, U, U, U, U, U, U, B, B, U, U, B,\n\n B, U, U, U, U, U, U, U, U, U, U, U, U, B, B, B, B, U, U, B,\n\n B, U, U, U, U, U, U, U, U, U, U, B, B, B, B, B, B, U, U, B,\n\n B, U, U, U, U, U, U, U, U, B, B, B, B, B, B, B, B, U, U, B,\n\n B, U, U, U, U, U, U, B, B, B, B, B, B, B, B, B, B, U, U, B,\n\n B, U, U, U, U, B, B, B, B, B, B, B, B, B, B, B, B, U, U, B,\n\n B, U, U, B, B, B, B, B, B, B, B, B, B, B, B, B, B, U, U, B,\n\n B, U, U, B, B, B, B, B, B, B, B, B, B, B, B, B, B, U, U, B,\n", "file_path": "00-buttons/src/main.rs", "rank": 40, "score": 0.17781477138161028 }, { "content": " B, U, U, B, B, B, B, B, B, B, B, B, B, B, B, U, U, U, U, B,\n\n B, U, U, B, B, B, B, B, B, B, B, B, B, B, B, B, B, U, U, B,\n\n B, U, U, B, B, B, B, B, B, B, B, 
B, B, B, B, B, B, U, U, B,\n\n B, U, U, B, B, B, B, B, B, B, B, B, B, B, B, U, U, U, U, B,\n\n B, U, U, B, B, B, B, B, B, B, B, B, B, U, U, U, U, U, U, B,\n\n B, U, U, B, B, B, B, B, B, B, B, U, U, U, U, U, U, U, U, B,\n\n B, U, U, B, B, B, B, B, B, U, U, U, U, U, U, U, U, U, U, B,\n\n B, U, U, B, B, B, B, U, U, U, U, U, U, U, U, U, U, U, U, B,\n\n B, U, U, B, B, U, U, U, U, U, U, U, U, U, U, U, U, U, U, B,\n\n B, U, U, U, U, U, U, U, U, U, U, U, U, U, U, U, U, U, U, B,\n\n B, U, U, U, U, U, U, U, U, U, U, U, U, U, U, U, U, U, U, B,\n\n B, B, B, B, B, B, B, B, B, B, B, B, B, B, B, B, B, B, B, B,\n\n];\n\n\n\nconst START_BUTTON_ICON: [Color; 400] = [\n\n W, W, W, W, W, W, W, W, W, W, W, W, W, W, W, W, W, W, W, W,\n\n W, W, W, W, W, W, W, W, W, W, W, W, W, W, W, W, W, W, W, W,\n\n W, W, W, W, W, W, W, W, W, W, W, W, W, W, W, W, W, W, W, W,\n\n W, W, W, W, W, W, W, W, W, W, W, W, W, W, W, W, W, W, W, W,\n\n W, W, W, B, B, B, B, B, B, B, B, B, B, B, B, B, B, W, W, W,\n", "file_path": "00-buttons/src/main.rs", "rank": 41, "score": 0.17781477138161028 }, { "content": " B, U, U, U, U, B, B, B, B, B, B, B, B, B, B, B, B, U, U, B,\n\n B, U, U, U, U, U, U, B, B, B, B, B, B, B, B, B, B, U, U, B,\n\n B, U, U, U, U, U, U, U, U, B, B, B, B, B, B, B, B, U, U, B,\n\n B, U, U, U, U, U, U, U, U, U, U, B, B, B, B, B, B, U, U, B,\n\n B, U, U, U, U, U, U, U, U, U, U, U, U, B, B, B, B, U, U, B,\n\n B, U, U, U, U, U, U, U, U, U, U, U, U, U, U, B, B, U, U, B,\n\n B, U, U, U, U, U, U, U, U, U, U, U, U, U, U, U, U, U, U, B,\n\n B, U, U, U, U, U, U, U, U, U, U, U, U, U, U, U, U, U, U, B,\n\n B, B, B, B, B, B, B, B, B, B, B, B, B, B, B, B, B, B, B, B,\n\n];\n\n\n\nconst RIGHT_BUTTON_ICON: [Color; 400] = [\n\n B, B, B, B, B, B, B, B, B, B, B, B, B, B, B, B, B, B, B, B,\n\n B, U, U, U, U, U, U, U, U, U, U, U, U, U, U, U, U, U, U, B,\n\n B, U, U, U, U, U, U, U, U, U, U, U, U, U, U, U, U, U, U, B,\n\n B, U, U, B, B, U, U, U, U, U, U, U, U, U, U, U, U, U, U, B,\n\n B, U, U, B, B, B, B, U, U, U, U, 
U, U, U, U, U, U, U, U, B,\n\n B, U, U, B, B, B, B, B, B, U, U, U, U, U, U, U, U, U, U, B,\n\n B, U, U, B, B, B, B, B, B, B, B, U, U, U, U, U, U, U, U, B,\n\n B, U, U, B, B, B, B, B, B, B, B, B, B, U, U, U, U, U, U, B,\n", "file_path": "00-buttons/src/main.rs", "rank": 42, "score": 0.17781477138161028 }, { "content": " W, W, B, U, U, U, U, U, U, U, U, U, U, U, U, U, U, U, U, B,\n\n W, W, B, U, U, U, U, U, U, U, U, U, U, U, U, U, U, U, U, B,\n\n W, B, U, U, U, U, B, B, U, U, U, U, U, U, U, U, U, U, U, B,\n\n W, B, U, U, U, U, B, B, U, U, U, U, U, U, U, U, U, U, U, B,\n\n B, U, U, U, U, U, B, B, U, U, U, U, U, U, U, U, U, U, U, B,\n\n B, U, U, U, U, U, B, B, U, U, U, U, U, U, U, U, U, U, U, B,\n\n B, U, U, U, U, U, B, B, U, U, U, U, U, U, U, U, U, U, U, B,\n\n B, U, U, U, U, U, B, B, U, U, U, U, U, U, U, U, U, U, U, B,\n\n B, U, U, U, U, U, B, B, U, U, U, U, U, U, U, U, U, U, U, B,\n\n B, U, U, U, U, U, B, B, U, U, U, U, U, U, U, U, U, U, U, B,\n\n B, U, U, U, U, U, B, B, B, B, B, B, B, B, U, U, U, U, U, B,\n\n B, U, U, U, U, U, B, B, B, B, B, B, B, B, U, U, U, U, U, B,\n\n B, U, U, U, U, U, U, U, U, U, U, U, U, U, U, U, U, U, U, B,\n\n B, U, U, U, U, U, U, U, U, U, U, U, U, U, U, U, U, U, U, B,\n\n B, U, U, U, U, U, U, U, U, U, U, U, U, U, U, U, U, U, U, B,\n\n B, U, U, U, U, U, U, U, U, U, U, U, U, U, U, U, U, U, U, B,\n\n B, B, B, B, B, B, B, B, B, B, B, B, B, B, B, B, B, B, B, B,\n\n];\n\n\n\nconst R_BUTTON_ICON: [Color; 400] = [\n", "file_path": "00-buttons/src/main.rs", "rank": 43, "score": 0.17781477138161028 }, { "content": " B, U, U, U, U, U, B, B, B, B, B, B, B, B, U, U, U, U, U, B,\n\n B, U, U, U, U, U, B, B, B, B, B, B, B, B, U, U, U, U, U, B,\n\n B, U, U, U, U, U, B, B, U, U, U, U, B, B, U, U, U, U, U, B,\n\n B, U, U, U, U, U, B, B, U, U, U, U, B, B, U, U, U, U, U, B,\n\n W, B, U, U, U, U, B, B, U, U, U, U, B, B, U, U, U, U, B, W,\n\n W, B, U, U, U, U, B, B, U, U, U, U, B, B, U, U, U, U, B, W,\n\n W, W, B, U, U, U, U, U, U, U, U, U, U, U, U, U, U, B, W, 
W,\n\n W, W, B, U, U, U, U, U, U, U, U, U, U, U, U, U, U, B, W, W,\n\n W, W, W, B, B, U, U, U, U, U, U, U, U, U, U, B, B, W, W, W,\n\n W, W, W, W, W, B, B, U, U, U, U, U, U, B, B, W, W, W, W, W,\n\n W, W, W, W, W, W, W, B, B, B, B, B, B, W, W, W, W, W, W, W,\n\n];\n\n\n\nconst B_BUTTON_ICON: [Color; 400] = [\n\n W, W, W, W, W, W, W, B, B, B, B, B, B, W, W, W, W, W, W, W,\n\n W, W, W, W, W, B, B, U, U, U, U, U, U, B, B, W, W, W, W, W,\n\n W, W, W, B, B, U, U, U, U, U, U, U, U, U, U, B, B, W, W, W,\n\n W, W, B, U, U, U, U, U, U, U, U, U, U, U, U, U, U, B, W, W,\n\n W, W, B, U, U, U, U, U, U, U, U, U, U, U, U, U, U, B, W, W,\n\n W, B, U, U, U, U, B, B, B, B, B, B, U, U, U, U, U, U, B, W,\n", "file_path": "00-buttons/src/main.rs", "rank": 44, "score": 0.17781477138161028 } ]
Rust
src/day21/mod.rs
maxdavidson/advent-of-code-2020
25838e6c15317d1cf909350b15d4244d51f7c99e
use std::collections::{hash_map::Entry, HashMap, HashSet}; use lazy_static::lazy_static; use regex::Regex; #[derive(Debug)] struct Food<'a> { ingredients: HashSet<&'a str>, allergens: HashSet<&'a str>, } impl<'a> Food<'a> { fn parse(value: &'a str) -> Option<Self> { lazy_static! { static ref RECIPE_RE: Regex = Regex::new(r"^(?P<ingredients>.+) \(contains (?P<allergens>.+)\)$").unwrap(); } let caps = RECIPE_RE.captures(value)?; let ingredients = caps .name("ingredients") .unwrap() .as_str() .split(' ') .collect(); let allergens = caps .name("allergens") .unwrap() .as_str() .split(", ") .collect(); Some(Self { ingredients, allergens, }) } } fn find_dangerous_ingredients_helper<'a>( mut allergen_ingredients: Vec<(&'a str, HashSet<&'a str>)>, ) -> Option<impl Iterator<Item = &'a str>> { if allergen_ingredients .iter() .any(|(_, ingredients)| ingredients.is_empty()) { None } else if allergen_ingredients .iter() .all(|(_, ingredients)| ingredients.len() == 1) { allergen_ingredients.sort_unstable_by_key(|(allergen, _)| *allergen); Some( allergen_ingredients .into_iter() .flat_map(|(_, ingredient)| ingredient.into_iter()), ) } else { allergen_ingredients.sort_unstable_by_key(|(_, ingredients)| ingredients.len()); allergen_ingredients .iter() .find_map(|(allergen, ingredients)| { if ingredients.len() == 1 { None } else { ingredients.iter().find_map(|ingredient| { let mut next_allergen_ingredients = allergen_ingredients.clone(); for (other_allergen, ingredients) in next_allergen_ingredients.iter_mut() { if allergen != other_allergen { ingredients.remove(ingredient); } } find_dangerous_ingredients_helper(next_allergen_ingredients) }) } }); None } } fn find_dangerous_ingredients<'a, 'b: 'a>( foods: impl IntoIterator<Item = &'a Food<'b>>, ) -> Option<impl Iterator<Item = &'a str>> { let mut allergen_ingredients: HashMap<&str, HashSet<&str>> = HashMap::new(); for food in foods.into_iter() { for allergen in food.allergens.iter() { match allergen_ingredients.entry(allergen) { 
Entry::Vacant(entry) => { entry.insert(food.ingredients.clone()); } Entry::Occupied(mut entry) => { entry .get_mut() .retain(|ingredient| food.ingredients.contains(ingredient)); if entry.get().is_empty() { return None; } } } } } let mut allergen_ingredients: Vec<(&str, HashSet<&str>)> = allergen_ingredients.into_iter().collect(); let mut visited_allergens: HashSet<&str> = HashSet::new(); while let Some((allergen, ingredient)) = allergen_ingredients .iter() .find_map(|(allergen, ingredients)| { if !visited_allergens.contains(allergen) && ingredients.len() == 1 { Some((*allergen, ingredients.iter().copied().next()?)) } else { None } }) { visited_allergens.insert(allergen); for (other_allergen, ingredients) in allergen_ingredients.iter_mut() { if allergen != *other_allergen { ingredients.remove(ingredient); } } } find_dangerous_ingredients_helper(allergen_ingredients) } pub fn part1(input: &str) -> usize { let foods: Vec<Food> = input.lines().filter_map(Food::parse).collect(); let all_ingredients: HashSet<&str> = foods .iter() .flat_map(|food| food.ingredients.iter().copied()) .collect(); let allergen_ingredients: HashSet<&str> = find_dangerous_ingredients(&foods) .expect("No solution found!") .collect(); let safe_ingredients = all_ingredients .into_iter() .filter(|ingredient| !allergen_ingredients.contains(ingredient)); safe_ingredients .map(|ingredient| { foods .iter() .filter(|food| food.ingredients.contains(ingredient)) .count() }) .sum() } pub fn part2(input: &str) -> String { let foods: Vec<Food> = input.lines().filter_map(Food::parse).collect(); let allergen_ingredients = find_dangerous_ingredients(&foods).expect("No solution found!"); allergen_ingredients.collect::<Vec<_>>().join(",") } #[cfg(test)] mod tests { use super::*; static TEST_INPUT: &str = include_str!("test_input.txt"); static INPUT: &str = include_str!("input.txt"); #[test] fn part1_works() { assert_eq!(part1(TEST_INPUT), 5); assert_eq!(part1(INPUT), 2072); } #[test] fn part2_works() { 
assert_eq!(part2(TEST_INPUT), "mxmxvkd,sqjhc,fvjkl"); assert_eq!( part2(INPUT), "fdsfpg,jmvxx,lkv,cbzcgvc,kfgln,pqqks,pqrvc,lclnj" ); } }
use std::collections::{hash_map::Entry, HashMap, HashSet}; use lazy_static::lazy_static; use regex::Regex; #[derive(Debug)] struct Food<'a> { ingredients: HashSet<&'a str>, allergens: HashSet<&'a str>, } impl<'a> Food<'a> {
} fn find_dangerous_ingredients_helper<'a>( mut allergen_ingredients: Vec<(&'a str, HashSet<&'a str>)>, ) -> Option<impl Iterator<Item = &'a str>> { if allergen_ingredients .iter() .any(|(_, ingredients)| ingredients.is_empty()) { None } else if allergen_ingredients .iter() .all(|(_, ingredients)| ingredients.len() == 1) { allergen_ingredients.sort_unstable_by_key(|(allergen, _)| *allergen); Some( allergen_ingredients .into_iter() .flat_map(|(_, ingredient)| ingredient.into_iter()), ) } else { allergen_ingredients.sort_unstable_by_key(|(_, ingredients)| ingredients.len()); allergen_ingredients .iter() .find_map(|(allergen, ingredients)| { if ingredients.len() == 1 { None } else { ingredients.iter().find_map(|ingredient| { let mut next_allergen_ingredients = allergen_ingredients.clone(); for (other_allergen, ingredients) in next_allergen_ingredients.iter_mut() { if allergen != other_allergen { ingredients.remove(ingredient); } } find_dangerous_ingredients_helper(next_allergen_ingredients) }) } }); None } } fn find_dangerous_ingredients<'a, 'b: 'a>( foods: impl IntoIterator<Item = &'a Food<'b>>, ) -> Option<impl Iterator<Item = &'a str>> { let mut allergen_ingredients: HashMap<&str, HashSet<&str>> = HashMap::new(); for food in foods.into_iter() { for allergen in food.allergens.iter() { match allergen_ingredients.entry(allergen) { Entry::Vacant(entry) => { entry.insert(food.ingredients.clone()); } Entry::Occupied(mut entry) => { entry .get_mut() .retain(|ingredient| food.ingredients.contains(ingredient)); if entry.get().is_empty() { return None; } } } } } let mut allergen_ingredients: Vec<(&str, HashSet<&str>)> = allergen_ingredients.into_iter().collect(); let mut visited_allergens: HashSet<&str> = HashSet::new(); while let Some((allergen, ingredient)) = allergen_ingredients .iter() .find_map(|(allergen, ingredients)| { if !visited_allergens.contains(allergen) && ingredients.len() == 1 { Some((*allergen, ingredients.iter().copied().next()?)) } else { None } }) { 
visited_allergens.insert(allergen); for (other_allergen, ingredients) in allergen_ingredients.iter_mut() { if allergen != *other_allergen { ingredients.remove(ingredient); } } } find_dangerous_ingredients_helper(allergen_ingredients) } pub fn part1(input: &str) -> usize { let foods: Vec<Food> = input.lines().filter_map(Food::parse).collect(); let all_ingredients: HashSet<&str> = foods .iter() .flat_map(|food| food.ingredients.iter().copied()) .collect(); let allergen_ingredients: HashSet<&str> = find_dangerous_ingredients(&foods) .expect("No solution found!") .collect(); let safe_ingredients = all_ingredients .into_iter() .filter(|ingredient| !allergen_ingredients.contains(ingredient)); safe_ingredients .map(|ingredient| { foods .iter() .filter(|food| food.ingredients.contains(ingredient)) .count() }) .sum() } pub fn part2(input: &str) -> String { let foods: Vec<Food> = input.lines().filter_map(Food::parse).collect(); let allergen_ingredients = find_dangerous_ingredients(&foods).expect("No solution found!"); allergen_ingredients.collect::<Vec<_>>().join(",") } #[cfg(test)] mod tests { use super::*; static TEST_INPUT: &str = include_str!("test_input.txt"); static INPUT: &str = include_str!("input.txt"); #[test] fn part1_works() { assert_eq!(part1(TEST_INPUT), 5); assert_eq!(part1(INPUT), 2072); } #[test] fn part2_works() { assert_eq!(part2(TEST_INPUT), "mxmxvkd,sqjhc,fvjkl"); assert_eq!( part2(INPUT), "fdsfpg,jmvxx,lkv,cbzcgvc,kfgln,pqqks,pqrvc,lclnj" ); } }
fn parse(value: &'a str) -> Option<Self> { lazy_static! { static ref RECIPE_RE: Regex = Regex::new(r"^(?P<ingredients>.+) \(contains (?P<allergens>.+)\)$").unwrap(); } let caps = RECIPE_RE.captures(value)?; let ingredients = caps .name("ingredients") .unwrap() .as_str() .split(' ') .collect(); let allergens = caps .name("allergens") .unwrap() .as_str() .split(", ") .collect(); Some(Self { ingredients, allergens, }) }
function_block-full_function
[ { "content": "struct Data<'a>(pub HashMap<&'a str, HashMap<&'a str, usize>>);\n\n\n\nimpl<'a> Data<'a> {\n\n fn parse(input: &'a str) -> Self {\n\n lazy_static! {\n\n static ref RE_1: Regex = Regex::new(r\"^(?P<color>[a-z ]+) bags? contain\").unwrap();\n\n static ref RE_2: Regex =\n\n Regex::new(r\"(?P<count>\\d+) (?P<color>[a-z ]+) bags?\").unwrap();\n\n }\n\n\n\n Data(\n\n input\n\n .lines()\n\n .map(|line| {\n\n let color = RE_1.captures(line).unwrap().name(\"color\").unwrap().as_str();\n\n let color_counts = RE_2\n\n .captures_iter(line)\n\n .map(|c| {\n\n let color = c.name(\"color\").unwrap().as_str();\n\n let count = c.name(\"count\").unwrap().as_str().parse().unwrap();\n\n (color, count)\n\n })\n\n .collect();\n\n (color, color_counts)\n\n })\n\n .collect(),\n\n )\n\n }\n\n}\n\n\n", "file_path": "src/day07/mod.rs", "rank": 0, "score": 132449.70218400113 }, { "content": "fn parse_passports(input: &str) -> impl Iterator<Item = HashMap<&str, &str>> + '_ {\n\n input.split(\"\\n\\n\").map(|chunks| {\n\n chunks\n\n .split_whitespace()\n\n .map(|chunk| {\n\n let mut it = chunk.splitn(2, ':');\n\n (it.next().unwrap(), it.next().unwrap())\n\n })\n\n .collect()\n\n })\n\n}\n\n\n\nstatic VALID_FIELDS: [&str; 7] = [\"byr\", \"iyr\", \"eyr\", \"hgt\", \"hcl\", \"ecl\", \"pid\"];\n\n\n", "file_path": "src/day04/mod.rs", "rank": 1, "score": 85832.70469099177 }, { "content": "fn get_black_tiles(input: &str) -> HashSet<(i16, i16)> {\n\n let mut black_tiles = HashSet::new();\n\n\n\n for line in input.lines() {\n\n let pos = parse_line(line).fold((0, 0), |pos, dir| dir.step(pos));\n\n\n\n if !black_tiles.remove(&pos) {\n\n black_tiles.insert(pos);\n\n }\n\n }\n\n\n\n black_tiles\n\n}\n\n\n", "file_path": "src/day24/mod.rs", "rank": 2, "score": 70377.4127216631 }, { "content": "#[derive(Debug)]\n\nstruct Program {\n\n pub mask: Mask,\n\n pub memory_init: Box<[(u64, u64)]>,\n\n}\n\n\n", "file_path": "src/day14/mod.rs", "rank": 3, "score": 64074.7993327115 }, { "content": 
"struct Notes {\n\n departure_timestamp: u64,\n\n bus_ids: Box<[Option<u64>]>,\n\n}\n\n\n\nimpl<'a> From<&'a str> for Notes {\n\n fn from(s: &'a str) -> Self {\n\n let mut lines = s.lines();\n\n let first_line = lines.next().unwrap();\n\n let second_line = lines.next().unwrap();\n\n Self {\n\n departure_timestamp: first_line.parse().unwrap(),\n\n bus_ids: second_line.split(',').map(|s| s.parse().ok()).collect(),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/day13/mod.rs", "rank": 4, "score": 64074.7993327115 }, { "content": "struct CPU {\n\n program: Box<[Instruction]>,\n\n visited: HashSet<usize>,\n\n pc: usize,\n\n acc: i64,\n\n}\n\n\n", "file_path": "src/day08/mod.rs", "rank": 5, "score": 64074.7993327115 }, { "content": "struct Game {\n\n pub current_cup: Cup,\n\n pub next_cups: Box<[Cup]>,\n\n}\n\n\n\nimpl Game {\n\n pub fn new(initial_cups: &[Cup], cups_count: usize) -> Self {\n\n let mut cups = Vec::with_capacity(cups_count);\n\n\n\n cups.extend_from_slice(initial_cups);\n\n cups.extend(initial_cups.len()..cups_count);\n\n\n\n let mut next_cups = vec![0; cups_count];\n\n\n\n for i in 0..cups.len() {\n\n let cup = cups[i];\n\n let next_cup = cups[(i + 1) % cups.len()];\n\n next_cups[cup] = next_cup;\n\n }\n\n\n", "file_path": "src/day23/mod.rs", "rank": 6, "score": 64074.7993327115 }, { "content": "#[derive(Eq, PartialEq)]\n\nstruct SeatMap {\n\n tiles: Box<[Option<Seat>]>,\n\n column_count: usize,\n\n}\n\n\n\nimpl<'a> From<&'a str> for SeatMap {\n\n fn from(input: &'a str) -> Self {\n\n let tiles: Vec<_> = input\n\n .lines()\n\n .flat_map(|line| {\n\n line.chars().map(move |c| match c {\n\n 'L' => Some(Seat::Empty),\n\n '#' => Some(Seat::Occupied),\n\n '.' 
=> None,\n\n _ => panic!(\"Invalid char: {}\", c),\n\n })\n\n })\n\n .collect();\n\n\n\n let row_count = input.lines().count();\n", "file_path": "src/day11/mod.rs", "rank": 7, "score": 62925.44098683528 }, { "content": "struct Part2Helper {\n\n cache: RefCell<HashMap<u64, u64>>,\n\n successors: HashMap<u64, Vec<u64>>,\n\n}\n\n\n\nimpl Part2Helper {\n\n fn path_count_from_node(&self, node: u64) -> u64 {\n\n if let Some(count) = self.cache.borrow().get(&node) {\n\n return *count;\n\n }\n\n\n\n if let Some(nodes) = self.successors.get(&node) {\n\n let count = nodes\n\n .iter()\n\n .rfold(0, |count, node| count + self.path_count_from_node(*node));\n\n\n\n self.cache.borrow_mut().insert(node, count);\n\n\n\n count\n\n } else {\n\n 1\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/day10/mod.rs", "rank": 8, "score": 62925.44098683528 }, { "content": "#[derive(Debug, Clone)]\n\nstruct Rule<'a> {\n\n name: &'a str,\n\n ranges: [RangeInclusive<usize>; 2],\n\n}\n\n\n\nimpl Rule<'_> {\n\n pub fn name(&self) -> &str {\n\n self.name\n\n }\n\n\n\n pub fn matches(&self, value: usize) -> bool {\n\n self.ranges.iter().any(|range| range.contains(&value))\n\n }\n\n}\n\n\n", "file_path": "src/day16/mod.rs", "rank": 9, "score": 62266.01039927991 }, { "content": "#[derive(Debug)]\n\nstruct Notes<'a> {\n\n pub rules: Vec<Rule<'a>>,\n\n pub my_ticket: Ticket,\n\n pub nearby_tickets: Vec<Ticket>,\n\n}\n\n\n\nimpl<'a> From<&'a str> for Notes<'a> {\n\n fn from(input: &'a str) -> Self {\n\n lazy_static! 
{\n\n static ref RULES_RE: Regex = Regex::new(r\"(.+): (\\d+)-(\\d+) or (\\d+)-(\\d+)\").unwrap();\n\n static ref TICKET_RE: Regex = Regex::new(r\"^(?:\\d+,?)+$\").unwrap();\n\n }\n\n\n\n let rules = RULES_RE\n\n .captures_iter(input)\n\n .map(|caps| Rule {\n\n name: caps.get(1).unwrap().as_str(),\n\n ranges: [\n\n caps[2].parse().unwrap()..=caps[3].parse().unwrap(),\n\n caps[4].parse().unwrap()..=caps[5].parse().unwrap(),\n", "file_path": "src/day16/mod.rs", "rank": 10, "score": 62266.01039927991 }, { "content": "#[derive(Debug)]\n\nstruct Entry<'a> {\n\n a: usize,\n\n b: usize,\n\n letter: char,\n\n password: &'a str,\n\n}\n\n\n", "file_path": "src/day02/mod.rs", "rank": 11, "score": 62266.01039927991 }, { "content": "struct MapView<'a> {\n\n map: &'a mut [Pixel],\n\n size: usize,\n\n transposed: bool,\n\n rotation: Rotation,\n\n}\n\n\n\nimpl MapView<'_> {\n\n fn index(&self, mut x: usize, mut y: usize) -> usize {\n\n if self.transposed {\n\n std::mem::swap(&mut x, &mut y);\n\n }\n\n\n\n match self.rotation {\n\n Rotation::None => {}\n\n Rotation::Right => {\n\n let prev_x = x;\n\n x = self.size - 1 - y;\n\n y = prev_x;\n\n }\n", "file_path": "src/day20/mod.rs", "rank": 13, "score": 61116.6520534037 }, { "content": "#[derive(Debug, Clone)]\n\nstruct TileView<'a> {\n\n tile: &'a Tile,\n\n transposed: bool,\n\n rotation: Rotation,\n\n}\n\n\n\nimpl<'a> std::ops::Index<[usize; 2]> for TileView<'a> {\n\n type Output = bool;\n\n\n\n fn index(&self, [mut x, mut y]: [usize; 2]) -> &Self::Output {\n\n if self.transposed {\n\n std::mem::swap(&mut x, &mut y);\n\n }\n\n\n\n match self.rotation {\n\n Rotation::None => {}\n\n Rotation::Right => {\n\n let prev_x = x;\n\n x = TILE_SIZE - 1 - y;\n\n y = prev_x;\n", "file_path": "src/day20/mod.rs", "rank": 14, "score": 61116.6520534037 }, { "content": "#[derive(Copy, Clone)]\n\nstruct Bitset(usize);\n\n\n\nimpl std::fmt::Debug for Bitset {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n write!(f, 
\"{:064b}\", self.0)\n\n }\n\n}\n\n\n\nimpl Bitset {\n\n pub fn new() -> Self {\n\n Bitset(0)\n\n }\n\n\n\n pub fn len(self) -> usize {\n\n self.0.count_ones() as usize\n\n }\n\n\n\n pub fn inversed(self) -> Self {\n\n Self(!self.0)\n\n }\n", "file_path": "src/day16/mod.rs", "rank": 15, "score": 61116.6520534037 }, { "content": "fn parse_lines<T: std::str::FromStr>(input: &str) -> impl Iterator<Item = T> + '_ {\n\n input.trim().lines().filter_map(|line| line.parse().ok())\n\n}\n\n\n", "file_path": "src/day01/mod.rs", "rank": 16, "score": 58774.140563593646 }, { "content": "#[derive(Debug, Copy, Clone, Eq, PartialEq)]\n\nstruct Vec2<T>(pub [T; 2]);\n\n\n\nimpl<T, U> Add<Vec2<U>> for Vec2<T>\n\nwhere\n\n T: Add<U>,\n\n{\n\n type Output = Vec2<T::Output>;\n\n\n\n fn add(self, rhs: Vec2<U>) -> Self::Output {\n\n let Vec2([x0, y0]) = self;\n\n let Vec2([x1, y1]) = rhs;\n\n Vec2([x0 + x1, y0 + y1])\n\n }\n\n}\n\n\n\nimpl<T, U> AddAssign<Vec2<U>> for Vec2<T>\n\nwhere\n\n T: Add<U, Output = T> + Copy,\n\n{\n\n fn add_assign(&mut self, rhs: Vec2<U>) {\n", "file_path": "src/day12/mod.rs", "rank": 17, "score": 54665.152971806194 }, { "content": "#[derive(Debug, Copy, Clone, Eq, PartialEq)]\n\nstruct Mat2<T>(pub [[T; 2]; 2]);\n\n\n\nimpl<T, U> Mul<Vec2<U>> for Mat2<T>\n\nwhere\n\n T: Mul<U>,\n\n U: Copy,\n\n T::Output: Add,\n\n{\n\n type Output = Vec2<<T::Output as Add>::Output>;\n\n\n\n fn mul(self, rhs: Vec2<U>) -> Self::Output {\n\n let Mat2([[a00, a01], [a10, a11]]) = self;\n\n let Vec2([x, y]) = rhs;\n\n Vec2([a00 * x + a01 * y, a10 * x + a11 * y])\n\n }\n\n}\n\n\n", "file_path": "src/day12/mod.rs", "rank": 18, "score": 53470.99388432427 }, { "content": "fn parse_input(input: &str) -> RuleMap {\n\n input.lines().filter_map(parse_rule).collect()\n\n}\n\n\n", "file_path": "src/day19/mod.rs", "rank": 19, "score": 49216.83535776123 }, { "content": "fn boarding_pass(input: &str) -> BoardingPass {\n\n let mut row_from = 0;\n\n let mut row_to = 127;\n\n\n\n let mut column_from = 
0;\n\n let mut column_to = 7;\n\n\n\n for c in input.chars() {\n\n match c {\n\n 'B' => row_from = (row_to + row_from + 1) / 2,\n\n 'F' => row_to = (row_to + row_from) / 2,\n\n 'R' => column_from = (column_from + column_to + 1) / 2,\n\n 'L' => column_to = (column_from + column_to) / 2,\n\n _ => {}\n\n }\n\n }\n\n\n\n (row_to, column_to)\n\n}\n\n\n", "file_path": "src/day05/mod.rs", "rank": 20, "score": 49216.83535776123 }, { "content": "pub fn part1(input: &str) -> usize {\n\n let mut active_cubes: HashSet<_> = parse_input(input).map(|(x, y)| (x, y, 0)).collect();\n\n\n\n for _ in 0..6 {\n\n let mut next_active_cubes = HashSet::new();\n\n let mut inactive_neighbor_cubes = HashSet::new();\n\n\n\n for cube in active_cubes.iter().copied() {\n\n let mut active_neighbor_count = 0;\n\n\n\n for dx in -1..=1 {\n\n for dy in -1..=1 {\n\n for dz in -1..=1 {\n\n if (dx, dy, dz) != (0, 0, 0) {\n\n let (x, y, z) = cube;\n\n let neighbor_cube = (x + dx, y + dy, z + dz);\n\n\n\n if active_cubes.contains(&neighbor_cube) {\n\n active_neighbor_count += 1;\n\n } else {\n", "file_path": "src/day17/mod.rs", "rank": 21, "score": 48893.59459339059 }, { "content": "pub fn part2(input: &str) -> Number {\n\n let operator_precedence = |operator| match operator {\n\n Operator::Add => 2,\n\n Operator::Multiply => 1,\n\n };\n\n\n\n input\n\n .lines()\n\n .map(|line| evaluate(line, operator_precedence))\n\n .sum()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n static INPUT: &str = include_str!(\"input.txt\");\n\n\n\n #[test]\n\n fn part1_works() {\n", "file_path": "src/day18/mod.rs", "rank": 23, "score": 48893.59459339059 }, { "content": "pub fn part1(input: &str) -> usize {\n\n let numbers: Vec<usize> = input.split(',').map(|s| s.parse().unwrap()).collect();\n\n\n\n get_number(&numbers, 2020 - 1)\n\n}\n\n\n", "file_path": "src/day15/mod.rs", "rank": 24, "score": 48893.59459339059 }, { "content": "pub fn part1(input: &str) -> usize {\n\n let black_tiles = 
get_black_tiles(input);\n\n\n\n black_tiles.len()\n\n}\n\n\n", "file_path": "src/day24/mod.rs", "rank": 25, "score": 48893.59459339059 }, { "content": "pub fn part2(input: &str) -> usize {\n\n let Notes {\n\n rules,\n\n my_ticket,\n\n nearby_tickets,\n\n } = Notes::from(input);\n\n\n\n let valid_nearby_tickets: Vec<_> = nearby_tickets\n\n .into_iter()\n\n .filter(|ticket| {\n\n ticket\n\n .iter()\n\n .all(|value| rules.iter().any(|rule| rule.matches(*value)))\n\n })\n\n .collect();\n\n\n\n let mut allowed_rules_per_position: Vec<_> = (0..my_ticket.len())\n\n .map(|position| {\n\n let mut allowed_rules = Bitset::new();\n\n\n", "file_path": "src/day16/mod.rs", "rank": 27, "score": 48893.59459339059 }, { "content": "pub fn part2(input: &str) -> usize {\n\n parse_passports(input)\n\n .filter(|passport| {\n\n VALID_FIELDS.iter().all(|field| {\n\n if let Some(value) = passport.get(*field) {\n\n match *field {\n\n \"byr\" => {\n\n let num_value: u16 = value.parse().unwrap();\n\n 1920 <= num_value && num_value <= 2002\n\n }\n\n \"iyr\" => {\n\n let num_value: u16 = value.parse().unwrap();\n\n 2010 <= num_value && num_value <= 2020\n\n }\n\n \"eyr\" => {\n\n let num_value: u16 = value.parse().unwrap();\n\n 2020 <= num_value && num_value <= 2030\n\n }\n\n \"hgt\" => {\n\n lazy_static! 
{\n", "file_path": "src/day04/mod.rs", "rank": 28, "score": 48893.59459339059 }, { "content": "pub fn part2(input: &str) -> usize {\n\n let mut rules = parse_input(input);\n\n\n\n rules.extend(parse_rule(\"8: 42 | 42 8\"));\n\n rules.extend(parse_rule(\"11: 42 31 | 42 11 31\"));\n\n\n\n let validate = create_validator(&rules);\n\n\n\n input.lines().filter(|line| validate(*line)).count()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n static TEST_INPUT_0: &str = include_str!(\"test_input_0.txt\");\n\n static TEST_INPUT_1: &str = include_str!(\"test_input_1.txt\");\n\n static INPUT: &str = include_str!(\"input.txt\");\n\n\n\n #[test]\n", "file_path": "src/day19/mod.rs", "rank": 29, "score": 48893.59459339059 }, { "content": "pub fn part1(input: &str) -> i64 {\n\n let program: Vec<_> = input.lines().map(|line| line.parse().unwrap()).collect();\n\n\n\n let mut cpu = CPU::from(program);\n\n\n\n loop {\n\n match cpu.step() {\n\n CPUState::Running => {}\n\n CPUState::Terminated(_) => panic!(\"Shouldn't terminate!\"),\n\n CPUState::Looping(val) => break val,\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/day08/mod.rs", "rank": 30, "score": 48893.59459339059 }, { "content": "pub fn part2(input: &str) -> usize {\n\n parse_entries(input)\n\n .filter(|entry| {\n\n let match_a = entry.letter == entry.password.chars().nth(entry.a - 1).unwrap();\n\n let match_b = entry.letter == entry.password.chars().nth(entry.b - 1).unwrap();\n\n\n\n match_a && !match_b || !match_a && match_b\n\n })\n\n .count()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n static TEST_INPUT: &str = include_str!(\"test_input.txt\");\n\n static INPUT: &str = include_str!(\"input.txt\");\n\n\n\n #[test]\n\n fn part1_works() {\n", "file_path": "src/day02/mod.rs", "rank": 31, "score": 48893.59459339059 }, { "content": "pub fn part1(input: &str) -> usize {\n\n let notes = Notes::from(input);\n\n\n\n notes\n\n .nearby_tickets\n\n .iter()\n\n .flat_map(|ticket| 
ticket.iter().copied())\n\n .filter(|value| !notes.rules.iter().any(|rule| rule.matches(*value)))\n\n .sum()\n\n}\n\n\n", "file_path": "src/day16/mod.rs", "rank": 32, "score": 48893.59459339059 }, { "content": "pub fn part1(input: &str) -> usize {\n\n input\n\n .lines()\n\n .map(boarding_pass)\n\n .map(boarding_pass_id)\n\n .max()\n\n .unwrap()\n\n}\n\n\n", "file_path": "src/day05/mod.rs", "rank": 33, "score": 48893.59459339059 }, { "content": "fn adapters(input: &str) -> Box<[u64]> {\n\n let mut numbers: Vec<u64> = input.lines().map(|line| line.parse().unwrap()).collect();\n\n numbers.push(0);\n\n numbers.push(numbers.iter().max().unwrap() + 3);\n\n numbers.sort_unstable();\n\n numbers.into()\n\n}\n\n\n", "file_path": "src/day10/mod.rs", "rank": 34, "score": 48893.59459339059 }, { "content": "pub fn part2(input: &str) -> usize {\n\n let data = Data::parse(input);\n\n\n\n impl<'a> Data<'a> {\n\n fn count_bags(&self, color: &'a str) -> usize {\n\n self.0\n\n .get(color)\n\n .map(|color_counts| {\n\n color_counts\n\n .iter()\n\n .map(|(color, count)| count * (1 + self.count_bags(color)))\n\n .sum()\n\n })\n\n .unwrap_or(0)\n\n }\n\n }\n\n\n\n data.count_bags(\"shiny gold\")\n\n}\n\n\n", "file_path": "src/day07/mod.rs", "rank": 35, "score": 48893.59459339059 }, { "content": "pub fn part2(input: &str) -> u64 {\n\n let initial_cups: Vec<Cup> = parse_cups(input).collect();\n\n\n\n let mut game = Game::new(&initial_cups, 1_000_000);\n\n\n\n for _ in 0..10_000_000 {\n\n game.make_move();\n\n }\n\n\n\n let cup1 = game.next_cups[0];\n\n let cup2 = game.next_cups[cup1];\n\n\n\n ((cup1 as u64) + 1) * ((cup2 as u64) + 1)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n", "file_path": "src/day23/mod.rs", "rank": 36, "score": 48893.59459339059 }, { "content": "pub fn part2(input: &str) -> usize {\n\n input\n\n .split(\"\\n\\n\")\n\n .map(|group| {\n\n let mut it = group\n\n .trim()\n\n .split('\\n')\n\n .map(|line| 
line.chars().collect::<HashSet<_>>());\n\n\n\n let mut intersection = it.next().unwrap();\n\n\n\n for chars in it {\n\n intersection.retain(|c| chars.contains(c));\n\n }\n\n\n\n intersection.len()\n\n })\n\n .sum()\n\n}\n\n\n", "file_path": "src/day06/mod.rs", "rank": 37, "score": 48893.59459339059 }, { "content": "pub fn part1(input: &str) -> u64 {\n\n let mut memory = HashMap::new();\n\n\n\n for program in programs(input) {\n\n for (address, value) in program.memory_init.iter().copied() {\n\n let masked_value = apply_mask(program.mask, value);\n\n memory.insert(address, masked_value);\n\n }\n\n }\n\n\n\n memory.values().sum()\n\n}\n\n\n", "file_path": "src/day14/mod.rs", "rank": 38, "score": 48893.59459339059 }, { "content": "pub fn part2(input: &str) -> usize {\n\n let mut black_tiles = get_black_tiles(input);\n\n let mut next_black_tiles = HashSet::new();\n\n let mut white_neighbor_tiles = HashSet::new();\n\n\n\n for _ in 0..100 {\n\n for pos in black_tiles.iter().copied() {\n\n let mut black_neighbor_count = 0;\n\n\n\n for dir in ALL_DIRECTIONS.iter() {\n\n let neighbor_pos = dir.step(pos);\n\n\n\n if black_tiles.contains(&neighbor_pos) {\n\n black_neighbor_count += 1;\n\n } else {\n\n white_neighbor_tiles.insert(neighbor_pos);\n\n }\n\n }\n\n\n\n if black_neighbor_count == 1 || black_neighbor_count == 2 {\n", "file_path": "src/day24/mod.rs", "rank": 39, "score": 48893.59459339059 }, { "content": "pub fn part2(input: &str) -> usize {\n\n count_trees(input, 1, 1)\n\n * count_trees(input, 3, 1)\n\n * count_trees(input, 5, 1)\n\n * count_trees(input, 7, 1)\n\n * count_trees(input, 1, 2)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n static TEST_INPUT: &str = include_str!(\"test_input.txt\");\n\n static INPUT: &str = include_str!(\"input.txt\");\n\n\n\n #[test]\n\n fn part1_works() {\n\n assert_eq!(part1(TEST_INPUT), 7);\n\n assert_eq!(part1(INPUT), 270);\n\n }\n\n\n\n #[test]\n\n fn part2_works() {\n\n assert_eq!(part2(TEST_INPUT), 336);\n\n 
assert_eq!(part2(INPUT), 2_122_848_000);\n\n }\n\n}\n", "file_path": "src/day03/mod.rs", "rank": 40, "score": 48893.59459339059 }, { "content": "pub fn part2(input: &str) -> usize {\n\n let (deck1, deck2) = parse_decks(input);\n\n let result = play_recursive_game(deck1, deck2).expect(\"Game failed :(\");\n\n\n\n result.score()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n static TEST_INPUT: &str = include_str!(\"test_input.txt\");\n\n static INPUT: &str = include_str!(\"input.txt\");\n\n\n\n #[test]\n\n fn part1_works() {\n\n assert_eq!(part1(TEST_INPUT), 306);\n\n assert_eq!(part1(INPUT), 32_083);\n\n }\n\n\n\n #[test]\n\n fn part2_works() {\n\n assert_eq!(part2(TEST_INPUT), 291);\n\n assert_eq!(part2(INPUT), 35_495);\n\n }\n\n}\n", "file_path": "src/day22/mod.rs", "rank": 41, "score": 48893.59459339059 }, { "content": "pub fn part2(input: &str) -> usize {\n\n let mut seats = SeatMap::from(input);\n\n\n\n loop {\n\n let next_seats = seats.transformed(|seat, pos| match seat {\n\n Seat::Empty if seats.visible_occupied_seat_count(pos) == 0 => Seat::Occupied,\n\n Seat::Occupied if seats.visible_occupied_seat_count(pos) >= 5 => Seat::Empty,\n\n seat => seat,\n\n });\n\n\n\n if seats == next_seats {\n\n break seats\n\n .tiles\n\n .iter()\n\n .filter(|seat| matches!(seat, Some(Seat::Occupied)))\n\n .count();\n\n }\n\n\n\n seats = next_seats;\n\n }\n", "file_path": "src/day11/mod.rs", "rank": 42, "score": 48893.59459339059 }, { "content": "pub fn part1(input: &str) -> u64 {\n\n let notes = Notes::from(input);\n\n\n\n for timestamp in notes.departure_timestamp.. 
{\n\n for bus_id in notes.bus_ids.iter().flatten().copied() {\n\n if timestamp % bus_id == 0 {\n\n return (timestamp - notes.departure_timestamp) * bus_id;\n\n }\n\n }\n\n }\n\n\n\n panic!(\"No solution found\");\n\n}\n\n\n", "file_path": "src/day13/mod.rs", "rank": 43, "score": 48893.59459339059 }, { "content": "pub fn part1(input: &str) -> usize {\n\n count_trees(input, 3, 1)\n\n}\n\n\n", "file_path": "src/day03/mod.rs", "rank": 44, "score": 48893.59459339059 }, { "content": "pub fn part2(input: &str) -> usize {\n\n let mut active_cubes: HashSet<_> = parse_input(input).map(|(x, y)| (x, y, 0, 0)).collect();\n\n\n\n for _ in 0..6 {\n\n let mut next_active_cubes = HashSet::new();\n\n let mut inactive_neighbor_cubes = HashSet::new();\n\n\n\n for cube in active_cubes.iter().copied() {\n\n let mut active_neighbor_count = 0;\n\n\n\n for dx in -1..=1 {\n\n for dy in -1..=1 {\n\n for dz in -1..=1 {\n\n for dw in -1..=1 {\n\n if (dx, dy, dz, dw) != (0, 0, 0, 0) {\n\n let (x, y, z, w) = cube;\n\n let neighbor_cube = (x + dx, y + dy, z + dz, w + dw);\n\n\n\n if active_cubes.contains(&neighbor_cube) {\n\n active_neighbor_count += 1;\n", "file_path": "src/day17/mod.rs", "rank": 45, "score": 48893.59459339059 }, { "content": "pub fn part2(input: &str) -> usize {\n\n let boarding_passes_by_id: HashMap<_, _> = input\n\n .lines()\n\n .map(|input| {\n\n let pass = boarding_pass(input);\n\n (boarding_pass_id(pass), pass)\n\n })\n\n .collect();\n\n\n\n let (first_row, last_row) = boarding_passes_by_id\n\n .values()\n\n .map(|(row, _)| row)\n\n .minmax()\n\n .into_option()\n\n .unwrap();\n\n\n\n for row in (first_row + 1)..=(last_row - 1) {\n\n for column in 0..=7 {\n\n let id = boarding_pass_id((row, column));\n\n if !boarding_passes_by_id.contains_key(&id)\n", "file_path": "src/day05/mod.rs", "rank": 46, "score": 48893.59459339059 }, { "content": "pub fn part1(input: &str) -> usize {\n\n let data = Data::parse(input);\n\n\n\n impl<'a> Data<'a> {\n\n fn contains_shiny_gold(&self, 
color: &'a str) -> bool {\n\n self.0\n\n .get(color)\n\n .map(|color_counts| {\n\n color_counts.contains_key(\"shiny gold\")\n\n || color_counts\n\n .keys()\n\n .any(|color| self.contains_shiny_gold(color))\n\n })\n\n .unwrap_or(false)\n\n }\n\n }\n\n\n\n data.0\n\n .keys()\n\n .filter(|key| data.contains_shiny_gold(key))\n\n .count()\n\n}\n\n\n", "file_path": "src/day07/mod.rs", "rank": 47, "score": 48893.59459339059 }, { "content": "pub fn part1(input: &str) -> i32 {\n\n let rotations = [\n\n Mat2([[0, 1], [1, 0]]), // 0 degress\n\n Mat2([[0, -1], [1, 0]]), // 90 degress CCW\n\n Mat2([[-1, 0], [0, -1]]), // 180 degress\n\n Mat2([[0, 1], [-1, 0]]), // 270 degress CCW\n\n ];\n\n\n\n let mut position = Vec2([0, 0]);\n\n let mut direction = Vec2([1, 0]);\n\n\n\n for (c, val) in instructions(input) {\n\n match c {\n\n 'N' => position += Vec2([0, 1]) * val,\n\n 'S' => position += Vec2([0, -1]) * val,\n\n 'E' => position += Vec2([1, 0]) * val,\n\n 'W' => position += Vec2([-1, 0]) * val,\n\n 'L' => direction = rotations[((val / 90) % 4) as usize] * direction,\n\n 'R' => direction = rotations[((-val / 90) % 4 + 4) as usize] * direction,\n\n 'F' => position += direction * val,\n\n _ => {}\n\n }\n\n }\n\n\n\n let Vec2([x, y]) = position;\n\n x.abs() + y.abs()\n\n}\n\n\n", "file_path": "src/day12/mod.rs", "rank": 48, "score": 48893.59459339059 }, { "content": "pub fn part2(input: &str) -> usize {\n\n let numbers: Vec<usize> = input.split(',').map(|s| s.parse().unwrap()).collect();\n\n\n\n get_number(&numbers, 30_000_000 - 1)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn part1_works() {\n\n assert_eq!(part1(\"0,3,6\"), 436);\n\n assert_eq!(part1(\"5,1,9,18,13,8,0\"), 376);\n\n }\n\n\n\n #[test]\n\n fn part2_works() {\n\n assert_eq!(part2(\"5,1,9,18,13,8,0\"), 323_780);\n\n }\n\n}\n", "file_path": "src/day15/mod.rs", "rank": 49, "score": 48893.59459339059 }, { "content": "pub fn part1(input: &str) -> usize {\n\n let (deck1, deck2) = 
parse_decks(input);\n\n let result = play_game(deck1, deck2).expect(\"Game failed :(\");\n\n\n\n result.score()\n\n}\n\n\n", "file_path": "src/day22/mod.rs", "rank": 50, "score": 48893.59459339059 }, { "content": "pub fn part1(input: &str) -> usize {\n\n parse_passports(input)\n\n .filter(|passport| {\n\n VALID_FIELDS\n\n .iter()\n\n .all(|field| passport.get(*field).is_some())\n\n })\n\n .count()\n\n}\n\n\n", "file_path": "src/day04/mod.rs", "rank": 51, "score": 48893.59459339059 }, { "content": "pub fn part2(input: &str) -> u64 {\n\n let notes = Notes::from(input);\n\n\n\n let mut timestamp = 0;\n\n let mut stride = 1;\n\n\n\n for (maybe_bus_id, offset) in notes.bus_ids.iter().copied().zip(0u64..) {\n\n if let Some(bus_id) = maybe_bus_id {\n\n while (timestamp + offset) % bus_id != 0 {\n\n timestamp += stride\n\n }\n\n stride *= bus_id\n\n }\n\n }\n\n\n\n timestamp\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n", "file_path": "src/day13/mod.rs", "rank": 52, "score": 48893.59459339059 }, { "content": "pub fn part1(input: &str) -> usize {\n\n let mut seats = SeatMap::from(input);\n\n\n\n loop {\n\n let next_seats = seats.transformed(|seat, pos| match seat {\n\n Seat::Empty if seats.adjacent_occupied_seat_count(pos) == 0 => Seat::Occupied,\n\n Seat::Occupied if seats.adjacent_occupied_seat_count(pos) >= 4 => Seat::Empty,\n\n seat => seat,\n\n });\n\n\n\n if seats == next_seats {\n\n break seats\n\n .tiles\n\n .iter()\n\n .filter(|seat| matches!(seat, Some(Seat::Occupied)))\n\n .count();\n\n }\n\n\n\n seats = next_seats;\n\n }\n\n}\n\n\n", "file_path": "src/day11/mod.rs", "rank": 53, "score": 48893.59459339059 }, { "content": "pub fn part2(input: &str) -> i32 {\n\n let rotations = [\n\n Mat2([[0, 1], [1, 0]]), // 0 degress\n\n Mat2([[0, -1], [1, 0]]), // 90 degress CCW\n\n Mat2([[-1, 0], [0, -1]]), // 180 degress\n\n Mat2([[0, 1], [-1, 0]]), // 270 degress CCW\n\n ];\n\n\n\n let mut position = Vec2([0, 0]);\n\n let mut direction = Vec2([10, 1]);\n\n\n\n for (c, val) in 
instructions(input) {\n\n match c {\n\n 'N' => direction += Vec2([0, 1]) * val,\n\n 'S' => direction += Vec2([0, -1]) * val,\n\n 'E' => direction += Vec2([1, 0]) * val,\n\n 'W' => direction += Vec2([-1, 0]) * val,\n\n 'L' => direction = rotations[((val / 90) % 4) as usize] * direction,\n\n 'R' => direction = rotations[((-val / 90) % 4 + 4) as usize] * direction,\n\n 'F' => position += direction * val,\n", "file_path": "src/day12/mod.rs", "rank": 54, "score": 48893.59459339059 }, { "content": "pub fn part1(input: &str) -> Number {\n\n let operator_precedence = |operator| match operator {\n\n Operator::Add => 1,\n\n Operator::Multiply => 1,\n\n };\n\n\n\n input\n\n .lines()\n\n .map(|line| evaluate(line, operator_precedence))\n\n .sum()\n\n}\n\n\n", "file_path": "src/day18/mod.rs", "rank": 55, "score": 48893.59459339059 }, { "content": "pub fn part1(input: &str) -> u64 {\n\n let numbers = adapters(input);\n\n\n\n let mut groups = HashMap::new();\n\n let mut it = numbers.windows(2);\n\n while let Some(&[a, b]) = it.next() {\n\n let diff = b - a;\n\n *groups.entry(diff).or_default() += 1;\n\n }\n\n\n\n groups.values().product()\n\n}\n\n\n", "file_path": "src/day10/mod.rs", "rank": 56, "score": 48893.59459339059 }, { "content": "pub fn part2(input: &str) -> u64 {\n\n let mut memory = HashMap::new();\n\n\n\n for program in programs(input) {\n\n for (address, value) in program.memory_init.iter().copied() {\n\n for_each_mask(program.mask, |mask| {\n\n let masked_address = apply_mask(mask, address);\n\n memory.insert(masked_address, value);\n\n });\n\n }\n\n }\n\n\n\n memory.values().sum()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n static TEST_INPUT_0: &str = include_str!(\"test_input_0.txt\");\n", "file_path": "src/day14/mod.rs", "rank": 57, "score": 48893.59459339059 }, { "content": "pub fn part1(input: &str) -> usize {\n\n input\n\n .split(\"\\n\\n\")\n\n .map(|group| {\n\n group\n\n .chars()\n\n .filter(|c| c.is_alphabetic())\n\n 
.collect::<HashSet<_>>()\n\n .len()\n\n })\n\n .sum()\n\n}\n\n\n", "file_path": "src/day06/mod.rs", "rank": 58, "score": 48893.59459339059 }, { "content": "pub fn part2(input: &str) -> u64 {\n\n let numbers = adapters(input);\n\n\n\n let successors: HashMap<u64, Vec<u64>> = numbers\n\n .windows(4)\n\n .map(|window| {\n\n let (node, rest) = window.split_first().unwrap();\n\n (\n\n *node,\n\n rest.iter()\n\n .copied()\n\n .filter(|node2| node2 - node <= 3)\n\n .collect(),\n\n )\n\n })\n\n .collect();\n\n\n\n let part2_helper = Part2Helper {\n\n successors,\n\n cache: RefCell::new(HashMap::new()),\n", "file_path": "src/day10/mod.rs", "rank": 59, "score": 48893.59459339059 }, { "content": "pub fn part1(input: &str) -> u64 {\n\n let tiles: Vec<(TileId, Tile)> = parse_tiles(input).collect();\n\n\n\n let tile_arrangement = find_tile_arrangement(&tiles).expect(\"No tile arrangement found!\");\n\n\n\n let [[min_x, min_y], [max_x, max_y]] = bounding_box(tile_arrangement.keys()).unwrap();\n\n\n\n let corners = [\n\n [min_x, min_y],\n\n [min_x, max_y],\n\n [max_x, min_y],\n\n [max_x, max_y],\n\n ];\n\n\n\n corners\n\n .iter()\n\n .map(|corner| tile_arrangement.get(corner).expect(\"Not a square!\"))\n\n .map(|(tile_id, _)| *tile_id as u64)\n\n .product()\n\n}\n\n\n", "file_path": "src/day20/mod.rs", "rank": 60, "score": 48893.59459339059 }, { "content": "pub fn part1(input: &str) -> usize {\n\n parse_entries(input)\n\n .filter(|entry| {\n\n let count = entry\n\n .password\n\n .chars()\n\n .filter(|char| *char == entry.letter)\n\n .count();\n\n\n\n count >= entry.a && count <= entry.b\n\n })\n\n .count()\n\n}\n\n\n", "file_path": "src/day02/mod.rs", "rank": 61, "score": 48893.59459339059 }, { "content": "pub fn part1(input: &str) -> usize {\n\n let rules = parse_input(input);\n\n\n\n let validate = create_validator(&rules);\n\n\n\n input.lines().filter(|line| validate(*line)).count()\n\n}\n\n\n", "file_path": "src/day19/mod.rs", "rank": 62, "score": 48893.59459339059 }, { 
"content": "pub fn part2(input: &str) -> usize {\n\n let tiles: Vec<(TileId, Tile)> = parse_tiles(input).collect();\n\n\n\n let tile_arrangement = find_tile_arrangement(&tiles).expect(\"No tile arrangement found!\");\n\n\n\n let [[min_x, min_y], [max_x, max_y]] = bounding_box(tile_arrangement.keys()).unwrap();\n\n\n\n // for y in min_y..=max_y {\n\n // println!();\n\n // for x in min_x..=max_x {\n\n // let (tile_id, _) = tile_arrangement.get(&[x, y]).unwrap();\n\n // print!(\"{} \", tile_id);\n\n // }\n\n // }\n\n\n\n let tiles_size = {\n\n let ncols = 1 + max_x - min_x;\n\n let nrows = 1 + max_y - min_y;\n\n assert_eq!(ncols, nrows);\n\n ncols as usize\n", "file_path": "src/day20/mod.rs", "rank": 63, "score": 48893.59459339059 }, { "content": "pub fn part2(input: &str) -> i64 {\n\n let program: Vec<_> = input.lines().map(|line| line.parse().unwrap()).collect();\n\n\n\n program\n\n .iter()\n\n .enumerate()\n\n .find_map(|(index, instruction)| {\n\n let instruction = match *instruction {\n\n Instruction::Nop(val) => Instruction::Jmp(val),\n\n Instruction::Jmp(val) => Instruction::Nop(val),\n\n _ => return None,\n\n };\n\n\n\n let mut program = program.clone();\n\n program[index] = instruction;\n\n\n\n let mut cpu = CPU::from(program);\n\n\n\n loop {\n\n match cpu.step() {\n", "file_path": "src/day08/mod.rs", "rank": 64, "score": 48893.59459339059 }, { "content": "fn parse_decks(input: &str) -> (Deck, Deck) {\n\n let mut it = input\n\n .split(\"\\n\\n\")\n\n .map(|chunk| chunk.lines().filter_map(|line| line.parse().ok()).collect());\n\n\n\n let first = it.next().unwrap();\n\n let second = it.next().unwrap();\n\n\n\n (first, second)\n\n}\n\n\n", "file_path": "src/day22/mod.rs", "rank": 65, "score": 48068.90895224191 }, { "content": "pub fn part1(input: &str) -> Option<u32> {\n\n let nums: Vec<u32> = parse_lines(input).collect();\n\n\n\n nums.into_iter()\n\n .tuple_combinations()\n\n .find_map(|(a, b)| if a + b == 2020 { Some(a * b) } else { None })\n\n}\n\n\n", 
"file_path": "src/day01/mod.rs", "rank": 66, "score": 47026.05914030097 }, { "content": "pub fn part2(input: &str) -> Option<u32> {\n\n let nums: Vec<u32> = parse_lines(input).collect();\n\n\n\n nums.into_iter().tuple_combinations().find_map(|(a, b, c)| {\n\n if a + b + c == 2020 {\n\n Some(a * b * c)\n\n } else {\n\n None\n\n }\n\n })\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n static TEST_INPUT: &str = include_str!(\"test_input.txt\");\n\n static INPUT: &str = include_str!(\"input.txt\");\n\n\n\n #[test]\n", "file_path": "src/day01/mod.rs", "rank": 67, "score": 47026.05914030097 }, { "content": "pub fn part1(input: &str, moves: usize) -> String {\n\n let initial_cups: Vec<Cup> = parse_cups(input).collect();\n\n\n\n let mut game = Game::new(&initial_cups, initial_cups.len());\n\n\n\n for _ in 0..moves {\n\n game.make_move();\n\n }\n\n\n\n use std::fmt::Write;\n\n\n\n let mut stringified = String::with_capacity(game.next_cups.len() - 1);\n\n let mut current_cup = game.next_cups[0];\n\n\n\n for _ in 0..game.next_cups.len() - 1 {\n\n write!(&mut stringified, \"{}\", current_cup + 1).unwrap();\n\n current_cup = game.next_cups[current_cup];\n\n }\n\n\n\n stringified\n\n}\n\n\n", "file_path": "src/day23/mod.rs", "rank": 68, "score": 45300.30837232972 }, { "content": "fn parse_rule(s: &str) -> Option<(RuleRef, Rule<'_>)> {\n\n use regex::Regex;\n\n\n\n lazy_static! 
{\n\n static ref CHAR_RE: Regex = Regex::new(r#\"^(?P<index>\\d+): \"(?P<text>.+)\"$\"#).unwrap();\n\n static ref REFS_RE: Regex = Regex::new(r#\"^(?P<index>\\d+): (?P<refs>.+)$\"#).unwrap();\n\n }\n\n\n\n if let Some(caps) = CHAR_RE.captures(s) {\n\n let index = caps.name(\"index\").unwrap().as_str().parse().unwrap();\n\n let text = caps.name(\"text\").unwrap().as_str();\n\n Some((index, Rule::Text(text)))\n\n } else if let Some(caps) = REFS_RE.captures(s) {\n\n let index = caps.name(\"index\").unwrap().as_str().parse().unwrap();\n\n let refs = caps.name(\"refs\").unwrap().as_str();\n\n Some((index, Rule::Refs(refs)))\n\n } else {\n\n None\n\n }\n\n}\n\n\n", "file_path": "src/day19/mod.rs", "rank": 69, "score": 45300.30837232972 }, { "content": "pub fn part2(input: &str, preamble_length: usize) -> u64 {\n\n let numbers: Vec<u64> = input.lines().map(|line| line.parse().unwrap()).collect();\n\n let sums: Vec<u64> = numbers\n\n .iter()\n\n .scan(0, |sum, num| {\n\n *sum += num;\n\n Some(*sum)\n\n })\n\n .collect();\n\n\n\n let first_invalid_number =\n\n find_first_invalid_number(&numbers, preamble_length).expect(\"No invalid number found\");\n\n\n\n for i in 0..numbers.len() - 1 {\n\n let mut start = i;\n\n let mut end = numbers.len() - 1;\n\n\n\n while start <= end {\n\n let mid = (start + end) / 2;\n\n let sum = sums[mid] - sums[start];\n", "file_path": "src/day09/mod.rs", "rank": 70, "score": 44572.12945139964 }, { "content": "pub fn part1(input: &str, preamble_length: usize) -> u64 {\n\n let numbers: Vec<u64> = input.lines().map(|line| line.parse().unwrap()).collect();\n\n\n\n find_first_invalid_number(&numbers, preamble_length).expect(\"No invalid number found\")\n\n}\n\n\n", "file_path": "src/day09/mod.rs", "rank": 71, "score": 44572.12945139964 }, { "content": "fn programs(input: &str) -> impl Iterator<Item = Program> + '_ {\n\n lazy_static! 
{\n\n static ref MASK_RE: Regex = Regex::new(r\"^mask = (?P<mask>[01X]{36})$\").unwrap();\n\n static ref MEM_RE: Regex = Regex::new(r\"^mem\\[(?P<index>\\d+)\\] = (?P<value>\\d+)$\").unwrap();\n\n }\n\n\n\n let mut lines = input.lines().peekable();\n\n\n\n iter::from_fn(move || {\n\n let line = lines.next()?;\n\n let mask_str = MASK_RE.captures(line)?.name(\"mask\")?.as_str();\n\n\n\n let mut mask = [BitMask::Off; 36];\n\n for (i, c) in mask_str.chars().rev().enumerate() {\n\n mask[i] = match c {\n\n '0' => BitMask::Off,\n\n '1' => BitMask::On,\n\n 'X' => BitMask::Floating,\n\n _ => panic!(\"Invalid mask char!\"),\n\n }\n", "file_path": "src/day14/mod.rs", "rank": 72, "score": 44428.53754904721 }, { "content": "fn tokens(input: &str) -> impl Iterator<Item = Token> + '_ {\n\n let mut it = input.char_indices().peekable();\n\n\n\n std::iter::from_fn(move || loop {\n\n match it.next()? {\n\n (start, c) if c.is_numeric() => {\n\n let mut end = start + 1;\n\n while let Some((i, c)) = it.peek() {\n\n if c.is_numeric() {\n\n it.next();\n\n } else {\n\n end = *i;\n\n break;\n\n }\n\n }\n\n let value = input[start..end].parse().unwrap();\n\n break Some(Token::Number(value));\n\n }\n\n (_, '+') => break Some(Token::Operator(Operator::Add)),\n\n (_, '*') => break Some(Token::Operator(Operator::Multiply)),\n\n (_, '(') => break Some(Token::LeftParenthesis),\n\n (_, ')') => break Some(Token::RightParenthesis),\n\n (_, ' ') => {}\n\n (_, c) => panic!(\"Unexpected value: {}\", c),\n\n }\n\n })\n\n}\n\n\n", "file_path": "src/day18/mod.rs", "rank": 73, "score": 44428.53754904721 }, { "content": "fn count_trees(forest: &str, right: usize, down: usize) -> usize {\n\n walk_forest(forest, right, down)\n\n .filter(|b| *b == b'#')\n\n .count()\n\n}\n\n\n", "file_path": "src/day03/mod.rs", "rank": 74, "score": 43700.35862811713 }, { "content": "fn parse_cups(input: &str) -> impl Iterator<Item = Cup> + '_ {\n\n input.chars().map(|c| (c.to_digit(10).unwrap() - 1) as Cup)\n\n}\n\n\n", 
"file_path": "src/day23/mod.rs", "rank": 75, "score": 43700.35862811713 }, { "content": "fn instructions(input: &str) -> impl Iterator<Item = (char, i32)> + '_ {\n\n input\n\n .lines()\n\n .map(|line| (line.chars().next().unwrap(), line[1..].parse().unwrap()))\n\n}\n\n\n", "file_path": "src/day12/mod.rs", "rank": 76, "score": 42898.7523479097 }, { "content": "fn parse_line(mut input: &str) -> impl Iterator<Item = Direction> + '_ {\n\n std::iter::from_fn(move || {\n\n if let Some(remaining_input) = input.strip_prefix(\"e\") {\n\n input = remaining_input;\n\n Some(Direction::East)\n\n } else if let Some(remaining_input) = input.strip_prefix(\"w\") {\n\n input = remaining_input;\n\n Some(Direction::West)\n\n } else if let Some(remaining_input) = input.strip_prefix(\"se\") {\n\n input = remaining_input;\n\n Some(Direction::SouthEast)\n\n } else if let Some(remaining_input) = input.strip_prefix(\"sw\") {\n\n input = remaining_input;\n\n Some(Direction::SouthWest)\n\n } else if let Some(remaining_input) = input.strip_prefix(\"ne\") {\n\n input = remaining_input;\n\n Some(Direction::NorthEast)\n\n } else if let Some(remaining_input) = input.strip_prefix(\"nw\") {\n\n input = remaining_input;\n\n Some(Direction::NorthWest)\n\n } else {\n\n None\n\n }\n\n })\n\n}\n\n\n", "file_path": "src/day24/mod.rs", "rank": 77, "score": 42212.59406279109 }, { "content": "fn parse_input(input: &str) -> impl Iterator<Item = (Int, Int)> + '_ {\n\n input.lines().enumerate().flat_map(|(y, line)| {\n\n line.chars()\n\n .enumerate()\n\n .filter(|(_, c)| *c == '#')\n\n .map(move |(x, _)| (x as Int, y as Int))\n\n })\n\n}\n\n\n", "file_path": "src/day17/mod.rs", "rank": 78, "score": 42212.59406279109 }, { "content": "fn parse_tiles(input: &str) -> impl Iterator<Item = (TileId, Tile)> + '_ {\n\n input.split(\"\\n\\n\").map(|chunk| {\n\n let mut lines = chunk.lines();\n\n\n\n let first_line = lines.next().unwrap();\n\n let id = first_line\n\n .trim_start_matches(\"Tile \")\n\n 
.trim_end_matches(':')\n\n .parse()\n\n .unwrap();\n\n\n\n let mut data = [[false; TILE_SIZE]; TILE_SIZE];\n\n for (y, line) in lines.enumerate() {\n\n let y_data = &mut data[y];\n\n for (x, c) in line.chars().enumerate() {\n\n y_data[x] = match c {\n\n '#' => true,\n\n '.' => false,\n\n _ => panic!(\"Invalid char: {}\", c),\n\n };\n\n }\n\n }\n\n\n\n (id, data)\n\n })\n\n}\n\n\n", "file_path": "src/day20/mod.rs", "rank": 79, "score": 41564.92112411552 }, { "content": "fn evaluate(input: &str, operator_precedence: impl Fn(Operator) -> usize) -> Number {\n\n let mut stack = Vec::new();\n\n\n\n for rpn_token in rpn_tokens(tokens(input), operator_precedence) {\n\n match rpn_token {\n\n Token::Number(value) => {\n\n stack.push(value);\n\n }\n\n Token::Operator(operator) => {\n\n let lhs = stack.pop().unwrap();\n\n let rhs = stack.pop().unwrap();\n\n stack.push(match operator {\n\n Operator::Add => lhs + rhs,\n\n Operator::Multiply => lhs * rhs,\n\n });\n\n }\n\n _ => panic!(\"Unexpected token: {:?}\", rpn_token),\n\n }\n\n }\n\n\n\n stack.pop().unwrap()\n\n}\n\n\n", "file_path": "src/day18/mod.rs", "rank": 80, "score": 41564.92112411552 }, { "content": "fn parse_entries<'a>(input: &'a str) -> impl Iterator<Item = Entry<'a>> + 'a {\n\n lazy_static! 
{\n\n static ref RE: Regex =\n\n Regex::new(r\"(?P<a>\\d+)-(?P<b>\\d+) (?P<letter>\\w): (?P<password>\\w+)\").unwrap();\n\n }\n\n\n\n RE.captures_iter(input).map(|c| Entry {\n\n a: c.name(\"a\").unwrap().as_str().parse().unwrap(),\n\n b: c.name(\"b\").unwrap().as_str().parse().unwrap(),\n\n letter: c.name(\"letter\").unwrap().as_str().chars().next().unwrap(),\n\n password: c.name(\"password\").unwrap().as_str(),\n\n })\n\n}\n\n\n", "file_path": "src/day02/mod.rs", "rank": 81, "score": 41474.674175221524 }, { "content": "fn create_validator<'a>(rules: &RuleMap<'a>) -> impl Fn(&str) -> bool + 'a {\n\n use pcre2::bytes::RegexBuilder;\n\n\n\n let pattern = create_validator_pattern(rules).unwrap();\n\n\n\n let regex = RegexBuilder::new()\n\n .jit_if_available(true)\n\n .extended(true)\n\n .build(&pattern)\n\n .unwrap();\n\n\n\n move |s| regex.is_match(s.as_bytes()).unwrap()\n\n}\n\n\n", "file_path": "src/day19/mod.rs", "rank": 82, "score": 41473.00922651777 }, { "content": "fn walk_forest(forest: &str, right: usize, down: usize) -> impl Iterator<Item = u8> + '_ {\n\n forest\n\n .lines()\n\n .step_by(down)\n\n .zip((0..).step_by(right))\n\n .map(|(line, i)| line.as_bytes()[i % line.len()])\n\n}\n\n\n", "file_path": "src/day03/mod.rs", "rank": 83, "score": 38893.16939481183 }, { "content": "type RemainingTiles<'a> = HashMap<TileId, &'a Tile>;\n", "file_path": "src/day20/mod.rs", "rank": 86, "score": 25411.00184775837 }, { "content": "type TileArrangement<'a> = HashMap<Position, (TileId, TileView<'a>)>;\n\n\n", "file_path": "src/day20/mod.rs", "rank": 87, "score": 24298.89021780306 }, { "content": " if rule_name.starts_with(\"departure\") {\n\n Some(value)\n\n } else {\n\n None\n\n }\n\n })\n\n .product()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n static TEST_INPUT_0: &str = include_str!(\"test_input_0.txt\");\n\n static TEST_INPUT_1: &str = include_str!(\"test_input_1.txt\");\n\n static INPUT: &str = include_str!(\"input.txt\");\n\n\n\n #[test]\n\n 
fn part1_works() {\n\n assert_eq!(part1(TEST_INPUT_0), 71);\n", "file_path": "src/day16/mod.rs", "rank": 92, "score": 4.621366527175105 }, { "content": "mod tests {\n\n use super::*;\n\n\n\n static TEST_INPUT: &str = include_str!(\"test_input.txt\");\n\n static INPUT: &str = include_str!(\"input.txt\");\n\n\n\n #[test]\n\n fn part1_works() {\n\n assert_eq!(part1(TEST_INPUT, 5), 127);\n\n assert_eq!(part1(INPUT, 25), 25_918_798);\n\n }\n\n\n\n #[test]\n\n fn part2_works() {\n\n assert_eq!(part2(TEST_INPUT, 5), 62);\n\n assert_eq!(part2(INPUT, 25), 3_340_942);\n\n }\n\n}\n", "file_path": "src/day09/mod.rs", "rank": 93, "score": 4.298807665217569 }, { "content": "\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n static TEST_INPUT: &str = include_str!(\"test_input.txt\");\n\n static TEST_INPUT_2: &str = include_str!(\"test_input_2.txt\");\n\n static TEST_INPUT_3: &str = include_str!(\"test_input_3.txt\");\n\n static INPUT: &str = include_str!(\"input.txt\");\n\n\n\n #[test]\n\n fn part1_works() {\n\n assert_eq!(part1(TEST_INPUT), 2);\n\n assert_eq!(part1(INPUT), 192);\n\n }\n\n\n\n #[test]\n\n fn part2_works() {\n\n assert_eq!(part2(TEST_INPUT_2), 0);\n\n assert_eq!(part2(TEST_INPUT_3), 4);\n\n assert_eq!(part2(INPUT), 101);\n\n }\n\n}\n", "file_path": "src/day04/mod.rs", "rank": 94, "score": 4.245363663498584 }, { "content": "#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n static TEST_INPUT: &str = include_str!(\"test_input.txt\");\n\n static INPUT: &str = include_str!(\"input.txt\");\n\n\n\n #[test]\n\n fn part1_works() {\n\n assert_eq!(part1(TEST_INPUT), 4);\n\n assert_eq!(part1(INPUT), 211);\n\n }\n\n\n\n #[test]\n\n fn part2_works() {\n\n assert_eq!(part2(TEST_INPUT), 32);\n\n assert_eq!(part2(INPUT), 12_414);\n\n }\n\n}\n", "file_path": "src/day07/mod.rs", "rank": 95, "score": 4.222660820611114 }, { "content": "}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n static TEST_INPUT: &str = include_str!(\"test_input.txt\");\n\n static INPUT: 
&str = include_str!(\"input.txt\");\n\n\n\n #[test]\n\n fn part1_works() {\n\n assert_eq!(part1(TEST_INPUT), 37);\n\n assert_eq!(part1(INPUT), 2368);\n\n }\n\n\n\n #[test]\n\n fn part2_works() {\n\n assert_eq!(part2(TEST_INPUT), 26);\n\n assert_eq!(part2(INPUT), 2124);\n\n }\n\n}\n", "file_path": "src/day11/mod.rs", "rank": 96, "score": 4.222660820611114 }, { "content": "#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n static TEST_INPUT: &str = include_str!(\"test_input.txt\");\n\n static INPUT: &str = include_str!(\"input.txt\");\n\n\n\n #[test]\n\n fn part1_works() {\n\n assert_eq!(part1(TEST_INPUT), 11);\n\n assert_eq!(part1(INPUT), 6565);\n\n }\n\n\n\n #[test]\n\n fn part2_works() {\n\n assert_eq!(part2(TEST_INPUT), 6);\n\n assert_eq!(part2(INPUT), 3137);\n\n }\n\n}\n", "file_path": "src/day06/mod.rs", "rank": 97, "score": 4.222660820611114 }, { "content": " }\n\n\n\n active_cubes.len()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n static TEST_INPUT: &str = include_str!(\"test_input.txt\");\n\n static INPUT: &str = include_str!(\"input.txt\");\n\n\n\n #[test]\n\n fn part1_works() {\n\n assert_eq!(part1(TEST_INPUT), 112);\n\n assert_eq!(part1(INPUT), 240);\n\n }\n\n\n\n #[test]\n\n fn part2_works() {\n\n assert_eq!(part2(TEST_INPUT), 848);\n\n assert_eq!(part2(INPUT), 1180);\n\n }\n\n}\n", "file_path": "src/day17/mod.rs", "rank": 98, "score": 4.113872468353776 }, { "content": " _ => {}\n\n }\n\n }\n\n\n\n let Vec2([x, y]) = position;\n\n x.abs() + y.abs()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n static TEST_INPUT: &str = include_str!(\"test_input.txt\");\n\n static INPUT: &str = include_str!(\"input.txt\");\n\n\n\n #[test]\n\n fn part1_works() {\n\n assert_eq!(part1(TEST_INPUT), 25);\n\n assert_eq!(part1(INPUT), 1106);\n\n }\n\n\n\n #[test]\n\n fn part2_works() {\n\n assert_eq!(part2(TEST_INPUT), 286);\n\n assert_eq!(part2(INPUT), 107_281);\n\n }\n\n}\n", "file_path": "src/day12/mod.rs", "rank": 99, "score": 
4.078968581166357 } ]
Rust
async-coap/src/option/iter.rs
Luro02/rust-async-coap
6a7b592a23de0c9d86ca399bf40ecfbf0bff6e62
use super::*; use std::convert::Into; #[derive(Debug, Clone)] pub struct OptionIterator<'a> { iter: core::slice::Iter<'a, u8>, last_option: OptionNumber, } impl<'a> Default for OptionIterator<'a> { fn default() -> Self { OptionIterator::new(&[]) } } impl<'a> OptionIterator<'a> { pub fn new(buffer: &'a [u8]) -> OptionIterator<'a> { OptionIterator { iter: buffer.iter(), last_option: Default::default(), } } pub fn as_slice(&self) -> &'a [u8] { self.iter.as_slice() } pub fn peek(&mut self) -> Option<Result<(OptionNumber, &'a [u8]), Error>> { decode_option(&mut self.iter.clone(), self.last_option).transpose() } pub fn peek_eq<T>(&mut self, key: OptionKey<T>, value: T) -> bool where T: Into<OptionValue<'a>>, { let mut temp_array = [0; 8]; match decode_option(&mut self.iter.clone(), self.last_option) { Ok(Some((number, iter_value))) => { number == key.0 && (match value.into() { OptionValue::Integer(x) => encode_u32(x, &mut temp_array), OptionValue::Bytes(x) => x, OptionValue::ETag(x) => { let temp_slice = &mut temp_array[0..x.len()]; temp_slice.copy_from_slice(x.as_bytes()); temp_slice } } == iter_value) } _ => false, } } } impl<'a> Iterator for OptionIterator<'a> { type Item = Result<(OptionNumber, &'a [u8]), Error>; fn next(&mut self) -> Option<Self::Item> { let ret = decode_option(&mut self.iter, self.last_option).transpose(); if let Some(Ok((key, _))) = ret { self.last_option = key; } ret } } impl AsRef<[u8]> for OptionIterator<'_> { fn as_ref(&self) -> &[u8] { self.as_slice() } } pub trait OptionIteratorExt<'a>: Iterator<Item = Result<(OptionNumber, &'a [u8]), Error>> { fn find_next(&mut self, key: OptionNumber) -> Option<Result<(OptionNumber, &'a [u8]), Error>>; fn find_next_of<T>(&mut self, key: OptionKey<T>) -> Option<Result<T, Error>> where T: TryOptionValueFrom<'a> + Sized, { if let Some(result) = self.find_next(key.0) { match result { Ok((_, value)) => { if let Some(x) = T::try_option_value_from(value) { return Some(Ok(x)); } else { return 
Some(Err(Error::ParseFailure)); } } Err(e) => return Some(Err(e)), } } None } fn extract_uri(&self) -> Result<RelRefBuf, Error> where Self: Sized + Clone, { let mut copy = self.clone(); let mut buf = String::new(); while let Some(seg) = copy.find_next_of(option::URI_PATH).transpose()? { if !buf.is_empty() { buf.push('/'); } buf.extend(seg.escape_uri()); } let mut has_query = false; while let Some(item) = copy.find_next_of(option::URI_QUERY).transpose()? { if has_query { buf.push('&'); } else { buf.push('?'); has_query = true; } buf.extend(item.escape_uri().for_query()); } let mut ret = RelRefBuf::from_string(buf).expect("Constructed URI was malformed"); ret.disambiguate(); Ok(ret) } fn extract_location(&self) -> Result<RelRefBuf, Error> where Self: Sized + Clone, { let mut copy = self.clone(); let mut buf = String::new(); while let Some(seg) = copy.find_next_of(option::LOCATION_PATH).transpose()? { if !buf.is_empty() { buf.push('/'); } buf.extend(seg.escape_uri()); } let mut has_query = false; while let Some(item) = copy.find_next_of(option::LOCATION_QUERY).transpose()? { if has_query { buf.push('&'); } else { buf.push('?'); has_query = true; } buf.extend(item.escape_uri().for_query()); } Ok(RelRefBuf::from_string(buf).expect("Constructed URI was malformed")) } } impl<'a, I> OptionIteratorExt<'a> for I where I: Iterator<Item = Result<(OptionNumber, &'a [u8]), Error>> + Sized + Clone, { fn find_next(&mut self, key: OptionNumber) -> Option<Result<(OptionNumber, &'a [u8]), Error>> { let next_value = loop { let mut iter = self.clone(); match iter.next()? { Err(x) => return Some(Err(x)), Ok((number, value)) => { if number == key { *self = iter; break (key, value); } if number < key.0 { *self = iter; continue; } } }; return None; }; Some(Ok(next_value)) } }
use super::*; use std::convert::Into; #[derive(Debug, Clone)] pub struct OptionIterator<'a> { iter: core::slice::Iter<'a, u8>, last_option: OptionNumber, } impl<'a> Default for OptionIterator<'a> { fn default() -> Self { OptionIterator::new(&[]) } } impl<'a> OptionIterator<'a> { pub fn new(buffer: &'a [u8]) -> OptionIterator<'a> { OptionIterator { iter: buffer.iter(), last_option: Default::default(), } } pub fn as_slice(&self) -> &'a [u8] { self.iter.as_slice() } pub fn peek(&mut self) -> Option<Result<(OptionNumber, &'a [u8]), Error>> { decode_option(&mut self.iter.clone(), self.last_option).transpose() } pub fn peek_eq<T>(&mut self, key: OptionKey<T>, value: T) -> bool where T: Into<OptionValue<'a>>, { let mut temp_array = [0; 8]; match decode_option(&mut self.iter.clone(), self.last_option) { Ok(Some((number, iter_value))) => { number == key.0 && (match value.into() { OptionValue::Integer(x) => encode_u32(x, &mut temp_array), OptionValue::Bytes(x) => x, OptionValue::ETag(x) => { let temp_slice = &mut temp_array[0..x.len()]; temp_slice.copy_from_slice(x.as_bytes()); temp_slice } } == iter_value) } _ => false, } } } impl<'a> Iterator for OptionIterator<'a> { type Item = Result<(OptionNumber, &'a [u8]), Error>; fn next(&mut self)
et { self.last_option = key; } ret } } impl AsRef<[u8]> for OptionIterator<'_> { fn as_ref(&self) -> &[u8] { self.as_slice() } } pub trait OptionIteratorExt<'a>: Iterator<Item = Result<(OptionNumber, &'a [u8]), Error>> { fn find_next(&mut self, key: OptionNumber) -> Option<Result<(OptionNumber, &'a [u8]), Error>>; fn find_next_of<T>(&mut self, key: OptionKey<T>) -> Option<Result<T, Error>> where T: TryOptionValueFrom<'a> + Sized, { if let Some(result) = self.find_next(key.0) { match result { Ok((_, value)) => { if let Some(x) = T::try_option_value_from(value) { return Some(Ok(x)); } else { return Some(Err(Error::ParseFailure)); } } Err(e) => return Some(Err(e)), } } None } fn extract_uri(&self) -> Result<RelRefBuf, Error> where Self: Sized + Clone, { let mut copy = self.clone(); let mut buf = String::new(); while let Some(seg) = copy.find_next_of(option::URI_PATH).transpose()? { if !buf.is_empty() { buf.push('/'); } buf.extend(seg.escape_uri()); } let mut has_query = false; while let Some(item) = copy.find_next_of(option::URI_QUERY).transpose()? { if has_query { buf.push('&'); } else { buf.push('?'); has_query = true; } buf.extend(item.escape_uri().for_query()); } let mut ret = RelRefBuf::from_string(buf).expect("Constructed URI was malformed"); ret.disambiguate(); Ok(ret) } fn extract_location(&self) -> Result<RelRefBuf, Error> where Self: Sized + Clone, { let mut copy = self.clone(); let mut buf = String::new(); while let Some(seg) = copy.find_next_of(option::LOCATION_PATH).transpose()? { if !buf.is_empty() { buf.push('/'); } buf.extend(seg.escape_uri()); } let mut has_query = false; while let Some(item) = copy.find_next_of(option::LOCATION_QUERY).transpose()? 
{ if has_query { buf.push('&'); } else { buf.push('?'); has_query = true; } buf.extend(item.escape_uri().for_query()); } Ok(RelRefBuf::from_string(buf).expect("Constructed URI was malformed")) } } impl<'a, I> OptionIteratorExt<'a> for I where I: Iterator<Item = Result<(OptionNumber, &'a [u8]), Error>> + Sized + Clone, { fn find_next(&mut self, key: OptionNumber) -> Option<Result<(OptionNumber, &'a [u8]), Error>> { let next_value = loop { let mut iter = self.clone(); match iter.next()? { Err(x) => return Some(Err(x)), Ok((number, value)) => { if number == key { *self = iter; break (key, value); } if number < key.0 { *self = iter; continue; } } }; return None; }; Some(Ok(next_value)) } }
-> Option<Self::Item> { let ret = decode_option(&mut self.iter, self.last_option).transpose(); if let Some(Ok((key, _))) = r
function_block-random_span
[ { "content": "/// Encodes an unsigned 32-bit number into the given buffer, returning\n\n/// the resized buffer. The returned buffer may be smaller than the\n\n/// `dst`, and may even be empty. The returned buffer is only as large\n\n/// as it needs to be to represent the given value.\n\npub fn encode_u32(value: u32, dst: &mut [u8]) -> &mut [u8] {\n\n if value == 0 {\n\n &mut []\n\n } else if value <= 0xFF {\n\n dst[0] = value as u8;\n\n &mut dst[..1]\n\n } else if value <= 0xFFFF {\n\n dst[0] = (value >> 8) as u8;\n\n dst[1] = value as u8;\n\n &mut dst[..2]\n\n } else if value <= 0xFFFFFF {\n\n dst[0] = (value >> 16) as u8;\n\n dst[1] = (value >> 8) as u8;\n\n dst[2] = value as u8;\n\n &mut dst[..3]\n\n } else {\n\n dst[0] = (value >> 24) as u8;\n\n dst[1] = (value >> 16) as u8;\n\n dst[2] = (value >> 8) as u8;\n\n dst[3] = value as u8;\n\n &mut dst[..4]\n\n }\n\n}\n\n\n", "file_path": "async-coap/src/util.rs", "rank": 1, "score": 250734.775099726 }, { "content": "/// Calculates the encoded size of a CoAP option.\n\npub fn calc_option_size(prev_key: OptionNumber, key: OptionNumber, mut value_len: usize) -> usize {\n\n if value_len >= 269 {\n\n value_len += 2;\n\n } else if value_len >= 13 {\n\n value_len += 1;\n\n }\n\n\n\n let option_delta = (key - prev_key) as u16;\n\n\n\n if option_delta >= 269 {\n\n value_len += 3;\n\n } else if option_delta >= 13 {\n\n value_len += 2;\n\n } else {\n\n value_len += 1;\n\n }\n\n\n\n return value_len;\n\n}\n\n\n", "file_path": "async-coap/src/message/codec.rs", "rank": 2, "score": 239012.18078503205 }, { "content": "/// Encodes all parts of an option into the given buffer *except* the value. All other parts,\n\n/// including the value length, are encoded. 
This is typically used directly when inserting\n\n/// options, otherwise `encode_option()` (which writes the value) is typically a better fit.\n\npub fn encode_option_without_value(\n\n buffer: &mut [u8],\n\n prev_key: OptionNumber,\n\n key: OptionNumber,\n\n value_len: usize,\n\n) -> Result<usize, Error> {\n\n if prev_key > key {\n\n return Err(Error::InvalidArgument);\n\n }\n\n\n\n let calc_len = calc_option_size(prev_key, key, value_len);\n\n if calc_len > buffer.len() {\n\n eprintln!(\"calc_len:{}, blen:{}\", calc_len, buffer.len());\n\n return Err(Error::OutOfSpace);\n\n }\n\n\n\n if value_len > MAX_OPTION_VALUE_SIZE {\n\n eprintln!(\"value_len:{}, max:{}\", value_len, MAX_OPTION_VALUE_SIZE);\n\n return Err(Error::InvalidArgument);\n\n }\n", "file_path": "async-coap/src/message/codec.rs", "rank": 3, "score": 149237.36571898358 }, { "content": "/// Attempts to decode the given little-endian-encoded integer to a `u32`.\n\n/// Input may be up to four bytes long. If the input is larger than four\n\n/// bytes long, returns `None`.\n\npub fn try_decode_u32(src: &[u8]) -> Option<u32> {\n\n match src.len() {\n\n 0 => Some(0u32),\n\n 1 => Some(src[0] as u32),\n\n 2 => Some(((src[0] as u32) << 8) + src[1] as u32),\n\n 3 => Some(((src[0] as u32) << 16) + ((src[1] as u32) << 8) + src[2] as u32),\n\n 4 => Some(\n\n ((src[0] as u32) << 24)\n\n + ((src[1] as u32) << 16)\n\n + ((src[2] as u32) << 8)\n\n + src[3] as u32,\n\n ),\n\n _ => None,\n\n }\n\n}\n\n\n", "file_path": "async-coap/src/util.rs", "rank": 4, "score": 147040.2168301141 }, { "content": "/// Attempts to decode the given little-endian-encoded integer to a `u16`.\n\n/// Input may be up to two bytes long. 
If the input is larger than two\n\n/// bytes long, returns `None`.\n\npub fn try_decode_u16(src: &[u8]) -> Option<u16> {\n\n match src.len() {\n\n 0 => Some(0u16),\n\n 1 => Some(src[0] as u16),\n\n 2 => Some(((src[0] as u16) << 8) + src[1] as u16),\n\n _ => None,\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::util::*;\n\n\n\n #[test]\n\n fn encode_decode_u32() {\n\n for i in vec![\n\n 0x00, 0x01, 0x0FF, 0x100, 0x0FFFF, 0x10000, 0x0FFFFFF, 0x1000000, 0xFFFFFFFF,\n\n ] {\n\n assert_eq!(try_decode_u32(encode_u32(i, &mut [0; 4])).unwrap(), i);\n\n }\n", "file_path": "async-coap/src/util.rs", "rank": 5, "score": 147040.2168301141 }, { "content": "/// Encodes an option into the given buffer, including the value.\n\npub fn encode_option(\n\n buffer: &mut [u8],\n\n prev_key: OptionNumber,\n\n key: OptionNumber,\n\n value: &[u8],\n\n) -> Result<usize, Error> {\n\n let option_len = encode_option_without_value(buffer, prev_key, key, value.len())?;\n\n\n\n // The value bytes are always at the end.\n\n buffer[option_len - value.len()..option_len].copy_from_slice(value);\n\n\n\n return Ok(option_len);\n\n}\n\n\n", "file_path": "async-coap/src/message/codec.rs", "rank": 6, "score": 116641.688511405 }, { "content": "/// Inserts an option into an option list. Very slow unless called sequentially.\n\npub fn insert_option(\n\n buffer: &mut [u8],\n\n mut len: usize,\n\n last_option: OptionNumber,\n\n key: OptionNumber,\n\n value: &[u8],\n\n) -> Result<(usize, OptionNumber), Error> {\n\n if value.len() > MAX_OPTION_VALUE_SIZE {\n\n return Err(Error::InvalidArgument);\n\n }\n\n\n\n if key >= last_option {\n\n // This is the easy case: A simple append is adequate.\n\n len += encode_option(&mut buffer[len..], last_option, key, value)?;\n\n return Ok((len, key));\n\n }\n\n\n\n // What follows will only happen if this method is called with a property key\n\n // out-of-order. 
Hopefully this should only happen rarely, as there is a\n\n // significant performance penalty for doing so. This approach does have a\n", "file_path": "async-coap/src/message/codec.rs", "rank": 7, "score": 116636.75861572474 }, { "content": "/// Decodes one option from a `core::slice::Iter`, which can be obtained from a byte slice.\n\n/// The iterator is then advanced to the next option.\n\n///\n\n/// Will return `Ok(None)` if it either encounters the end-of-options marker (0xFF) or if the\n\n/// given iterator has been fully consumed.\n\npub fn decode_option<'a>(\n\n iter: &mut core::slice::Iter<'a, u8>,\n\n last_option: OptionNumber,\n\n) -> Result<Option<(OptionNumber, &'a [u8])>, Error> {\n\n // TODO(#5): Improve performance.\n\n macro_rules! try_next {\n\n ($iter:expr, $none:expr) => {\n\n match ($iter).next() {\n\n Some(x) => *x,\n\n None => return $none,\n\n }\n\n };\n\n }\n\n\n\n let header: u8 = try_next!(iter, Ok(None));\n\n\n\n if header == 0xFF {\n\n // End of options marker.\n\n return Ok(None);\n\n }\n", "file_path": "async-coap/src/message/codec.rs", "rank": 8, "score": 112609.73987331681 }, { "content": "#[doc(hidden)]\n\npub trait NeedsEscape: Clone {\n\n fn byte_needs_escape(b: u8) -> bool {\n\n Self::char_needs_escape(b as char) || (b & 0x80) != 0\n\n }\n\n fn char_needs_escape(c: char) -> bool;\n\n fn escape_space_as_plus() -> bool {\n\n false\n\n }\n\n}\n\n\n\n/// A zero-sized implementor of [`NeedsEscape`] that escapes all reserved characters.\n\n///\n\n/// Its behavior is subject to change and is not considered stable.\n\n#[doc(hidden)]\n\n#[derive(Default, Copy, Clone, Debug)]\n\npub struct EscapeUriFull;\n\nimpl NeedsEscape for EscapeUriFull {\n\n fn char_needs_escape(c: char) -> bool {\n\n !is_char_uri_unreserved(c)\n\n }\n", "file_path": "async-coap-uri/src/escape/escape_uri.rs", "rank": 9, "score": 109780.39995556342 }, { "content": "fn is_char_uri_unreserved(c: char) -> bool {\n\n c.is_ascii_alphanumeric() || c == '-' || c == '.' 
|| c == '_' || c == '~'\n\n}\n\n\n", "file_path": "async-coap-uri/src/escape/escape_uri.rs", "rank": 10, "score": 103961.49248473946 }, { "content": "fn is_char_uri_quote(c: char) -> bool {\n\n c != '+' && (is_char_uri_pchar(c) || c == '/' || c == '?')\n\n}\n\n\n", "file_path": "async-coap-uri/src/escape/escape_uri.rs", "rank": 11, "score": 103961.49248473946 }, { "content": "fn is_char_uri_fragment(c: char) -> bool {\n\n is_char_uri_pchar(c) || c == '/' || c == '?' || c == '#'\n\n}\n\n\n\n#[derive(Debug, Eq, PartialEq, Copy, Clone)]\n\npub(super) enum EscapeUriState {\n\n Normal,\n\n OutputHighNibble(u8),\n\n OutputLowNibble(u8),\n\n}\n\n\n\n/// An internal, unstable trait that is used to adjust the behavior of [`EscapeUri`].\n\n///\n\n/// It is subject to change and is not considered stable.\n", "file_path": "async-coap-uri/src/escape/escape_uri.rs", "rank": 12, "score": 103961.49248473946 }, { "content": "fn is_char_uri_pchar(c: char) -> bool {\n\n is_char_uri_unreserved(c) || is_char_uri_sub_delim(c) || c == ':' || c == '@'\n\n}\n\n\n", "file_path": "async-coap-uri/src/escape/escape_uri.rs", "rank": 13, "score": 103961.49248473946 }, { "content": "fn is_char_uri_sub_delim(c: char) -> bool {\n\n c == '!'\n\n || c == '$'\n\n || c == '&'\n\n || c == '\\''\n\n || c == '('\n\n || c == ')'\n\n || c == '*'\n\n || c == '+'\n\n || c == ','\n\n || c == ';'\n\n || c == '='\n\n}\n\n\n", "file_path": "async-coap-uri/src/escape/escape_uri.rs", "rank": 14, "score": 102268.87623667641 }, { "content": "#[doc(hidden)]\n\npub trait TryOptionValueFrom<'a>: Sized {\n\n fn try_option_value_from(buffer: &'a [u8]) -> Option<Self>;\n\n}\n\n\n\nimpl<'a> TryOptionValueFrom<'a> for ETag {\n\n fn try_option_value_from(buffer: &'a [u8]) -> Option<Self> {\n\n if buffer.len() <= ETag::MAX_LEN {\n\n Some(ETag::new(buffer))\n\n } else {\n\n None\n\n }\n\n }\n\n}\n\n\n\nimpl<'a> TryOptionValueFrom<'a> for &'a [u8] {\n\n fn try_option_value_from(buffer: &'a [u8]) -> Option<Self> {\n\n 
Some(buffer)\n\n }\n\n}\n\n\n", "file_path": "async-coap/src/option/value.rs", "rank": 15, "score": 100765.63464747448 }, { "content": "fn assert_uri_str(uri_str: &str) -> Result<(), Error> {\n\n let captures = RFC3986_APPENDIX_B\n\n .captures(uri_str)\n\n .ok_or(Error::MalformedStructure)?;\n\n\n\n let has_scheme = captures.get(2).is_some();\n\n let has_authority = captures.get(4).is_some();\n\n\n\n if !has_scheme && !has_authority {\n\n return Err(Error::MalformedStructure);\n\n }\n\n\n\n if let Some(scheme) = captures.get(2) {\n\n // Do an additional syntax check on the scheme to make sure it is valid.\n\n URI_CHECK_SCHEME\n\n .captures(scheme.as_str())\n\n .ok_or(Error::MalformedScheme)?;\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "async-coap-uri/proc-macros/src/lib.rs", "rank": 16, "score": 96164.67103462448 }, { "content": "#[doc(hidden)]\n\npub trait TransParams: Default + Copy + Sync + Send + Unpin {\n\n fn max_outbound_packet_length(&self) -> usize {\n\n Self::MAX_OUTBOUND_PACKET_LENGTH\n\n }\n\n\n\n fn coap_max_retransmit(&self) -> u32 {\n\n Self::COAP_MAX_RETRANSMIT\n\n }\n\n\n\n fn coap_ack_timeout(&self) -> Duration {\n\n Self::COAP_ACK_TIMEOUT\n\n }\n\n\n\n fn coap_ack_random_factor(&self) -> f32 {\n\n Self::COAP_ACK_RANDOM_FACTOR\n\n }\n\n\n\n fn coap_nstart(&self) -> u32 {\n\n Self::COAP_NSTART\n\n }\n", "file_path": "async-coap/src/trans_params.rs", "rank": 17, "score": 95175.89743614166 }, { "content": "fn assert_uri_ref_str(uri_str: &str) -> Result<(), Error> {\n\n // Not sure what additional checks to do in this case.\n\n RFC3986_APPENDIX_B\n\n .captures(uri_str)\n\n .ok_or(Error::MalformedStructure)?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "async-coap-uri/proc-macros/src/lib.rs", "rank": 18, "score": 94698.1294644592 }, { "content": "fn assert_rel_ref_str(uri_str: &str) -> Result<(), Error> {\n\n // We should not be able to parse as a URI.\n\n assert_uri_str(uri_str)\n\n .err()\n\n .map(|_| ())\n\n .ok_or(Error::Degenerate)?;\n\n\n\n // 
We should be able to parse as a URI-Reference\n\n assert_uri_ref_str(uri_str)\n\n}\n\n\n", "file_path": "async-coap-uri/proc-macros/src/lib.rs", "rank": 19, "score": 94698.1294644592 }, { "content": "#[proc_macro_hack]\n\npub fn assert_uri_literal(input: TokenStream) -> TokenStream {\n\n let uri_str = string_literal_from_token_stream(input);\n\n\n\n if let Some(err_pos) = UnescapeUri::new(&uri_str).first_error() {\n\n panic!(\"Malformed percent encoding at index {}\", err_pos);\n\n }\n\n\n\n if let Err(err) = assert_uri_str(&uri_str) {\n\n panic!(\"Malformed uri literal; {:?}\", err);\n\n }\n\n\n\n let gen = quote! { () };\n\n gen.into()\n\n}\n\n\n", "file_path": "async-coap-uri/proc-macros/src/lib.rs", "rank": 20, "score": 92600.42945060303 }, { "content": "#[proc_macro_hack]\n\npub fn assert_rel_ref_literal(input: TokenStream) -> TokenStream {\n\n let uri_str = string_literal_from_token_stream(input);\n\n\n\n if let Some(err_pos) = UnescapeUri::new(&uri_str).first_error() {\n\n panic!(\"Malformed percent encoding at index {}\", err_pos);\n\n }\n\n\n\n if let Err(err) = assert_rel_ref_str(&uri_str) {\n\n panic!(\"Malformed rel_ref literal; {:?}\", err);\n\n }\n\n\n\n let gen = quote! { () };\n\n gen.into()\n\n}\n\n\n", "file_path": "async-coap-uri/proc-macros/src/lib.rs", "rank": 21, "score": 91201.52790377951 }, { "content": "#[proc_macro_hack]\n\npub fn assert_uri_ref_literal(input: TokenStream) -> TokenStream {\n\n let uri_str = string_literal_from_token_stream(input);\n\n\n\n if let Some(err_pos) = UnescapeUri::new(&uri_str).first_error() {\n\n panic!(\"Malformed percent encoding at index {}\", err_pos);\n\n }\n\n\n\n if let Err(err) = assert_uri_ref_str(&uri_str) {\n\n panic!(\"Malformed uri_ref literal; {:?}\", err);\n\n }\n\n\n\n let gen = quote! 
{ () };\n\n gen.into()\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n fn check_uri_str(uri_str: &str) -> Result<(), Error> {\n", "file_path": "async-coap-uri/proc-macros/src/lib.rs", "rank": 22, "score": 91201.52790377951 }, { "content": "/// Trait implemented by a \"socket\" that describes the underlying `SocketAddr` and socket error\n\n/// types as associated types.\n\npub trait DatagramSocketTypes: Unpin {\n\n /// The \"`SocketAddr`\" type used by this \"socket\". Typically [`std::net::SocketAddr`].\n\n type SocketAddr: SocketAddrExt\n\n + core::fmt::Display\n\n + core::fmt::Debug\n\n + std::string::ToString\n\n + ToSocketAddrs<SocketAddr = Self::SocketAddr, Error = Self::Error>\n\n + Send\n\n + Unpin\n\n + Copy;\n\n\n\n /// The error type for errors emitted from this socket. Typically [`std::io::Error`].\n\n type Error: std::fmt::Display + std::fmt::Debug;\n\n\n\n /// Returns the local `SocketAddr` of this \"socket\".\n\n fn local_addr(&self) -> Result<Self::SocketAddr, Self::Error>;\n\n\n\n /// Performs a blocking hostname lookup.\n\n fn lookup_host(\n\n host: &str,\n\n port: u16,\n\n ) -> Result<std::vec::IntoIter<Self::SocketAddr>, Self::Error>\n\n where\n\n Self: Sized;\n\n}\n\n\n", "file_path": "async-coap/src/datagram/async_socket.rs", "rank": 23, "score": 89146.34077948463 }, { "content": "/// Trait for providing `recv_from` functionality for asynchronous, datagram-based sockets.\n\n///\n\n/// The value returned on success is a tuple of the following:\n\n///\n\n/// ```\n\n/// # use std::net::SocketAddr;\n\n/// # fn ignore_this_line\n\n/// #\n\n/// (bytes_written: usize,\n\n/// remote_socket_addr: SocketAddr,\n\n/// local_socket_addr: Option<SocketAddr>)\n\n/// #\n\n/// # {} // ignore this line\n\n/// ```\n\n///\n\n/// `local_socket_addr` indicates the local address that the packet was sent to, and may not be\n\n/// supported. 
If this isn't supported, `local_socket_addr` will be set to `None`.\n\npub trait AsyncRecvFrom: DatagramSocketTypes {\n\n /// A non-blocking[^1], `poll_*` version of [`std::net::UdpSocket::recv_from`] that can\n\n /// optionally provide the destination (local) `SocketAddr`.\n\n ///\n\n /// If you need to receive a packet from within an async block, see\n\n /// [`AsyncRecvFrom::recv_from`], which returns a [`Future`][std::future::Future].\n\n ///\n\n /// [^1]: Note that while the spirit of this method intends for it to be non-blocking,\n\n /// [`AllowStdUdpSocket`] can in fact block execution depending on the state of the\n\n /// underlying [`std::net::UdpSocket`].\n\n fn poll_recv_from(\n\n self: Pin<&Self>,\n\n cx: &mut Context<'_>,\n\n buf: &mut [u8],\n\n ) -> Poll<Result<(usize, Self::SocketAddr, Option<Self::SocketAddr>), Self::Error>>;\n\n\n\n /// Returns a future that uses [`poll_recv_from`][AsyncRecvFrom::poll_recv_from].\n\n fn recv_from<'a, 'b>(&'a self, buf: &'b mut [u8]) -> RecvFromFuture<'a, 'b, Self> {\n\n RecvFromFuture {\n\n socket: self,\n\n buffer: buf,\n\n }\n\n }\n\n}\n\n\n", "file_path": "async-coap/src/datagram/async_socket.rs", "rank": 24, "score": 87368.50072661739 }, { "content": "/// Trait that provides methods for joining/leaving multicast groups.\n\npub trait MulticastSocket: DatagramSocketTypes {\n\n /// The \"address\" type for this socket.\n\n ///\n\n /// Note that this is different than a `SocketAddr`, which also includes a port number.\n\n /// This is just the address.\n\n type IpAddr;\n\n\n\n /// Attempts to join the given multicast group.\n\n fn join_multicast<A>(&self, addr: A) -> Result<(), Self::Error>\n\n where\n\n A: std::convert::Into<Self::IpAddr>;\n\n\n\n /// Attempts to leave the given multicast group.\n\n fn leave_multicast<A>(&self, addr: A) -> Result<(), Self::Error>\n\n where\n\n A: std::convert::Into<Self::IpAddr>;\n\n}\n", "file_path": "async-coap/src/datagram/async_socket.rs", "rank": 25, "score": 87358.69592263433 
}, { "content": "/// Trait for providing `sent_to` functionality for asynchronous, datagram-based sockets.\n\npub trait AsyncSendTo: DatagramSocketTypes {\n\n /// A non-blocking[^1], `poll_*` version of `std::net::UdpSocket::send_to`.\n\n ///\n\n /// [^1]: Note that while the spirit of this method intends for it to be non-blocking,\n\n /// [`AllowStdUdpSocket`] can block execution depending on the implementation details\n\n /// of the underlying [`std::net::UdpSocket`].\n\n fn poll_send_to<B>(\n\n self: Pin<&Self>,\n\n cx: &mut Context<'_>,\n\n buf: &[u8],\n\n addr: B,\n\n ) -> Poll<Result<usize, Self::Error>>\n\n where\n\n B: super::ToSocketAddrs<SocketAddr = Self::SocketAddr, Error = Self::Error>;\n\n\n\n /// Returns a future that uses [`AsyncSendTo::poll_send_to`].\n\n fn send_to<'a, 'b, B>(&'a self, buf: &'b [u8], addr: B) -> SendToFuture<'a, 'b, Self>\n\n where\n\n B: super::ToSocketAddrs<SocketAddr = Self::SocketAddr, Error = Self::Error>,\n\n {\n", "file_path": "async-coap/src/datagram/async_socket.rs", "rank": 26, "score": 87358.69592263433 }, { "content": "#[derive(Copy, Clone, Eq, PartialEq)]\n\nenum Error {\n\n #[allow(unused)]\n\n EncodingError,\n\n MalformedStructure,\n\n MalformedScheme,\n\n Degenerate,\n\n}\n\n\n\nimpl std::fmt::Debug for Error {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> Result<(), std::fmt::Error> {\n\n match self {\n\n Error::EncodingError => f.write_str(\"Encoding Error\"),\n\n Error::MalformedStructure => f.write_str(\"The structure of the URI is not recognized.\"),\n\n Error::MalformedScheme => f.write_str(\"The scheme of the URI is malformed.\"),\n\n Error::Degenerate => {\n\n f.write_str(\"This relative reference could be confused with a URI.\")\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "async-coap-uri/proc-macros/src/lib.rs", "rank": 27, "score": 64173.41317947793 }, { "content": "#[test]\n\nfn uri_regex() {\n\n {\n\n let captures = RFC3986_APPENDIX_B\n\n 
.captures(\"http://www.ics.uci.edu/pub/ietf/uri/#Related\")\n\n .expect(\"Should have matched regex\");\n\n assert_eq!(\"http:\", &captures[1]);\n\n assert_eq!(\"http\", &captures[2]);\n\n assert_eq!(\"//www.ics.uci.edu\", &captures[3]);\n\n assert_eq!(\"www.ics.uci.edu\", &captures[4]);\n\n assert_eq!(\"/pub/ietf/uri/\", &captures[5]);\n\n assert_eq!(None, captures.get(6));\n\n assert_eq!(None, captures.get(7));\n\n assert_eq!(\"#Related\", &captures[8]);\n\n assert_eq!(\"Related\", &captures[9]);\n\n }\n\n {\n\n let captures = RFC3986_APPENDIX_B\n\n .captures(\"coap+sms://username:[email protected]:1234?query&d=3#frag\")\n\n .expect(\"Should have matched regex\");\n\n assert_eq!(\"coap+sms:\", &captures[1]);\n", "file_path": "async-coap-uri/src/test.rs", "rank": 28, "score": 61766.357465573245 }, { "content": "#[test]\n\nfn test_uri() {\n\n let _ = uri!(\"https://www.example.com/\");\n\n}\n\n\n", "file_path": "async-coap-uri/tests/macros.rs", "rank": 29, "score": 61766.357465573245 }, { "content": "/// Helper function for implementing option insertion.\n\n/// Return value is a tuple of several fields:\n\n///\n\n/// * `split_index` (`usize`) The index where the new option should be inserted.\n\n/// * `prev_option_key` (`OptionNumber`) The option number of the option immediately before the split.\n\n/// * `next_key` (`OptionNumber`) The option number of the option immediately after the split.\n\n/// * `next_value_len` (`usize`) The length of the value of the option immediately after the split.\n\n/// * `next_option_size` (`usize`) The length of the entire option immediately after the split.\n\n///\n\nfn insert_split_helper(\n\n buffer: &[u8],\n\n key: OptionNumber,\n\n) -> (usize, OptionNumber, OptionNumber, usize, usize) {\n\n // This is the key for the option immediately prior to\n\n // the option we are adding.\n\n let mut prev_option_key = OptionNumber(0);\n\n\n\n // This marks at what index we will split the two halves.\n\n let mut split_index;\n\n\n\n let mut 
iter = OptionIterator::new(buffer);\n\n\n\n loop {\n\n split_index = iter.as_slice().as_ptr() as usize - buffer.as_ptr() as usize;\n\n\n\n let (next_key, next_value) = iter\n\n .next()\n\n .expect(&format!(\n\n \"Unexpected end of options (prev: {}, iter: {:?})\",\n", "file_path": "async-coap/src/message/codec.rs", "rank": 30, "score": 60683.13676350099 }, { "content": "#[test]\n\nfn test_rel_ref() {\n\n let _ = rel_ref!(\"a/b/c?q=foobar#frag\");\n\n}\n\n\n", "file_path": "async-coap-uri/tests/macros.rs", "rank": 31, "score": 60668.152738508965 }, { "content": "#[test]\n\nfn escaped_starts_with_3() {\n\n let s = \"/1/2/3/\";\n\n assert_eq!(s.unescape_uri().starts_with(\"/1/\"), Some(3));\n\n}\n\n\n", "file_path": "async-coap-uri/src/escape/test.rs", "rank": 32, "score": 60668.152738508965 }, { "content": "#[test]\n\nfn escaped_starts_with_2() {\n\n let s = \"/bl%C3%A5b%C3%A6r/%2F/syltet%C3%B8y/and/on/and/on\";\n\n assert_eq!(s.unescape_uri().starts_with(\"blåbær///syltetøy\"), None);\n\n}\n\n\n", "file_path": "async-coap-uri/src/escape/test.rs", "rank": 33, "score": 60668.152738508965 }, { "content": "#[test]\n\nfn escaped_starts_with_4() {\n\n let s = \"/1/\";\n\n assert_eq!(s.unescape_uri().starts_with(\"/1/\"), Some(3));\n\n}\n\n\n", "file_path": "async-coap-uri/src/escape/test.rs", "rank": 34, "score": 60668.152738508965 }, { "content": "#[test]\n\nfn escaped_starts_with_0() {\n\n let s = \"bl%C3%A5b%C3%A6r/%2F/syltet%C3%B8y/and/on/and/on\";\n\n assert_eq!(s.unescape_uri().starts_with(\"blåbær/%2F/syltetøy\"), None);\n\n}\n\n\n", "file_path": "async-coap-uri/src/escape/test.rs", "rank": 35, "score": 60668.152738508965 }, { "content": "#[test]\n\nfn escaped_starts_with_1() {\n\n let s = \"bl%C3%A5b%C3%A6r/%2F/syltet%C3%B8y/and/on/and/on\";\n\n assert_eq!(\n\n s.unescape_uri().starts_with(\"blåbær///syltetøy\"),\n\n Some(\"bl%C3%A5b%C3%A6r/%2F/syltet%C3%B8y\".len())\n\n );\n\n}\n\n\n", "file_path": "async-coap-uri/src/escape/test.rs", "rank": 36, "score": 
60668.152738508965 }, { "content": "#[test]\n\nfn test_uri_ref() {\n\n let _ = uri_ref!(\"a/b/c?q=foobar#frag\");\n\n}\n", "file_path": "async-coap-uri/tests/macros.rs", "rank": 37, "score": 60668.152738508965 }, { "content": "#[test]\n\nfn escaped_path_starts_with_3() {\n\n let s = \"/1/2/3/\";\n\n assert_eq!(s.unescape_uri().skip_slashes().starts_with(\"/1/\"), Some(3));\n\n}\n\n\n", "file_path": "async-coap-uri/src/escape/test.rs", "rank": 38, "score": 59629.22092698778 }, { "content": "#[test]\n\nfn escaped_path_starts_with_4() {\n\n let s = \"/1/\";\n\n assert_eq!(s.unescape_uri().skip_slashes().starts_with(\"/1/\"), Some(3));\n\n}\n\n\n", "file_path": "async-coap-uri/src/escape/test.rs", "rank": 39, "score": 59629.22092698778 }, { "content": "#[test]\n\nfn escape_uri_cow_1() {\n\n let s = \"needs-no-escaping\";\n\n let cow = s.escape_uri().to_cow();\n\n\n\n assert_eq!(cow, s);\n\n}\n\n\n", "file_path": "async-coap-uri/src/escape/test.rs", "rank": 40, "score": 59629.22092698778 }, { "content": "#[test]\n\nfn unescape_uri_cow_2() {\n\n let s = \"needs%20unescaping\";\n\n let cow = s.unescape_uri().to_cow();\n\n\n\n assert_ne!(cow, s);\n\n assert_eq!(cow, \"needs unescaping\");\n\n}\n\n\n", "file_path": "async-coap-uri/src/escape/test.rs", "rank": 41, "score": 59629.22092698778 }, { "content": "#[test]\n\nfn escape_uri_cow_2() {\n\n let s = \"needs escaping\";\n\n let cow = s.escape_uri().to_cow();\n\n\n\n assert_ne!(cow, s);\n\n assert_eq!(cow, \"needs%20escaping\");\n\n}\n\n\n", "file_path": "async-coap-uri/src/escape/test.rs", "rank": 42, "score": 59629.22092698778 }, { "content": "#[test]\n\nfn unescape_uri_cow_1() {\n\n let s = \"needs-no-unescaping\";\n\n let cow = s.unescape_uri().to_cow();\n\n\n\n assert_eq!(cow, s);\n\n}\n\n\n", "file_path": "async-coap-uri/src/escape/test.rs", "rank": 43, "score": 59629.22092698778 }, { "content": "#[test]\n\nfn escaped_path_starts_with_1() {\n\n let s = \"bl%C3%A5b%C3%A6r/%2F/syltet%C3%B8y/and/on/and/on\";\n\n 
assert_eq!(\n\n s.unescape_uri()\n\n .skip_slashes()\n\n .starts_with(\"blåbær/%2F/syltetøy\"),\n\n Some(\"bl%C3%A5b%C3%A6r/%2F/syltet%C3%B8y\".len())\n\n );\n\n}\n\n\n", "file_path": "async-coap-uri/src/escape/test.rs", "rank": 44, "score": 59629.22092698778 }, { "content": "#[test]\n\nfn escaped_path_starts_with_2() {\n\n let s = \"/bl%C3%A5b%C3%A6r/%2F/syltet%C3%B8y/and/on/and/on\";\n\n assert_eq!(\n\n s.unescape_uri()\n\n .skip_slashes()\n\n .starts_with(\"blåbær/%2F/syltetøy\"),\n\n None\n\n );\n\n}\n\n\n", "file_path": "async-coap-uri/src/escape/test.rs", "rank": 45, "score": 59629.22092698778 }, { "content": "/// A flavor of `std::net::ToSocketAddrs` that allows the implementation of\n\n/// `SocketAddr` to be replaced.\n\n///\n\n/// This is necessary to enable support for things like\n\n/// CoAP-over-SMS, where socket addresses are telephone numbers.\n\npub trait ToSocketAddrs {\n\n /// Analogous to [`std::net::ToSocketAddrs::Iter`]\n\n type Iter: Iterator<Item = Self::SocketAddr>;\n\n\n\n /// The `SocketAddr` type returned by the above iterator.\n\n type SocketAddr: SocketAddrExt + Copy;\n\n\n\n /// The error type to use for errors while resolving.\n\n type Error: core::fmt::Debug;\n\n\n\n /// Analogous to [`std::net::ToSocketAddrs::to_socket_addrs`]\n\n fn to_socket_addrs(&self) -> Result<Self::Iter, Self::Error>;\n\n}\n\n\n\n/// Blanket implementation of `ToSocketAddrs` for all implementations of `std::net::ToSocketAddrs`.\n\n#[cfg(feature = \"std\")]\n\nimpl<T, I> ToSocketAddrs for T\n\nwhere\n\n T: std::net::ToSocketAddrs<Iter = I>,\n\n I: Iterator<Item = std::net::SocketAddr>,\n\n{\n\n type Iter = I;\n\n type SocketAddr = std::net::SocketAddr;\n\n type Error = std::io::Error;\n\n\n\n fn to_socket_addrs(&self) -> Result<Self::Iter, Self::Error> {\n\n std::net::ToSocketAddrs::to_socket_addrs(self)\n\n }\n\n}\n\n\n", "file_path": "async-coap/src/socketaddr.rs", "rank": 46, "score": 59583.42504418337 }, { "content": "#[test]\n\nfn unescape_uri_path_cow_2() 
{\n\n let s = \"this/%20does%20/need%2Funescaping\";\n\n let cow = s.unescape_uri().skip_slashes().to_cow();\n\n\n\n assert_ne!(cow, s);\n\n assert_eq!(cow, \"this/ does /need%2Funescaping\");\n\n}\n\n\n", "file_path": "async-coap-uri/src/escape/test.rs", "rank": 47, "score": 58644.88946022018 }, { "content": "#[test]\n\nfn try_unescape_uri_cow_3() {\n\n let s = \"bad%10escaping\";\n\n let cow = s.unescape_uri().try_to_cow();\n\n\n\n assert_eq!(cow, Err(6));\n\n}\n\n\n\nmacro_rules! test_escape_unescape {\n\n ( $NAME:ident, $UNESCAPED:expr, $ESCAPED:expr ) => {\n\n #[test]\n\n fn $NAME() {\n\n assert_eq!(\n\n &$UNESCAPED.escape_uri().to_string(),\n\n $ESCAPED,\n\n \"Failed on escape_uri().to_string()\"\n\n );\n\n assert_eq!(\n\n &$ESCAPED.unescape_uri().to_string(),\n\n $UNESCAPED,\n\n \"Failed on unescape_uri().to_string()\"\n", "file_path": "async-coap-uri/src/escape/test.rs", "rank": 48, "score": 58644.88946022018 }, { "content": "#[test]\n\nfn unescape_uri_path_cow_1() {\n\n let s = \"needs/no/unescaping\";\n\n let cow = s.unescape_uri().skip_slashes().to_cow();\n\n\n\n assert_eq!(cow, s);\n\n}\n\n\n", "file_path": "async-coap-uri/src/escape/test.rs", "rank": 49, "score": 58644.88946022018 }, { "content": "#[test]\n\nfn try_unescape_uri_cow_2() {\n\n let s = \"needs%20unescaping\";\n\n let cow = s.unescape_uri().try_to_cow();\n\n\n\n assert_ne!(cow, Ok(Cow::from(s)));\n\n assert_eq!(cow, Ok(Cow::from(\"needs unescaping\")));\n\n}\n\n\n", "file_path": "async-coap-uri/src/escape/test.rs", "rank": 50, "score": 58644.88946022018 }, { "content": "#[test]\n\nfn try_unescape_uri_cow_1() {\n\n let s = \"needs-no-unescaping\";\n\n let cow = s.unescape_uri().try_to_cow();\n\n\n\n assert_eq!(cow, Ok(Cow::from(s)));\n\n}\n\n\n", "file_path": "async-coap-uri/src/escape/test.rs", "rank": 51, "score": 58644.88946022018 }, { "content": "/// An object that represents a remote CoAP endpoint with a default, overridable path.\n\n///\n\n/// # Example\n\n///\n\n/// ```\n\n/// 
#\n\n/// # use std::sync::Arc;\n\n/// # use futures::{prelude::*,executor::LocalPool,task::LocalSpawnExt};\n\n/// # use async_coap::prelude::*;\n\n/// # use async_coap::datagram::{DatagramLocalEndpoint,AllowStdUdpSocket};\n\n/// #\n\n/// # // Create our asynchronous socket. In this case, it is just an\n\n/// # // (inefficient) wrapper around the standard rust `UdpSocket`,\n\n/// # // but that is quite adequate in this case.\n\n/// # let socket = AllowStdUdpSocket::bind(\"[::]:0\").expect(\"UDP bind failed\");\n\n/// #\n\n/// # // Create a new local endpoint from the socket we just created,\n\n/// # // wrapping it in a `Arc<>` to ensure it can live long enough.\n\n/// # let local_endpoint = Arc::new(DatagramLocalEndpoint::new(socket));\n\n/// #\n\n/// # // Create a local execution pool for running our local endpoint.\n\n/// # let mut pool = LocalPool::new();\n\n/// #\n\n/// # // Add our local endpoint to the pool, so that it\n\n/// # // can receive packets.\n\n/// # pool.spawner().spawn_local(local_endpoint\n\n/// # .clone()\n\n/// # .receive_loop_arc(null_receiver!())\n\n/// # .map(|err| panic!(\"Receive loop terminated: {}\", err))\n\n/// # );\n\n/// #\n\n/// # let future = async move {\n\n/// // Create a remote endpoint instance to represent the\n\n/// // device we wish to interact with.\n\n/// let remote_endpoint = local_endpoint\n\n/// .remote_endpoint_from_uri(uri!(\"coap://coap.me\"))\n\n/// .unwrap(); // Will only fail if the URI scheme or authority is unrecognizable\n\n///\n\n/// // Create a future that sends a request to a specific path\n\n/// // on the remote endpoint, collecting any blocks in the response\n\n/// // and returning `Ok(OwnedImmutableMessage)` upon success.\n\n/// let future = remote_endpoint.send_to(\n\n/// rel_ref!(\"large\"),\n\n/// CoapRequest::get() // This is a CoAP GET request\n\n/// .accept(ContentFormat::TEXT_PLAIN_UTF8) // We only want plaintext\n\n/// .block2(Some(Default::default())) // Enable block2 processing\n\n/// 
.emit_successful_collected_response() // Collect all blocks into a single message\n\n/// );\n\n///\n\n/// // Wait for the final result and print it.\n\n/// println!(\"result: {:?}\", future.await.unwrap());\n\n/// # };\n\n/// #\n\n/// # pool.run_until(future);\n\n/// ```\n\n///\n\npub trait RemoteEndpoint {\n\n /// The `SocketAddr` type to use with this local endpoint. This is usually\n\n /// simply `std::net::SocketAddr`, but may be different in some cases (like for CoAP-SMS\n\n /// endpoints).\n\n type SocketAddr: SocketAddrExt;\n\n\n\n /// Type used by closure that is passed into `send()`, representing the context for the\n\n /// response.\n\n type InboundContext: InboundContext<SocketAddr = Self::SocketAddr>;\n\n\n\n /// Returns a [`UriBuf`] describing the underlying destination of this remote endpoint.\n\n fn uri(&self) -> UriBuf;\n\n\n\n /// Returns a string slice containing the scheme for this `RemoteEndpoint`.\n\n fn scheme(&self) -> &'static str;\n\n\n\n /// Prevents this remote endpoint from including a `Uri-Host` option.\n\n fn remove_host_option(&mut self);\n\n\n\n /// Creates a clone of this `RemoteEndpoint` with a different relative path.\n", "file_path": "async-coap/src/remote_endpoint.rs", "rank": 52, "score": 58494.62924620985 }, { "content": "/// Trait for types that allow you to insert CoAP options into them.\n\npub trait OptionInsert {\n\n /// Inserts an option into the message with the given bytes as the value.\n\n /// Calling this method with out-of-order keys will incur a significant performance penalty.\n\n fn insert_option_with_bytes(&mut self, key: OptionNumber, value: &[u8]) -> Result<(), Error>;\n\n\n\n /// Inserts an option into the message with no value.\n\n /// Calling this method with out-of-order keys will incur a significant performance penalty.\n\n fn insert_option_empty(&mut self, key: OptionNumber) -> Result<(), Error> {\n\n self.insert_option_with_bytes(key, &[])\n\n }\n\n\n\n /// Inserts an option into the message with a 
string value.\n\n /// Calling this method with out-of-order keys will incur a significant performance penalty.\n\n fn insert_option_with_str(&mut self, key: OptionNumber, value: &str) -> Result<(), Error> {\n\n self.insert_option_with_bytes(key, value.as_bytes())\n\n }\n\n\n\n /// Inserts an option into the message with an integer value.\n\n /// Calling this method with out-of-order keys will incur a significant performance penalty.\n\n fn insert_option_with_u32(&mut self, key: OptionNumber, value: u32) -> Result<(), Error> {\n\n self.insert_option_with_bytes(key, encode_u32(value, &mut [0; 4]))\n\n }\n\n}\n\n\n", "file_path": "async-coap/src/option/insert.rs", "rank": 53, "score": 58485.36381689628 }, { "content": "/// Extension trait for `SocketAddr` types that allows the local endpoint get the information\n\n/// it needs.\n\npub trait SocketAddrExt:\n\n Sized + ToSocketAddrs + Copy + core::fmt::Display + core::fmt::Debug + Send + Eq + Hash\n\n{\n\n /// Determines if the address in this `SocketAddr` is a multicast/broadcast address or not.\n\n fn is_multicast(&self) -> bool;\n\n\n\n /// Returns the port number for this socket.\n\n ///\n\n /// A value of zero indicates no specific value.\n\n fn port(&self) -> u16;\n\n\n\n /// Returns a version of this socket address that conforms to the address type of `local`,\n\n /// or `None` if such a conversion is not possible.\n\n ///\n\n /// This method is useful in mixed ipv6/ipv4 environments.\n\n #[allow(unused_variables)]\n\n fn conforming_to(&self, local: Self) -> Option<Self> {\n\n Some(*self)\n\n }\n\n\n", "file_path": "async-coap/src/socketaddr.rs", "rank": 54, "score": 58485.175659906556 }, { "content": "/// Trait for reading the various parts of a CoAP message.\n\npub trait MessageRead {\n\n /// Gets the message code for this message.\n\n fn msg_code(&self) -> MsgCode;\n\n\n\n /// Gets the message type for this message.\n\n fn msg_type(&self) -> MsgType;\n\n\n\n /// Gets the message id for this message.\n\n fn 
msg_id(&self) -> MsgId;\n\n\n\n /// Gets the message token for this message.\n\n fn msg_token(&self) -> MsgToken;\n\n\n\n /// Gets the payload as a byte slice.\n\n fn payload(&self) -> &[u8];\n\n\n\n /// Gets an iterator for processing the options of the message.\n\n fn options(&self) -> OptionIterator<'_>;\n\n\n\n /// Writes this message to the given `target` that implements [`MessageWrite`].\n", "file_path": "async-coap/src/message/read.rs", "rank": 55, "score": 58480.481770600774 }, { "content": "/// Trait for objects that represent logical URI-references. Useful for generic programming.\n\n///\n\npub trait AnyUriRef {\n\n /// Returns a `UriRawComponents` instance which contains all of the components for this\n\n /// URI reference.\n\n ///\n\n /// This is the only method that is required to be implemented---all other methods have\n\n /// defaults in place which use this method, but they may be inefficient.\n\n #[must_use]\n\n fn components(&self) -> UriRawComponents<'_>;\n\n\n\n /// Returns true if the underlying URI-reference is actually the empty reference.\n\n #[must_use]\n\n fn is_empty(&self) -> bool {\n\n self.components().is_empty()\n\n }\n\n\n\n /// Gets the [`UriType`] of the underlying URI-reference.\n\n ///\n\n /// [`UriType`]: enum.UriType.html\n\n #[must_use]\n\n fn uri_type(&self) -> UriType {\n", "file_path": "async-coap-uri/src/any_uri_ref.rs", "rank": 56, "score": 57445.6930516701 }, { "content": "/// Extension class for additional helper methods for `OptionInsertExt`.\n\npub trait OptionInsertExt {\n\n /// Inserts an option into the message with a value of the appropriate type.\n\n /// Calling this method with out-of-order keys will incur a significant performance penalty.\n\n fn insert_option<'a, T>(&mut self, key: OptionKey<T>, value: T) -> Result<(), Error>\n\n where\n\n T: Into<OptionValue<'a>>;\n\n}\n\n\n\nimpl<O> OptionInsertExt for O\n\nwhere\n\n O: OptionInsert + ?Sized,\n\n{\n\n fn insert_option<'a, T>(&mut self, key: OptionKey<T>, 
value: T) -> Result<(), Error>\n\n where\n\n T: Into<OptionValue<'a>>,\n\n {\n\n match value.into() {\n\n OptionValue::Integer(x) => self.insert_option_with_u32(key.0, x),\n\n OptionValue::Bytes(x) => self.insert_option_with_bytes(key.0, x),\n\n OptionValue::ETag(x) => self.insert_option_with_bytes(key.0, x.as_bytes()),\n\n }\n\n }\n\n}\n", "file_path": "async-coap/src/option/insert.rs", "rank": 57, "score": 57441.10840720544 }, { "content": "/// Trait for `str` adding URI percent encoding/decoding\n\n///\n\n/// See the [module-level](index.html) documentation for more details.\n\n///\n\npub trait StrExt {\n\n /// Gets an iterator that performs general-purpose URI percent-encoding.\n\n ///\n\n /// By default, all characters described by [`IETF-RFC3986`] as `pchar`s will be escaped,\n\n /// which is appropriate for escaping path segments.\n\n /// This behavior can be modified by appending the following modifiers:\n\n ///\n\n /// * [`full()`]: Escapes all characters except those which are `unreserved`.\n\n /// * [`for_query()`]: Escaping appropriate for the query component.\n\n /// * [`for_fragment()`]: Escaping appropriate for the fragment component.\n\n ///\n\n /// The returned iterator will escape ASCII control characters.\n\n ///\n\n /// [`full()`]: struct.EscapeUri#method.full\n\n /// [`for_query()`]: struct.EscapeUri#method.for_query\n\n /// [`for_fragment()`]: struct.EscapeUri#method.for_fragment\n\n fn escape_uri(&self) -> EscapeUri<'_, EscapeUriSegment>;\n\n\n\n /// Gets an iterator that performs URI percent-decoding.\n\n ///\n", "file_path": "async-coap-uri/src/escape/mod.rs", "rank": 58, "score": 57441.10840720544 }, { "content": "/// A trait for asynchronous datagram sockets.\n\n///\n\n/// This is an empty convenience trait that requires several additional traits to be implemented:\n\n/// [`DatagramSocketTypes`], [`AsyncSendTo`], [`AsyncRecvFrom`], [`MulticastSocket`],\n\n/// and [`Send`]+[`Sync`].\n\n///\n\n/// Implementations of this trait can be used 
with [`DatagramLocalEndpoint`].\n\npub trait AsyncDatagramSocket:\n\n DatagramSocketTypes + AsyncSendTo + AsyncRecvFrom + MulticastSocket + Send + Sync\n\n{\n\n}\n\n\n", "file_path": "async-coap/src/datagram/async_socket.rs", "rank": 59, "score": 56464.126691448786 }, { "content": "/// Marker trait for identifying that this `SendDesc` is for *unicast* requests.\n\n/// Also contains unicast-specific combinators, such as [`block2()`][SendDescUnicast::block2].\n\npub trait SendDescUnicast {\n\n /// Returns a send descriptor that will perform Block2 processing.\n\n ///\n\n /// Note that just adding this to your send descriptor chain alone is unlikely to do what\n\n /// you want. You've got three options:\n\n ///\n\n /// * Add a call to [`emit_successful_collected_response`][UnicastBlock2::emit_successful_collected_response]\n\n /// immediately after the call to this method. This will cause the message to be reconstructed from the blocks\n\n /// and returned as a value from the future from `send`. You can optionally add an\n\n /// [`inspect`][SendDescExt::inspect] combinator to get some feedback as the message is being\n\n /// reconstructed from all of the individual block messages.\n\n /// * Add a call to [`emit_successful_response`][SendDescExt::emit_successful_response] along\n\n /// with using `send_to_stream` instead of `send`. This will give you a `Stream` that will\n\n /// contain all of the individual block messages in the stream.\n\n /// * [Add your own handler][SendDescExt::use_handler] to do whatever you need to do, returning\n\n /// `ResponseStatus::SendNext` until all of the blocks have been received. 
This is\n\n /// useful if you want to avoid memory allocation.\n\n ///\n\n /// There may be other valid combinations of combinators, depending on what you are trying\n\n /// to do.\n", "file_path": "async-coap/src/send_desc/mod.rs", "rank": 60, "score": 56456.35859401786 }, { "content": "/// Marker trait for identifying that this `SendDesc` is for *multicast* requests.\n\n/// Also contains multicast-specific extensions.\n\npub trait SendDescMulticast {}\n\n\n", "file_path": "async-coap/src/send_desc/mod.rs", "rank": 61, "score": 56456.35859401786 }, { "content": "/// Trait representing a local (as opposed to remote) CoAP endpoint. Allows for sending and\n\n/// receiving CoAP requests.\n\n///\n\n/// # Implementations\n\n///\n\n/// `LocalEndpoint` is a trait, which allows for multiple back-end implementations.\n\n/// `async-coap` comes with two: [`NullLocalEndpoint`] and [`DatagramLocalEndpoint`].\n\n///\n\n/// [`NullLocalEndpoint`] does what you might expect: nothing. Attempts to send\n\n/// requests always results in [`Error::ResponseTimeout`] and [`LocalEndpoint::receive`]\n\n/// will block indefinitely. Creating an instance of it is quite straightforward:\n\n///\n\n/// [`NullLocalEndpoint`]: crate::null::NullLocalEndpoint\n\n/// [`DatagramLocalEndpoint`]: crate::datagram::DatagramLocalEndpoint\n\n///\n\n/// ```\n\n/// use std::sync::Arc;\n\n/// use async_coap::null::NullLocalEndpoint;\n\n///\n\n/// let local_endpoint = Arc::new(NullLocalEndpoint);\n\n/// ```\n\n///\n\n/// If you want to do something more useful, then [`DatagramLocalEndpoint`] is likely\n\n/// what you are looking for. 
It takes an instance of [`AsyncDatagramSocket`] at construction:\n\n///\n\n/// [`AsyncDatagramSocket`]: crate::datagram::AsyncDatagramSocket\n\n///\n\n/// ```\n\n/// use std::sync::Arc;\n\n/// use async_coap::prelude::*;\n\n/// use async_coap::datagram::{DatagramLocalEndpoint,AllowStdUdpSocket};\n\n///\n\n/// // `AllowStdUdpSocket`, which is a (inefficient) wrapper around the\n\n/// // standard rust `UdpSocket`. It is convenient for testing and for examples\n\n/// // but should not be used in production code.\n\n/// let socket = AllowStdUdpSocket::bind(\"[::]:0\").expect(\"UDP bind failed\");\n\n///\n\n/// // Create a new local endpoint from the socket instance we just created,\n\n/// // wrapping it in a `Arc<>` to ensure it can live long enough.\n\n/// let local_endpoint = Arc::new(DatagramLocalEndpoint::new(socket));\n\n/// ```\n\n///\n\n/// # Client Usage\n\n///\n\n/// Before you can start sending requests and receiving responses, you\n\n/// will need to make sure that the [`LocalEndpoint::receive`] method\n\n/// gets called repeatedly. 
The easiest way to do that is to add the\n\n/// [`std::future::Future`] returned by [`LocalEndpointExt::receive_loop_arc`]\n\n/// to an execution pool:\n\n///\n\n/// ```\n\n/// # use std::sync::Arc;\n\n/// # use async_coap::prelude::*;\n\n/// # use async_coap::datagram::{DatagramLocalEndpoint, AllowStdUdpSocket, LoopbackSocket};\n\n/// # use async_coap::null::NullLocalEndpoint;\n\n/// #\n\n/// # let local_endpoint = Arc::new(NullLocalEndpoint);\n\n/// #\n\n/// use futures::{prelude::*,executor::ThreadPool,task::Spawn,task::SpawnExt};\n\n///\n\n/// let mut pool = ThreadPool::new().expect(\"Unable to create thread pool\");\n\n///\n\n/// // We use a receiver handler of `null_receiver!()` because this instance\n\n/// // will be used purely as a client, not a server.\n\n/// pool.spawn(local_endpoint\n\n/// .clone()\n\n/// .receive_loop_arc(null_receiver!())\n\n/// .map(|_|unreachable!())\n\n/// );\n\n/// ```\n\n///\n\n/// Once the `Arc<LocalEndpint>` has been added to an execution pool, the `run_until` method\n\n/// on the pool can be used to block execution of the futures emitted by `LocalEndpoint`:\n\n///\n\n/// ```\n\n/// # use std::sync::Arc;\n\n/// # use futures::{prelude::*,executor::LocalPool,task::LocalSpawnExt};\n\n/// # use async_coap::prelude::*;\n\n/// # use async_coap::datagram::{DatagramLocalEndpoint, AllowStdUdpSocket, LoopbackSocket};\n\n/// # use async_coap::null::NullLocalEndpoint;\n\n/// #\n\n/// # // Using a NullLocalEndpoint since this is just a simple usage example.\n\n/// # let local_endpoint = Arc::new(NullLocalEndpoint);\n\n/// # let mut local_pool = LocalPool::new();\n\n/// #\n\n/// # local_pool.spawner().spawn_local(local_endpoint\n\n/// # .clone()\n\n/// # .receive_loop_arc(null_receiver!())\n\n/// # .map(|_|unreachable!())\n\n/// # );\n\n///\n\n/// let result = local_pool.run_until(\n\n/// local_endpoint.send(\n\n/// \"coap.me:5683\",\n\n/// CoapRequest::get() // This is a CoAP GET request\n\n/// .emit_any_response() // Return the first 
response we get\n\n/// )\n\n/// );\n\n///\n\n/// println!(\"result: {:?}\", result);\n\n/// ```\n\n///\n\n/// Or, more naturally, the returned futures can be used directly in `async` blocks:\n\n///\n\n/// ```\n\n/// # use std::sync::Arc;\n\n/// # use futures::{prelude::*,executor::LocalPool,task::LocalSpawnExt};\n\n/// # use async_coap::prelude::*;\n\n/// # use async_coap::datagram::{DatagramLocalEndpoint, AllowStdUdpSocket, LoopbackSocket};\n\n/// # use async_coap::null::NullLocalEndpoint;\n\n/// #\n\n/// # // Using a NullLocalEndpoint since this is just a simple usage example.\n\n/// # let local_endpoint = Arc::new(NullLocalEndpoint);\n\n/// # let mut pool = LocalPool::new();\n\n/// #\n\n/// # pool.spawner().spawn_local(local_endpoint\n\n/// # .clone()\n\n/// # .receive_loop_arc(null_receiver!())\n\n/// # .map(|_|unreachable!())\n\n/// # );\n\n/// #\n\n/// # let future =\n\n/// async move {\n\n/// let future = local_endpoint.send(\n\n/// \"coap.me:5683\",\n\n/// CoapRequest::get() // This is a CoAP GET request\n\n/// .emit_any_response() // Return the first response we get\n\n/// );\n\n///\n\n/// // Wait for the final result and print it.\n\n/// println!(\"result: {:?}\", future.await);\n\n/// }\n\n/// # ;\n\n/// #\n\n/// # pool.run_until(future);\n\n/// ```\n\n///\n\n/// # Server Usage\n\n///\n\n/// In order to serve resources for other devices to interact with, you will\n\n/// need to replace the [`null_receiver!`] we were using earlier with something\n\n/// more substantial. 
The method takes a closure as an argument, and the closure\n\n/// itself has a single argument: a borrowed [`RespondableInboundContext`].\n\n///\n\n/// For example, to have our server return a response for a request instead of\n\n/// just returning an error, we could use the following function as our receive handler:\n\n///\n\n/// ```\n\n/// use async_coap::prelude::*;\n\n/// use async_coap::{RespondableInboundContext, Error};\n\n///\n\n/// fn receive_handler<T: RespondableInboundContext>(context: &T) -> Result<(),Error> {\n\n/// context.respond(|msg_out|{\n\n/// msg_out.set_msg_code(MsgCode::SuccessContent);\n\n/// msg_out.insert_option(option::CONTENT_FORMAT, ContentFormat::TEXT_PLAIN_UTF8)?;\n\n/// msg_out.append_payload_string(\"Successfully fetched!\")?;\n\n/// Ok(())\n\n/// })?;\n\n/// Ok(())\n\n/// }\n\n/// # use std::sync::Arc;\n\n/// # use futures::{prelude::*,executor::LocalPool,task::LocalSpawnExt};\n\n/// # use async_coap::datagram::{DatagramLocalEndpoint, AllowStdUdpSocket, LoopbackSocket, LoopbackSocketAddr};\n\n/// # use async_coap::null::NullLocalEndpoint;\n\n/// # use async_coap::message::MessageRead;\n\n/// #\n\n/// # let local_endpoint = Arc::new(DatagramLocalEndpoint::new(LoopbackSocket::new()));\n\n/// # let mut pool = LocalPool::new();\n\n/// #\n\n/// # pool.spawner().spawn_local(local_endpoint.clone().receive_loop_arc(receive_handler).map(|_|unreachable!()));\n\n/// #\n\n/// # let result = pool.run_until(\n\n/// # local_endpoint.send(\n\n/// # LoopbackSocketAddr::Unicast,\n\n/// # CoapRequest::get() // This is a CoAP GET request\n\n/// # .emit_any_response() // Return the first response we get\n\n/// # )\n\n/// # );\n\n/// # println!(\"result: {:?}\", result);\n\n/// # let result = result.unwrap();\n\n/// # assert_eq!(result.msg_code(), MsgCode::SuccessContent);\n\n/// # assert_eq!(result.msg_type(), MsgType::Ack);\n\n/// ```\n\n///\n\n/// However, that's actually not super useful: it returns a successful result for\n\n/// every possible 
request: including bogus ones. Let's say that we wanted to expose a\n\n/// resource that lives at \"`/test`\" on our server, returning a [`4.04 Not Found`](MsgCode::ClientErrorNotFound)\n\n/// for every other request. That might look something like this:\n\n///\n\n/// ```\n\n/// use async_coap::prelude::*;\n\n/// use async_coap::{RespondableInboundContext, Error, LinkFormatWrite, LINK_ATTR_TITLE};\n\n/// use core::fmt::Write; // For `write!()`\n\n/// use core::borrow::Borrow;\n\n/// use option::CONTENT_FORMAT;\n\n///\n\n/// fn receive_handler<T: RespondableInboundContext>(context: &T) -> Result<(),Error> {\n\n/// let msg = context.message();\n\n/// let uri = msg.options().extract_uri()?;\n\n/// let decoded_path = uri.raw_path().unescape_uri().skip_slashes().to_cow();\n\n///\n\n/// match (msg.msg_code(), decoded_path.borrow()) {\n\n/// // Handle GET /test\n\n/// (MsgCode::MethodGet, \"test\") => context.respond(|msg_out| {\n\n/// msg_out.set_msg_code(MsgCode::SuccessContent);\n\n/// msg_out.insert_option(CONTENT_FORMAT, ContentFormat::TEXT_PLAIN_UTF8);\n\n/// write!(msg_out,\"Successfully fetched {:?}!\", uri.as_str())?;\n\n/// Ok(())\n\n/// }),\n\n///\n\n/// // Handle GET /.well-known/core, for service discovery.\n\n/// (MsgCode::MethodGet, \".well-known/core\") => context.respond(|msg_out| {\n\n/// msg_out.set_msg_code(MsgCode::SuccessContent);\n\n/// msg_out.insert_option(CONTENT_FORMAT, ContentFormat::APPLICATION_LINK_FORMAT);\n\n/// LinkFormatWrite::new(msg_out)\n\n/// .link(uri_ref!(\"/test\"))\n\n/// .attr(LINK_ATTR_TITLE, \"Test Resource\")\n\n/// .finish()?;\n\n/// Ok(())\n\n/// }),\n\n///\n\n/// // Handle unsupported methods\n\n/// (_, \"test\") | (_, \".well-known/core\") => context.respond(|msg_out| {\n\n/// msg_out.set_msg_code(MsgCode::ClientErrorMethodNotAllowed);\n\n/// write!(msg_out,\"Method \\\"{:?}\\\" Not Allowed\", msg.msg_code())?;\n\n/// Ok(())\n\n/// }),\n\n///\n\n/// // Everything else is a 4.04\n\n/// (_, _) => context.respond(|msg_out| 
{\n\n/// msg_out.set_msg_code(MsgCode::ClientErrorNotFound);\n\n/// write!(msg_out,\"{:?} Not Found\", uri.as_str())?;\n\n/// Ok(())\n\n/// }),\n\n/// }\n\n/// }\n\n/// # use std::sync::Arc;\n\n/// # use futures::{prelude::*,executor::LocalPool,task::LocalSpawnExt};\n\n/// # use async_coap::datagram::{DatagramLocalEndpoint, AllowStdUdpSocket, LoopbackSocket, LoopbackSocketAddr};\n\n/// # use async_coap::null::NullLocalEndpoint;\n\n/// # use async_coap::message::MessageRead;\n\n/// # use std::borrow::Cow;\n\n/// #\n\n/// # let local_endpoint = Arc::new(DatagramLocalEndpoint::new(LoopbackSocket::new()));\n\n/// # let mut pool = LocalPool::new();\n\n/// #\n\n/// # pool.spawner().spawn_local(local_endpoint\n\n/// # .clone()\n\n/// # .receive_loop_arc(receive_handler)\n\n/// # .map(|_|unreachable!())\n\n/// # );\n\n/// #\n\n/// # let result = pool.run_until(\n\n/// # local_endpoint.send(\n\n/// # LoopbackSocketAddr::Unicast,\n\n/// # CoapRequest::get() // This is a CoAP GET request\n\n/// # .uri_host_path(None, rel_ref!(\"test\")) // Add a path to the message\n\n/// # .emit_any_response() // Return the first response we get\n\n/// # )\n\n/// # );\n\n/// # println!(\"result: {:?}\", result);\n\n/// # let result = result.unwrap();\n\n/// # assert_eq!(result.msg_code(), MsgCode::SuccessContent);\n\n/// # assert_eq!(result.msg_type(), MsgType::Ack);\n\n/// #\n\n/// #\n\n/// # let result = pool.run_until(\n\n/// # local_endpoint.send(\n\n/// # LoopbackSocketAddr::Unicast,\n\n/// # CoapRequest::post() // This is a CoAP POST request\n\n/// # .uri_host_path(None, rel_ref!(\"test\")) // Add a path to the message\n\n/// # .emit_successful_response() // Return the first successful response we get\n\n/// # .inspect(|cx| {\n\n/// # // Inspect here since we currently can't do\n\n/// # // a detailed check in the return value.\n\n/// # assert_eq!(cx.message().msg_code(), MsgCode::ClientErrorMethodNotAllowed);\n\n/// # assert_eq!(cx.message().msg_type(), MsgType::Ack);\n\n/// # 
})\n\n/// # )\n\n/// # );\n\n/// # println!(\"result: {:?}\", result);\n\n/// # assert_eq!(result.err(), Some(Error::ClientRequestError));\n\n/// #\n\n/// # let result = pool.run_until(\n\n/// # local_endpoint.send(\n\n/// # LoopbackSocketAddr::Unicast,\n\n/// # CoapRequest::get() // This is a CoAP GET request\n\n/// # .emit_successful_response() // Return the first successful response we get\n\n/// # .uri_host_path(None, rel_ref!(\"/foobar\"))\n\n/// # .inspect(|cx| {\n\n/// # // Inspect here since we currently can't do\n\n/// # // a detailed check in the return value.\n\n/// # assert_eq!(cx.message().msg_code(), MsgCode::ClientErrorNotFound);\n\n/// # assert_eq!(cx.message().msg_type(), MsgType::Ack);\n\n/// # })\n\n/// # )\n\n/// # );\n\n/// # println!(\"result: {:?}\", result);\n\n/// # assert_eq!(result.err(), Some(Error::ResourceNotFound));\n\n/// ```\n\n///\n\npub trait LocalEndpoint: Sized {\n\n /// The `SocketAddr` type to use with this local endpoint. This is usually\n\n /// simply `std::net::SocketAddr`, but may be different in some cases (like for CoAP-SMS\n\n /// endpoints).\n\n type SocketAddr: SocketAddrExt\n\n + ToSocketAddrs<SocketAddr = Self::SocketAddr, Error = Self::SocketError>;\n\n\n\n /// The error type associated with errors generated by socket and address-lookup operations.\n\n /// Typically, this is `std::io::Error`, but it may be different if `Self::SocketAddr` isn't\n\n /// `std::net::SocketAddr`.\n\n type SocketError: core::fmt::Debug;\n\n\n\n /// The trait representing the default transmission parameters to use.\n\n type DefaultTransParams: TransParams;\n\n\n\n /// Type used by closure that is passed into `send()`, representing the context for the\n\n /// response.\n\n type InboundContext: InboundContext<SocketAddr = Self::SocketAddr>;\n\n\n\n /// Type used by closure that is passed into `receive()`, representing the context for\n", "file_path": "async-coap/src/local_endpoint.rs", "rank": 62, "score": 55427.96398174556 }, { "content": 
"/// Represents the context for processing an inbound message.\n\npub trait InboundContext: Send {\n\n /// The `SocketAddr` type from the associated `LocalEndpoint`.\n\n type SocketAddr: SocketAddrExt;\n\n\n\n /// Returns a copy of the remote address of the inbound message.\n\n fn remote_socket_addr(&self) -> Self::SocketAddr;\n\n\n\n /// Indicates if the endpoint thinks this message is a duplicate. This is used\n\n /// for non-idempotent methods (like POST) to determine if the operation should\n\n /// have real effects or if it should just go through the motions without changing\n\n /// state. Duplicates are generally only passed through when the underlying transport\n\n /// doesn't support support storing sent replies for this purpose.\n\n fn is_dupe(&self) -> bool;\n\n\n\n /// Returns a reference to a MessageRead trait to inspect the content\n\n /// of the inbound message.\n\n fn message(&self) -> &dyn MessageRead;\n\n}\n\n\n", "file_path": "async-coap/src/inbound_context.rs", "rank": 63, "score": 55409.17801572032 }, { "content": "/// Trait for writing/serializing a CoAP message.\n\npub trait MessageWrite: OptionInsert {\n\n /// Sets the CoAP message type. This may be called at any time during message writing\n\n /// without disrupting the operation. It may be called multiple times if necessary.\n\n /// The written value is that of the last call.\n\n fn set_msg_type(&mut self, tt: MsgType);\n\n\n\n /// Sets the CoAP message id. This may be called at any time during message writing\n\n /// without disrupting the operation. It may be called multiple times if necessary.\n\n /// The written value is that of the last call.\n\n fn set_msg_id(&mut self, msg_id: MsgId);\n\n\n\n /// Sets the CoAP message code. This may be called at any time during message writing\n\n /// without disrupting the operation. 
It may be called multiple times if necessary.\n\n /// The written value is that of the last call.\n\n fn set_msg_code(&mut self, code: MsgCode);\n\n\n\n /// Sets the CoAP message token. Calling this method out-of-order will cause any previously\n\n /// written options or payload to be lost. It may be called multiple times if necessary.\n\n /// The written value is that of the last call.\n\n fn set_msg_token(&mut self, token: MsgToken);\n", "file_path": "async-coap/src/message/write.rs", "rank": 64, "score": 54424.42820253274 }, { "content": "/// A convenience trait for `Arc<>` that makes it easier to construct `ArcGuard<>` instances.\n\n///\n\n/// See [Module Documentation](index.html) for more information.\n\npub trait ArcGuardExt<RC> {\n\n /// Convenience method for constructing `ArcGuard<>` instances.\n\n ///\n\n /// See [Module Documentation](index.html) for more information.\n\n fn guard<'head, F, T>(&self, getter: F) -> ArcGuard<RC, T>\n\n where\n\n F: FnOnce(&'head RC) -> T,\n\n RC: 'head,\n\n T: 'head;\n\n}\n\n\n\nimpl<RC> ArcGuardExt<RC> for Arc<RC> {\n\n fn guard<'head, F, T>(&self, getter: F) -> ArcGuard<RC, T>\n\n where\n\n F: FnOnce(&'head RC) -> T,\n\n RC: 'head,\n\n T: 'head,\n\n {\n\n ArcGuard::new(self.clone(), getter)\n\n }\n\n}\n", "file_path": "async-coap/src/arc_guard.rs", "rank": 65, "score": 54424.42820253274 }, { "content": "/// Represents the context for processing an inbound request that can be responded to.\n\npub trait RespondableInboundContext: InboundContext {\n\n /// Indicates if the inbound request was a multicast request or not. 
Multicast\n\n /// requests have additional response timing requirements in order to avoid\n\n /// congestion.\n\n fn is_multicast(&self) -> bool;\n\n\n\n /// Indicates if this inbound request is from a real inbound request or if it\n\n /// is a fake request that is being generated internally to solicit a response.\n\n /// Fake requests are only generated for the `GET` method.\n\n fn is_fake(&self) -> bool;\n\n\n\n /// Responds to this inbound request using a message generated from `msg_gen`.\n\n /// The `msg_id` and `msg_token` fields will be automatically populated.\n\n /// This method will return the value returned by `msg_gen`.\n\n fn respond<F>(&self, msg_gen: F) -> Result<(), Error>\n\n where\n\n F: Fn(&mut dyn MessageWrite) -> Result<(), Error>;\n\n}\n", "file_path": "async-coap/src/inbound_context.rs", "rank": 66, "score": 53490.106138754374 }, { "content": "/// Extension trait for [`LocalEndpoint`] which implements additional helper methods.\n\npub trait LocalEndpointExt: LocalEndpoint {\n\n /// Sends a message where multiple responses are expected, returned as a [`SendAsStream`].\n\n ///\n\n /// In this version of [`LocalEndpoint::send`], the `send_desc` can return\n\n /// [`ResponseStatus::Done`] from its handler multiple times, with the results being emitted\n\n /// from the returned [`SendAsStream`].\n\n ///\n\n /// The stream can be cleanly ended by the handler eventually returning\n\n /// [`Error::ResponseTimeout`] or [`Error::Cancelled`], neither of which will be emitted\n\n /// as an error.\n\n fn send_as_stream<'a, S, R, SD>(&'a self, dest: S, send_desc: SD) -> SendAsStream<'a, R>\n\n where\n\n S: ToSocketAddrs<SocketAddr = Self::SocketAddr, Error = Self::SocketError> + 'a,\n\n SD: SendDesc<Self::InboundContext, R> + 'a,\n\n R: Send + 'a,\n\n {\n\n let (sender, receiver) = futures::channel::mpsc::channel::<Result<R, Error>>(10);\n\n\n\n SendAsStream {\n\n receiver,\n", "file_path": "async-coap/src/local_endpoint.rs", "rank": 67, "score": 
53490.106138754374 }, { "content": "/// Extension trait which implements additional helper methods.\n\npub trait RemoteEndpointExt: RemoteEndpoint {\n\n /// Sends an application-level ping to to one or more addresses specified by `dest`.\n\n /// The first response received causes the future to emit `Ok(())`.\n\n fn ping(&self) -> BoxFuture<'_, Result<(), Error>> {\n\n self.send(Ping::new())\n\n }\n\n\n\n /// Analogous to [`LocalEndpointExt::send_as_stream`], except using this `RemoteEndpoint` for\n\n /// the destination SocketAddr and path.\n\n fn send_as_stream<'a, R, SD>(&'a self, send_desc: SD) -> SendAsStream<'a, R>\n\n where\n\n SD: SendDesc<Self::InboundContext, R> + 'a,\n\n R: Send + 'a,\n\n {\n\n let (sender, receiver) = futures::channel::mpsc::channel::<Result<R, Error>>(10);\n\n\n\n SendAsStream {\n\n receiver,\n\n send_future: self.send(SendAsStreamDesc::new(send_desc, sender)),\n\n }\n", "file_path": "async-coap/src/remote_endpoint.rs", "rank": 68, "score": 53490.106138754374 }, { "content": "/// Extension trait for [`AnyUriRef`] that provides methods that cannot be overridden from\n\n/// their default implementations.\n\n///\n\n/// This trait is automatically implemented for all types that implement [`AnyUriRef`].\n\npub trait AnyUriRefExt: AnyUriRef {\n\n /// Wraps this `AnyUriRef` instance in a [`UriDisplay`] object for use with formatting\n\n /// macros like `write!` and `format!`.\n\n ///\n\n /// The resulting instance will ultimately use the [`AnyUriRef::write_to_unsafe`] method\n\n /// to render the URI-reference.\n\n ///\n\n /// This method is similar to the [`display`][display-path] method on [`std::path::Path`].\n\n ///\n\n /// [display-path]: std::path::Path::display\n\n ///\n\n /// ## Example\n\n ///\n\n /// ```\n\n /// use async_coap_uri::prelude::*;\n\n ///\n\n /// let uri_ref = uri_ref!(\"http://example.com/\");\n\n ///\n\n /// println!(\"uri_ref = {}\", uri_ref.display());\n\n /// ```\n", "file_path": "async-coap-uri/src/any_uri_ref.rs", 
"rank": 69, "score": 52611.43805944701 }, { "content": "fn string_literal_from_token_stream(input: TokenStream) -> String {\n\n use syn::LitStr;\n\n\n\n if let Some(nom) = syn::parse::<LitStr>(input.clone()).ok() {\n\n return nom.value();\n\n }\n\n\n\n panic!(\"Expected string literal, got {:?}\", input);\n\n}\n\n\n", "file_path": "async-coap-uri/proc-macros/src/lib.rs", "rank": 70, "score": 48265.60089720391 }, { "content": "/// # Send Descriptor Trait\n\n///\n\n/// Types implementing this trait can be passed to the `send*` methods of [`LocalEndpoint`]\n\n/// and [`RemoteEndpoint`], and can define almost every aspect of how a message transaction\n\n/// is handled.\n\n///\n\n/// See the [module level documentation](index.html) for more information on typical usage\n\n/// patterns.\n\n///\n\n/// ## Internals\n\n///\n\n/// There are several methods in this trait, but three of them are critical:\n\n///\n\n/// * [`write_options`](SendDesc::write_options)\\: Defines which options are going to be\n\n/// included in the outbound message.\n\n/// * [`write_payload`](SendDesc::write_payload)\\: Defines the contents of the payload for the\n\n/// outbound message.\n\n/// * [`handler`](SendDesc::handler)\\: Handles inbound reply messages, as well as error conditions.\n\n///\n\npub trait SendDesc<IC, R = (), TP = StandardCoapConstants>: Send\n\nwhere\n\n IC: InboundContext,\n\n R: Send,\n\n TP: TransParams,\n\n{\n\n /// **Experimental**: Gets custom transmission parameters.\n\n fn trans_params(&self) -> Option<TP> {\n\n None\n\n }\n\n\n\n /// **Experimental**: Used for determining if the given option seen in the reply message\n\n /// is supported or not.\n\n ///\n\n /// Response messages with any options that cause this\n\n /// method to return false will be rejected.\n\n ///\n\n fn supports_option(&self, option: OptionNumber) -> bool {\n\n !option.is_critical()\n\n }\n", "file_path": "async-coap/src/send_desc/mod.rs", "rank": 71, "score": 44272.1466622986 }, { "content": 
"\n\nimpl Display for Error {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), std::fmt::Error> {\n\n <Self as Debug>::fmt(self, f)\n\n }\n\n}\n\n\n\nimpl Default for Error {\n\n fn default() -> Self {\n\n Error::Unspecified\n\n }\n\n}\n\n\n\nimpl Extend<Result<(), Error>> for Error {\n\n fn extend<T: IntoIterator<Item = Result<(), Error>>>(&mut self, iter: T) {\n\n if let Some(Err(err)) = iter.into_iter().next() {\n\n *self = err;\n\n }\n\n }\n\n}\n", "file_path": "async-coap/src/error.rs", "rank": 72, "score": 43506.458858421785 }, { "content": "// Copyright 2019 Google LLC\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// https://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n//\n\n\n\nuse std::fmt::{Debug, Display, Formatter};\n\n\n\n/// Type for errors encountered while sending or receiving CoAP requests and responses.\n\n#[derive(Debug, Eq, PartialEq, Hash, Copy, Clone)]\n\npub enum Error {\n", "file_path": "async-coap/src/error.rs", "rank": 73, "score": 43494.694362227696 }, { "content": "}\n\n\n\n#[cfg(feature = \"std\")]\n\nimpl std::convert::From<std::io::Error> for Error {\n\n fn from(_: std::io::Error) -> Self {\n\n Error::IOError\n\n }\n\n}\n\n\n\nimpl std::convert::From<Error> for core::fmt::Error {\n\n fn from(_: Error) -> Self {\n\n core::fmt::Error\n\n }\n\n}\n\n\n\nimpl From<std::fmt::Error> for crate::Error {\n\n fn from(_err: std::fmt::Error) -> Self {\n\n Error::OutOfSpace\n\n }\n\n}\n", "file_path": "async-coap/src/error.rs", "rank": 74, "score": 
43489.657188207835 }, { "content": " /// The response indicated that the request was forbidden.\n\n Forbidden,\n\n\n\n /// The response indicated an unspecified client error.\n\n ClientRequestError,\n\n\n\n /// The response indicated an unspecified server error.\n\n ServerError,\n\n\n\n /// The transaction was reset.\n\n Reset,\n\n\n\n /// More than one instance of an option marked as non-repeatable was encountered.\n\n OptionNotRepeatable,\n\n\n\n /// The given URI scheme is not supported by the associated local endpoint.\n\n UnsupportedUriScheme,\n\n\n\n /// An unspecified error has occurred.\n\n Unspecified,\n", "file_path": "async-coap/src/error.rs", "rank": 75, "score": 43483.80817542839 }, { "content": " UnhandledCriticalOption,\n\n\n\n /// An I/O error occurred while performing this operation.\n\n IOError,\n\n\n\n /// This operation has been cancelled.\n\n Cancelled,\n\n\n\n /// Unable to look up the given host because it was not found.\n\n HostNotFound,\n\n\n\n /// Unable to look up the given host for an unspecified reason.\n\n HostLookupFailure,\n\n\n\n /// The response indicated that the given resource was not found.\n\n ResourceNotFound,\n\n\n\n /// The response indicated that the request was unauthorized.\n\n Unauthorized,\n\n\n", "file_path": "async-coap/src/error.rs", "rank": 76, "score": 43482.84133779072 }, { "content": " /// One or more of the supplied arguments are not valid for the given operation.\n\n InvalidArgument,\n\n\n\n /// There is not enough space in the given buffer to complete the operation.\n\n OutOfSpace,\n\n\n\n /// An error was encountered while attempting to parse the data.\n\n ParseFailure,\n\n\n\n /// Operation timed out waiting for a response.\n\n ResponseTimeout,\n\n\n\n /// The response was well-formed, but not appropriate for the given request.\n\n BadResponse,\n\n\n\n /// The [message code][async-coap::message::MsgCode] was not recognized by this\n\n /// version of rust-async-coap.\n\n UnknownMessageCode,\n\n\n\n /// A 
critical option present in the message was not supported.\n", "file_path": "async-coap/src/error.rs", "rank": 77, "score": 43481.40379128916 }, { "content": " write!(f, \"{:?}\", self.0)\n\n }\n\n}\n\n\n\nimpl<T> core::ops::Deref for OptionKey<T> {\n\n type Target = OptionNumber;\n\n\n\n fn deref(&self) -> &Self::Target {\n\n &self.0\n\n }\n\n}\n\n\n\n/// Typed key for IF_MATCH option.\n\npub const IF_MATCH: OptionKey<ETag> = OptionKey::new(OptionNumber::IF_MATCH);\n\n\n\n/// Typed key for URI_HOST option.\n\npub const URI_HOST: OptionKey<&str> = OptionKey::new(OptionNumber::URI_HOST);\n\n\n\n/// Typed key for ETAG option.\n\npub const ETAG: OptionKey<ETag> = OptionKey::new(OptionNumber::ETAG);\n", "file_path": "async-coap/src/option/key.rs", "rank": 78, "score": 42232.377141101235 }, { "content": "\n\n/// Typed key for IF_NONE_MATCH option.\n\npub const IF_NONE_MATCH: OptionKey<()> = OptionKey::new(OptionNumber::IF_NONE_MATCH);\n\n\n\n/// Typed key for Observe option.\n\npub const OBSERVE: OptionKey<u32> = OptionKey::new(OptionNumber::OBSERVE);\n\n\n\n/// Typed key for URI-Port option.\n\npub const URI_PORT: OptionKey<u16> = OptionKey::new(OptionNumber::URI_PORT);\n\n\n\n/// Typed key for Location-Path option.\n\npub const LOCATION_PATH: OptionKey<&str> = OptionKey::new(OptionNumber::LOCATION_PATH);\n\n\n\n/// Typed key for OSCORE option.\n\npub const OSCORE: OptionKey<&[u8]> = OptionKey::new(OptionNumber::OSCORE);\n\n\n\n/// Typed key for URI-Path option.\n\npub const URI_PATH: OptionKey<&str> = OptionKey::new(OptionNumber::URI_PATH);\n\n\n\n/// Typed key for Content-Format option.\n", "file_path": "async-coap/src/option/key.rs", "rank": 79, "score": 42232.31581611561 }, { "content": "\n\nimpl<T> OptionKey<T> {\n\n /// Creates a new instance with the given option number.\n\n pub const fn new(n: OptionNumber) -> OptionKey<T> {\n\n OptionKey(n, core::marker::PhantomData)\n\n }\n\n}\n\n\n\nimpl<T> Copy for OptionKey<T> {}\n\n\n\nimpl<T> Clone for OptionKey<T> {\n\n 
fn clone(&self) -> Self {\n\n OptionKey(self.0, core::marker::PhantomData)\n\n }\n\n}\n\n\n\nunsafe impl<T> Send for OptionKey<T> {}\n\n\n\nimpl<T> core::fmt::Debug for OptionKey<T> {\n\n fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {\n", "file_path": "async-coap/src/option/key.rs", "rank": 80, "score": 42226.8263560424 }, { "content": "pub const CONTENT_FORMAT: OptionKey<ContentFormat> = OptionKey::new(OptionNumber::CONTENT_FORMAT);\n\n\n\n/// Typed key for Max-Age option.\n\npub const MAX_AGE: OptionKey<u32> = OptionKey::new(OptionNumber::MAX_AGE);\n\n\n\n/// Typed key for URI-Query option.\n\npub const URI_QUERY: OptionKey<&str> = OptionKey::new(OptionNumber::URI_QUERY);\n\n\n\n/// Typed key for Accept option.\n\npub const ACCEPT: OptionKey<ContentFormat> = OptionKey::new(OptionNumber::ACCEPT);\n\n\n\n/// Typed key for Location-Query option.\n\npub const LOCATION_QUERY: OptionKey<&str> = OptionKey::new(OptionNumber::LOCATION_QUERY);\n\n\n\n/// Typed key for Block2 option.\n\npub const BLOCK2: OptionKey<BlockInfo> = OptionKey::new(OptionNumber::BLOCK2);\n\n\n\n/// Typed key for Block1 option.\n\npub const BLOCK1: OptionKey<BlockInfo> = OptionKey::new(OptionNumber::BLOCK1);\n\n\n", "file_path": "async-coap/src/option/key.rs", "rank": 81, "score": 42225.476348436845 }, { "content": "/// Typed key for Size2 option.\n\npub const SIZE2: OptionKey<u32> = OptionKey::new(OptionNumber::SIZE2);\n\n\n\n/// Typed key for Proxy-URI option.\n\npub const PROXY_URI: OptionKey<&str> = OptionKey::new(OptionNumber::PROXY_URI);\n\n\n\n/// Typed key for Proxy-Scheme option.\n\npub const PROXY_SCHEME: OptionKey<&str> = OptionKey::new(OptionNumber::PROXY_SCHEME);\n\n\n\n/// Typed key for Size1 option.\n\npub const SIZE1: OptionKey<u32> = OptionKey::new(OptionNumber::SIZE1);\n", "file_path": "async-coap/src/option/key.rs", "rank": 82, "score": 42225.25050244583 }, { "content": "// Copyright 2019 Google LLC\n\n//\n\n// Licensed under the Apache License, Version 
2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// https://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n//\n\n\n\nuse super::*;\n\n\n\n/// Typed option key, for type-safe access to CoAP options.\n\n#[derive(Hash, PartialEq, Eq, Ord, PartialOrd)]\n\npub struct OptionKey<T>(pub OptionNumber, core::marker::PhantomData<*const T>);\n", "file_path": "async-coap/src/option/key.rs", "rank": 83, "score": 42223.66196092604 }, { "content": "// Copyright 2019 Google LLC\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// https://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n//\n\n\n\nuse super::*;\n\n\n\n/// Type describing the type of an option's value.\n\n#[derive(Debug, Copy, Eq, PartialEq, Hash, Clone)]\n\npub enum OptionValueType {\n", "file_path": "async-coap/src/option/value.rs", "rank": 87, "score": 42201.71773097985 }, { "content": "#[derive(Debug)]\n\npub enum OptionValue<'a> {\n\n Integer(u32),\n\n Bytes(&'a [u8]),\n\n ETag(ETag),\n\n}\n\n\n\nimpl<'a> From<u8> for OptionValue<'a> {\n\n fn from(value: u8) -> Self {\n\n OptionValue::Integer(value as 
u32)\n\n }\n\n}\n\n\n\nimpl<'a> From<u16> for OptionValue<'a> {\n\n fn from(value: u16) -> Self {\n\n OptionValue::Integer(value as u32)\n\n }\n\n}\n\n\n\nimpl<'a> From<u32> for OptionValue<'a> {\n", "file_path": "async-coap/src/option/value.rs", "rank": 89, "score": 42200.08102857338 }, { "content": " }\n\n}\n\n\n\nimpl<'a> From<&'a [u8]> for OptionValue<'a> {\n\n fn from(value: &'a [u8]) -> Self {\n\n OptionValue::Bytes(value)\n\n }\n\n}\n\n\n\nimpl<'a> From<&'a str> for OptionValue<'a> {\n\n fn from(value: &'a str) -> Self {\n\n OptionValue::Bytes(value.as_bytes())\n\n }\n\n}\n\n\n\nimpl<'a, 'b> From<&'b &'a str> for OptionValue<'a> {\n\n fn from(value: &'b &'a str) -> Self {\n\n OptionValue::Bytes(value.as_bytes())\n\n }\n\n}\n\n\n\nimpl<'a> From<()> for OptionValue<'a> {\n\n fn from(_: ()) -> Self {\n\n OptionValue::Bytes(&[])\n\n }\n\n}\n\n\n\n#[doc(hidden)]\n", "file_path": "async-coap/src/option/value.rs", "rank": 91, "score": 42196.951800277966 }, { "content": "impl<'a> TryOptionValueFrom<'a> for u32 {\n\n fn try_option_value_from(buffer: &'a [u8]) -> Option<Self> {\n\n try_decode_u32(buffer)\n\n }\n\n}\n\n\n\nimpl<'a> TryOptionValueFrom<'a> for ContentFormat {\n\n fn try_option_value_from(buffer: &'a [u8]) -> Option<Self> {\n\n Some(ContentFormat(try_decode_u16(buffer)?))\n\n }\n\n}\n\n\n\nimpl<'a> TryOptionValueFrom<'a> for BlockInfo {\n\n fn try_option_value_from(buffer: &'a [u8]) -> Option<Self> {\n\n BlockInfo(try_decode_u32(buffer)?).valid()\n\n }\n\n}\n\n\n\nimpl<'a> TryOptionValueFrom<'a> for u16 {\n\n fn try_option_value_from(buffer: &'a [u8]) -> Option<Self> {\n", "file_path": "async-coap/src/option/value.rs", "rank": 92, "score": 42196.86602380028 }, { "content": " try_decode_u16(buffer)\n\n }\n\n}\n\n\n\nimpl<'a> TryOptionValueFrom<'a> for () {\n\n fn try_option_value_from(_: &'a [u8]) -> Option<Self> {\n\n Some(())\n\n }\n\n}\n\n\n\nimpl<'a> TryOptionValueFrom<'a> for &'a str {\n\n fn try_option_value_from(buffer: &'a [u8]) -> Option<Self> 
{\n\n core::str::from_utf8(buffer).ok()\n\n }\n\n}\n", "file_path": "async-coap/src/option/value.rs", "rank": 94, "score": 42196.2315968156 }, { "content": " fn from(value: u32) -> Self {\n\n OptionValue::Integer(value)\n\n }\n\n}\n\n\n\nimpl<'a> From<ContentFormat> for OptionValue<'a> {\n\n fn from(value: ContentFormat) -> Self {\n\n OptionValue::Integer(value.0 as u32)\n\n }\n\n}\n\n\n\nimpl<'a> From<BlockInfo> for OptionValue<'a> {\n\n fn from(value: BlockInfo) -> Self {\n\n OptionValue::Integer(value.0 as u32)\n\n }\n\n}\n\n\n\nimpl<'a> From<ETag> for OptionValue<'a> {\n\n fn from(value: ETag) -> Self {\n\n OptionValue::ETag(value)\n", "file_path": "async-coap/src/option/value.rs", "rank": 96, "score": 42192.144701821984 } ]
Rust
examples/router_benchmark.rs
sers-dev/tyractorsaur
23679ee63296eaac1bc7cfaacdcd81f137950799
use std::process::exit; use std::thread::sleep; use std::time::{Duration, Instant}; use tyra::prelude::{Actor, ActorFactory, ActorMessage, ActorSystem, ActorContext, Handler, TyraConfig, ActorWrapper}; use tyra::router::{AddActorMessage, RoundRobinRouterFactory, RouterMessage}; struct MessageA {} impl ActorMessage for MessageA {} struct Finish {} impl ActorMessage for Finish {} struct Start {} impl ActorMessage for Start {} struct Benchmark { ctx: ActorContext<Self>, aggregator: ActorWrapper<Aggregator>, total_msgs: usize, name: String, count: usize, start: Instant, } struct BenchmarkFactory { total_msgs: usize, aggregator: ActorWrapper<Aggregator>, name: String, } impl ActorFactory<Benchmark> for BenchmarkFactory { fn new_actor(&self, context: ActorContext<Benchmark>) -> Benchmark { Benchmark::new(self.total_msgs, self.name.clone(), context, self.aggregator.clone()) } } impl Benchmark { pub fn new(total_msgs: usize, name: String, context: ActorContext<Self>, aggregator: ActorWrapper<Aggregator>) -> Self { Self { ctx: context, aggregator, total_msgs, name, count: 0, start: Instant::now(), } } } impl Actor for Benchmark { fn on_system_stop(&mut self) { self.ctx.actor_ref.stop(); } } impl Handler<MessageA> for Benchmark { fn handle(&mut self, _msg: MessageA, _context: &ActorContext<Self>) { if self.count == 0 { sleep(Duration::from_secs((3) as u64)); self.start = Instant::now(); } self.count += 1; if self.count % self.total_msgs == 0 { let duration = self.start.elapsed(); println!( "{} It took {:?} to process {} messages", self.name, duration, self.total_msgs ); } if self.count == self.total_msgs { self.aggregator.send(Finish {}); } } } struct Aggregator { ctx: ActorContext<Self>, total_actors: usize, name: String, actors_finished: usize, start: Instant, } struct AggregatorFactory { total_actors: usize, name: String, } impl Aggregator { pub fn new(total_actors: usize, name: String, context: ActorContext<Self>) -> Self { Self { ctx: context, total_actors, name, 
actors_finished: 0, start: Instant::now(), } } } impl Actor for Aggregator { fn on_system_stop(&mut self) { self.ctx.actor_ref.stop(); } } impl ActorFactory<Aggregator> for AggregatorFactory { fn new_actor(&self, context: ActorContext<Aggregator>) -> Aggregator { Aggregator::new(self.total_actors, self.name.clone(), context) } } impl Handler<Finish> for Aggregator { fn handle(&mut self, _msg: Finish, _context: &ActorContext<Self>) { self.actors_finished += 1; if self.actors_finished == self.total_actors { let duration = self.start.elapsed(); println!( "{} It took {:?} to finish {} actors", self.name, duration, self.total_actors ); self.ctx.system.stop(Duration::from_secs(60)); } } } impl Handler<Start> for Aggregator { fn handle(&mut self, _msg: Start, _context: &ActorContext<Self>) { sleep(Duration::from_secs((3) as u64)); self.start = Instant::now(); } } fn main() { let actor_config = TyraConfig::new().unwrap(); let actor_system = ActorSystem::new(actor_config); let message_count = 10000000; let actor_count = 7; let router_factory = RoundRobinRouterFactory::new(); let router = actor_system.builder().spawn("benchmark-router", router_factory).unwrap(); let aggregator = actor_system .builder() .spawn( "aggregator", AggregatorFactory { total_actors: actor_count, name: String::from("aggregator") }).unwrap(); for i in 0..actor_count { let actor = actor_system .builder() .spawn(format!("benchmark-single-actor-{}", i), BenchmarkFactory { name: String::from(format!("benchmark-{}", i)), total_msgs: (message_count.clone() / actor_count.clone()) as usize, aggregator: aggregator.clone(), }).unwrap(); router.send(AddActorMessage::new(actor)); } println!("Actors have been created"); let start = Instant::now(); aggregator.send(Start{}); for _i in 0..message_count { let msg = MessageA {}; router.send(RouterMessage::new(msg)); } let duration = start.elapsed(); println!("It took {:?} to send {} messages", duration, message_count); exit(actor_system.await_shutdown()); }
use std::process::exit; use std::thread::sleep; use std::time::{Duration, Instant}; use tyra::prelude::{Actor, ActorFactory, ActorMessage, ActorSystem, ActorContext, Handler, TyraConfig, ActorWrapper}; use tyra::router::{AddActorMessage, RoundRobinRouterFactory, RouterMessage}; struct MessageA {} impl ActorMessage for MessageA {} struct Finish {} impl ActorMessage for Finish {} struct Start {} impl ActorMessage for Start {} struct Benchmark { ctx: ActorContext<Self>, aggregator: ActorWrapper<Aggregator>, total_msgs: usize, name: String, count: usize, start: Instant, } struct BenchmarkFactory { total_msgs: usize, aggregator: ActorWrapper<Aggregator>, name: String, } impl ActorFactory<Benchmark> for BenchmarkFactory { fn new_actor(&self, context: ActorContext<Benchmark>) -> Benchmark { Benchmark::new(self.total_msgs, self.name.clone(), context, self.aggregator.clone()) } } impl Benchmark { pub fn new(total_msgs: usize, name: String, context: ActorContext<Self>, aggregator: ActorWrapper<Aggregator>) -> Self { Self { ctx: context, aggregator, total_msgs, name, count: 0, start: Instant::now(), } } } impl Actor for Benchmark { fn on_system_stop(&mut self) { self.ctx.actor_ref.stop(); } } impl Handler<MessageA> for Benchmark { fn handle(&mut self, _msg: MessageA, _context: &ActorContext<Self>) { if self.count == 0 { sleep(Duration::from_secs((3) as u64)); self.start = Instant::now(); } self.count += 1; if self.count % self.total_msgs == 0 { let duration = self.start.elapsed(); println!( "{} It took {:?} to process {} messages", self.name, duration, self.total_msgs ); } if self.count == self.total_msgs { self.aggregator.send(Finish {}); } } } struct Aggregator { ctx: ActorContext<Self>, total_actors: usize, name: String, actors_finished: usize, start: Instant, } struct AggregatorFactory { total_actors: usize, name: String, } impl Aggregator {
} impl Actor for Aggregator { fn on_system_stop(&mut self) { self.ctx.actor_ref.stop(); } } impl ActorFactory<Aggregator> for AggregatorFactory { fn new_actor(&self, context: ActorContext<Aggregator>) -> Aggregator { Aggregator::new(self.total_actors, self.name.clone(), context) } } impl Handler<Finish> for Aggregator { fn handle(&mut self, _msg: Finish, _context: &ActorContext<Self>) { self.actors_finished += 1; if self.actors_finished == self.total_actors { let duration = self.start.elapsed(); println!( "{} It took {:?} to finish {} actors", self.name, duration, self.total_actors ); self.ctx.system.stop(Duration::from_secs(60)); } } } impl Handler<Start> for Aggregator { fn handle(&mut self, _msg: Start, _context: &ActorContext<Self>) { sleep(Duration::from_secs((3) as u64)); self.start = Instant::now(); } } fn main() { let actor_config = TyraConfig::new().unwrap(); let actor_system = ActorSystem::new(actor_config); let message_count = 10000000; let actor_count = 7; let router_factory = RoundRobinRouterFactory::new(); let router = actor_system.builder().spawn("benchmark-router", router_factory).unwrap(); let aggregator = actor_system .builder() .spawn( "aggregator", AggregatorFactory { total_actors: actor_count, name: String::from("aggregator") }).unwrap(); for i in 0..actor_count { let actor = actor_system .builder() .spawn(format!("benchmark-single-actor-{}", i), BenchmarkFactory { name: String::from(format!("benchmark-{}", i)), total_msgs: (message_count.clone() / actor_count.clone()) as usize, aggregator: aggregator.clone(), }).unwrap(); router.send(AddActorMessage::new(actor)); } println!("Actors have been created"); let start = Instant::now(); aggregator.send(Start{}); for _i in 0..message_count { let msg = MessageA {}; router.send(RouterMessage::new(msg)); } let duration = start.elapsed(); println!("It took {:?} to send {} messages", duration, message_count); exit(actor_system.await_shutdown()); }
pub fn new(total_actors: usize, name: String, context: ActorContext<Self>) -> Self { Self { ctx: context, total_actors, name, actors_finished: 0, start: Instant::now(), } }
function_block-full_function
[ { "content": "struct MessageA {}\n\n\n\nimpl ActorMessage for MessageA {}\n\n\n", "file_path": "examples/benchmark.rs", "rank": 0, "score": 118826.0143007199 }, { "content": "struct MessageA {\n\n text: String,\n\n}\n\n\n", "file_path": "examples/actor.rs", "rank": 1, "score": 116364.18057765931 }, { "content": "/// Defines which [ActorMessage] is supported per [Actor]\n\n///\n\n/// # Examples\n\n///\n\n/// Basic usage:\n\n///\n\n/// ```rust\n\n/// use tyra::prelude::{TyraConfig, ActorSystem, Actor, ActorFactory, ActorContext, SerializedMessage, ActorMessage, Handler};\n\n///\n\n/// struct TestActor {}\n\n/// impl Actor for TestActor {}\n\n///\n\n/// struct FooBar {}\n\n/// impl ActorMessage for FooBar {}\n\n///\n\n/// impl Handler<FooBar> for TestActor {\n\n/// fn handle(&mut self, _msg: FooBar, _context: &ActorContext<Self>) {\n\n/// }\n\n/// }\n\n/// ```\n\npub trait Handler<M: ?Sized>\n\nwhere\n\n Self: Actor + Sized,\n\n M: ActorMessage,\n\n{\n\n fn handle(&mut self, msg: M, context: &ActorContext<Self>);\n\n}\n\n\n\nimpl<A> Handler<ActorStopMessage> for A\n\nwhere\n\n A: Actor + Sized,\n\n{\n\n fn handle(&mut self, _msg: ActorStopMessage, _context: &ActorContext<A>) {\n\n self.on_actor_stop();\n\n }\n\n}\n\n\n\nimpl<A> Handler<SystemStopMessage> for A\n\nwhere\n\n A: Actor + Sized,\n\n{\n\n fn handle(&mut self, _msg: SystemStopMessage, _context: &ActorContext<A>) {\n\n self.on_system_stop();\n\n }\n\n}\n", "file_path": "src/actor/handler.rs", "rank": 5, "score": 104347.59746708686 }, { "content": "struct Benchmark {\n\n ctx: ActorContext<Self>,\n\n total_msgs: usize,\n\n name: String,\n\n count: usize,\n\n start: Instant,\n\n}\n\n\n", "file_path": "examples/benchmark.rs", "rank": 6, "score": 98309.57951318775 }, { "content": "/// Core trait to define Messages\n\n///\n\n/// # Examples\n\n///\n\n/// Basic usage:\n\n///\n\n/// ```rust\n\n/// use tyra::prelude::ActorMessage;\n\n///\n\n/// struct FooBar {}\n\n/// impl ActorMessage for FooBar {}\n\n/// ```\n\npub 
trait ActorMessage: Send + Sync {}\n", "file_path": "src/message/actor_message.rs", "rank": 7, "score": 97613.37336165726 }, { "content": "struct MessageUnsupported {}\n\n\n\nimpl ActorMessage for MessageUnsupported {}\n\n\n", "file_path": "examples/actor.rs", "rank": 10, "score": 93066.65734036929 }, { "content": "struct MessageB {\n\n text: String,\n\n}\n\n\n\nimpl ActorMessage for MessageA {}\n\n\n\nimpl ActorMessage for MessageB {}\n\n\n", "file_path": "examples/actor.rs", "rank": 11, "score": 93066.65734036927 }, { "content": "struct MessageA {}\n\nimpl ActorMessage for MessageA {}\n\n\n", "file_path": "examples/router.rs", "rank": 13, "score": 90259.5284697331 }, { "content": "/// Core trait to define Actors\n\n///\n\n///\n\n/// # Guaranteed Execution Order\n\n///\n\n/// 1. [ActorFactory.new_actor](../prelude/trait.ActorFactory.html#tymethod.new_actor)\n\n/// 2. [pre_start](../prelude/trait.Actor.html#method.pre_start)\n\n/// 3. Start processing [Handler Implementations](../prelude/trait.Handler.html#tymethod.handle)\n\n/// 4. [on_actor_stop](../prelude/trait.Actor.html#method.on_actor_stop)\n\n/// 5. Stops accepting new messages, but will continue to work through all existing Messages in Mailbox\n\n/// 6. 
[post_stop](../prelude/trait.Actor.html#method.post_stop)\n\n///\n\n/// # Examples\n\n///\n\n/// Basic usage:\n\n///\n\n/// ```rust\n\n/// use tyra::prelude::{TyraConfig, ActorSystem, Actor, ActorFactory, ActorContext, SerializedMessage};\n\n///\n\n/// struct TestActor {}\n\n///\n\n/// impl Actor for TestActor {}\n\n/// ```\n\n///\n\n/// # Architecture\n\n///\n\n/// ## Actor Lifecycle\n\n///\n\n/// ```text\n\n/// ┌──────────────────────────┐\n\n/// │ │\n\n/// │ │\n\n/// ┌──────▼──────┐ │\n\n/// │ │ │\n\n/// │ new_actor │ │\n\n/// │ │ │\n\n/// └──────┬──────┘ │\n\n/// │ │\n\n/// ┌──────▼──────┐ │\n\n/// │ │ │\n\n/// │ pre_start │ │\n\n/// │ │ │\n\n/// └──────┬──────┘ │\n\n/// │ │\n\n/// ┌─────────▼─────────┐ │\n\n/// │ ◄─────┐ │\n\n/// │ handle messages │ │loop │panic &&\n\n/// │ ├─────┘ │\n\n/// └──┬──────┬─────┬──▲┘ │RestartPolicy == Always\n\n/// │ │ │ │ │\n\n/// │ │ │ │ │\n\n/// │ │ │ │ │\n\n/// ┌────────────────▼┐ │ ┌▼──┴──────────────┐ │\n\n/// │ │ │ │ │ │\n\n/// │ on_actor_stop │ │ │ on_system_stop │ │\n\n/// │ │ │ │ │ │\n\n/// └────┬────────────┘ │ └──────────────────┘ │\n\n/// │ │ │\n\n/// │ │ │\n\n/// │ │ │\n\n/// │ │panic │\n\n/// ┌────────────▼───────┐ │ │\n\n/// │ │ ┌──────▼──────┐ │\n\n/// │ handle remaining │ │ │ │\n\n/// │ ├───► post_stop │ │\n\n/// │ messages │ │ │ │\n\n/// │ │ └──────┬──────┘ │\n\n/// └────────────────────┘ │ │\n\n/// │ │\n\n/// │ │\n\n/// └──────────────────────────┘\n\n/// ```\n\npub trait Actor: Send + Sync + UnwindSafe {\n\n /// executed before the first message is handled\n\n ///\n\n /// re-executed after actor restart before first message is handled\n\n fn pre_start(&mut self) {}\n\n /// executed after the last message is handled\n\n ///\n\n /// also executed in case the actor panics while it handles a message\n\n fn post_stop(&mut self) {}\n\n /// executed when Actor handles internal ActorStopMessage\n\n ///\n\n /// After this is called, the Actor will not accept any more messages, but messages within the mailbox will still 
be processed\n\n fn on_actor_stop(&mut self) {}\n\n /// executed when Actor handles internal SystemStopMessage initiated by [ActorSystem.stop](../prelude/struct.ActorSystem.html#method.stop)\n\n ///\n\n /// Without any custom implementation, the [ActorSystem.stop](../prelude/struct.ActorSystem.html#method.stop) will always end in timeout\n\n fn on_system_stop(&mut self) {}\n\n /// executed when [ActorSystem.send_to_address](../prelude/struct.ActorSystem.html#method.send_to_address) is called\n\n ///\n\n /// # Important Note\n\n ///\n\n /// This is the only function that is not necessarily executed on the thread_pool of the Actor\n\n /// It is executed on whatever thread calls [ActorSystem.send_to_address](../prelude/struct.ActorSystem.html#method.send_to_address)\n\n fn handle_serialized_message(&self, _msg: SerializedMessage) {}\n\n}\n", "file_path": "src/actor/actor.rs", "rank": 14, "score": 77297.31588403565 }, { "content": "struct BenchmarkFactory {\n\n total_msgs: usize,\n\n name: String,\n\n}\n\n\n\nimpl ActorFactory<Benchmark> for BenchmarkFactory {\n\n fn new_actor(&self, context: ActorContext<Benchmark>) -> Benchmark {\n\n Benchmark::new(self.total_msgs, self.name.clone(), context)\n\n }\n\n}\n\n\n\nimpl Benchmark {\n\n pub fn new(total_msgs: usize, name: String, context: ActorContext<Self>) -> Self {\n\n Self {\n\n ctx: context,\n\n total_msgs,\n\n name,\n\n count: 0,\n\n start: Instant::now(),\n\n }\n", "file_path": "examples/benchmark.rs", "rank": 15, "score": 76299.33776402417 }, { "content": "fn main() {\n\n let actor_config = TyraConfig::new().unwrap();\n\n let actor_system = ActorSystem::new(actor_config);\n\n\n\n let message_count = 10000000;\n\n\n\n let actor = actor_system\n\n .builder()\n\n .spawn(\"benchmark-single-actor\", BenchmarkFactory {\n\n name: String::from(\"benchmark\"),\n\n total_msgs: message_count as usize,\n\n }).unwrap();\n\n println!(\"Actors have been created\");\n\n let start = Instant::now();\n\n\n\n for _i in 
0..message_count {\n\n let msg = MessageA {};\n\n actor.send(msg);\n\n }\n\n let duration = start.elapsed();\n\n println!(\"It took {:?} to send {} messages\", duration, message_count);\n\n\n\n exit(actor_system.await_shutdown());\n\n}\n", "file_path": "examples/benchmark.rs", "rank": 16, "score": 76285.71952929211 }, { "content": "fn main() {\n\n let actor_config = TyraConfig::new().unwrap();\n\n let actor_system = ActorSystem::new(actor_config);\n\n\n\n actor_system.add_pool(\"aye\");\n\n actor_system.add_pool(\"aye2\");\n\n\n\n let hw = HelloWorldFactory {\n\n text: String::from(\"sers\"),\n\n count: 0,\n\n };\n\n let x = actor_system\n\n .builder()\n\n .set_mailbox_size(7)\n\n .set_pool_name(\"aye\")\n\n .spawn(\"hello-world\", hw).unwrap();\n\n x.send(MessageA {\n\n text: String::from(\"sers+1\"),\n\n });\n\n x.send(MessageA {\n", "file_path": "examples/actor.rs", "rank": 18, "score": 73823.88580623154 }, { "content": "/// [Actor] can only be created from a Factory\n\n///\n\n/// This factory approach is necessary because of the restart behavior.\n\n/// Without this factory we'd need to keep a `.clone()` of the initial Actor, which would force all Actor implementations to implement `Clone`.\n\n///\n\n/// # Examples\n\n///\n\n/// Basic usage:\n\n///\n\n/// ```rust\n\n/// use tyra::prelude::{Actor, SerializedMessage, ActorFactory, ActorContext};\n\n///\n\n/// struct TestActor {}\n\n///\n\n/// impl Actor for TestActor {\n\n/// fn handle_serialized_message(&self, msg: SerializedMessage) {\n\n/// assert_eq!(0, msg.content.len());\n\n/// }\n\n/// }\n\n///\n\n/// struct TestFactory {}\n\n///\n\n/// impl ActorFactory<TestActor> for TestFactory {\n\n/// fn new_actor(&self, _context: ActorContext<TestActor>) -> TestActor {\n\n/// TestActor {}\n\n/// }\n\n/// }\n\n/// ```\n\npub trait ActorFactory<A>\n\nwhere\n\n A: Actor + UnwindSafe + 'static,\n\n{\n\n /// internally used to create the Actual Actor\n\n ///\n\n /// `ActorContext<A>` is injected and can optionally be 
stored within the actor itself.\n\n /// It can then be used to define clean a behavior for a clean [ActorSystem.stop](../prelude/struct.ActorSystem.html#method.stop)\n\n /// through [Actor.on_system_stop]\n\n fn new_actor(&self, context: ActorContext<A>) -> A;\n\n}\n", "file_path": "src/actor/actor_factory.rs", "rank": 20, "score": 71707.77153297393 }, { "content": "#[derive(Clone)]\n\nstruct StopActor {\n\n ctx: ActorContext<Self>,\n\n}\n\n\n\nimpl Actor for StopActor {\n\n fn pre_start(&mut self) {\n\n println!(\"PRE START\")\n\n }\n\n fn post_stop(&mut self) {\n\n self.ctx.system.stop(Duration::from_secs(1));\n\n println!(\"POST STOP\");\n\n }\n\n}\n\n\n\nimpl Handler<TestMsg> for StopActor {\n\n fn handle(&mut self, _msg: TestMsg, context: &ActorContext<Self>) {\n\n context.actor_ref.send(TestMsg {});\n\n println!(\"Message received!\");\n\n sleep(Duration::from_millis(100));\n\n }\n\n}\n\n\n", "file_path": "examples/stop.rs", "rank": 21, "score": 65875.680595389 }, { "content": "#[derive(Clone)]\n\nstruct SleepActor {\n\n text: String,\n\n counter: usize,\n\n}\n\n\n\nimpl Actor for SleepActor {}\n\n\n\nimpl Handler<SleepMsg> for SleepActor {\n\n fn handle(&mut self, _msg: SleepMsg, _context: &ActorContext<Self>) {\n\n self.counter += 1;\n\n //if self.counter == 1 {\n\n sleep(Duration::from_secs(3));\n\n //}\n\n //if self.counter % 1000000 == 0 {\n\n println!(\"Received SERS: {}\", self.counter);\n\n //}\n\n }\n\n}\n\n\n", "file_path": "examples/sleep.rs", "rank": 22, "score": 65875.680595389 }, { "content": "#[derive(Clone)]\n\nstruct RemoteActor {\n\n ctx: ActorContext<Self>,\n\n}\n\n\n\nimpl Actor for RemoteActor {\n\n fn handle_serialized_message(&self, msg: SerializedMessage) {\n\n let decoded :TestMsg = bincode::deserialize(&msg.content).unwrap();\n\n self.ctx.actor_ref.send(decoded)\n\n }\n\n}\n\n\n\nimpl Handler<TestMsg> for RemoteActor {\n\n fn handle(&mut self, msg: TestMsg, _context: &ActorContext<Self>) {\n\n println!(\"{}\", msg.content);\n\n 
}\n\n}\n\n\n", "file_path": "examples/serialize.rs", "rank": 23, "score": 65875.680595389 }, { "content": "struct HelloWorld {\n\n text: String,\n\n count: usize,\n\n}\n\n\n\nimpl Actor for HelloWorld {}\n\n\n", "file_path": "examples/actor.rs", "rank": 24, "score": 65875.680595389 }, { "content": "#[derive(Clone)]\n\nstruct ErrActor {\n\n text: String,\n\n counter: usize,\n\n}\n\n\n\nimpl Actor for ErrActor {}\n\n\n\nimpl Handler<ErrMsg> for ErrActor {\n\n fn handle(&mut self, msg: ErrMsg, _context: &ActorContext<Self>) {\n\n self.counter += 1;\n\n if msg.text == \"sers+1\" {\n\n panic!(\"ficl\");\n\n }\n\n println!(\"Received SERS: {}\", self.counter);\n\n }\n\n}\n\n\n", "file_path": "examples/error.rs", "rank": 25, "score": 65875.680595389 }, { "content": "pub trait MessageEnvelopeTrait<A>: Send + Sync\n\nwhere\n\n A: Actor,\n\n{\n\n fn handle(&mut self, actor: &mut A, context: &ActorContext<A>) -> MessageType;\n\n}\n\n\n\npub struct MessageEnvelope<A>(Box<dyn MessageEnvelopeTrait<A> + Send + Sync>);\n\n\n\nimpl<A> MessageEnvelope<A> {\n\n pub fn new<M>(msg: M) -> Self\n\n where\n\n A: Handler<M> + Actor,\n\n M: ActorMessage + Send + Sync + 'static,\n\n {\n\n MessageEnvelope(Box::new(SyncMessageEnvelope { msg: Some(msg) }))\n\n }\n\n}\n\n\n\nimpl<A> MessageEnvelopeTrait<A> for MessageEnvelope<A>\n", "file_path": "src/message/envelope.rs", "rank": 26, "score": 63976.89941358261 }, { "content": "struct RemoteActorFactory {}\n\n\n\nimpl ActorFactory<RemoteActor> for RemoteActorFactory {\n\n fn new_actor(&self, context: ActorContext<RemoteActor>) -> RemoteActor {\n\n RemoteActor { ctx: context }\n\n }\n\n}\n\n\n", "file_path": "examples/serialize.rs", "rank": 27, "score": 63444.73653275886 }, { "content": "struct HelloWorldFactory {\n\n text: String,\n\n count: usize,\n\n}\n\n\n\nimpl ActorFactory<HelloWorld> for HelloWorldFactory {\n\n fn new_actor(&self, _context: ActorContext<HelloWorld>) -> HelloWorld {\n\n HelloWorld {\n\n count: self.count,\n\n text: 
self.text.clone(),\n\n }\n\n }\n\n}\n\nimpl Handler<MessageA> for HelloWorld {\n\n fn handle(&mut self, msg: MessageA, _context: &ActorContext<Self>) {\n\n let text: String = [self.text.clone(), String::from(msg.text)].join(\" -> \");\n\n self.count += 1;\n\n println!(\"AAAA: {} Count: {}\", text, self.count)\n\n }\n\n}\n\n\n\nimpl Handler<MessageB> for HelloWorld {\n\n fn handle(&mut self, msg: MessageB, _context: &ActorContext<Self>) {\n\n let text: String = [self.text.clone(), String::from(msg.text)].join(\" -> \");\n\n self.count -= 1;\n\n println!(\"BBBB: {} Count: {}\", text, self.count)\n\n }\n\n}\n\n\n", "file_path": "examples/actor.rs", "rank": 28, "score": 63444.73653275886 }, { "content": "struct StopActorFactory {}\n\n\n\nimpl ActorFactory<StopActor> for StopActorFactory {\n\n fn new_actor(&self, context: ActorContext<StopActor>) -> StopActor {\n\n StopActor { ctx: context }\n\n }\n\n}\n\n\n", "file_path": "examples/stop.rs", "rank": 29, "score": 63444.73653275886 }, { "content": "struct ErrActorFactory {\n\n text: String,\n\n counter: usize,\n\n}\n\n\n\nimpl ActorFactory<ErrActor> for ErrActorFactory {\n\n fn new_actor(&self, _context: ActorContext<ErrActor>) -> ErrActor {\n\n ErrActor {\n\n text: self.text.clone(),\n\n counter: self.counter,\n\n }\n\n }\n\n}\n\n\n", "file_path": "examples/error.rs", "rank": 30, "score": 63444.73653275886 }, { "content": "struct SleepActorFactory {\n\n text: String,\n\n counter: usize,\n\n}\n\n\n\nimpl ActorFactory<SleepActor> for SleepActorFactory {\n\n fn new_actor(&self, _context: ActorContext<SleepActor>) -> SleepActor {\n\n SleepActor {\n\n counter: self.counter,\n\n text: self.text.clone(),\n\n }\n\n }\n\n}\n\n\n", "file_path": "examples/sleep.rs", "rank": 31, "score": 63444.73653275886 }, { "content": "/// Core trait to define Messages\n\n///\n\n/// # Examples\n\n///\n\n/// Basic usage:\n\n///\n\n/// ```rust\n\n/// use tyra::prelude::ActorMessage;\n\n///\n\n/// struct FooBar {}\n\n/// impl ActorMessage for 
FooBar {}\n\n/// ```\n", "file_path": "src/message/actor_message.rs", "rank": 32, "score": 60110.66051268274 }, { "content": "use crate::message::actor_message::ActorMessage;\n\n\n\npub struct ActorStopMessage {}\n\n\n\nimpl ActorMessage for ActorStopMessage {}\n", "file_path": "src/message/actor_stop_message.rs", "rank": 33, "score": 57850.21190224348 }, { "content": "pub trait ExecutorTrait: Send + Sync {\n\n fn handle(&mut self, is_system_stopping: bool) -> ActorState;\n\n fn get_config(&self) -> &ActorConfig;\n\n fn get_address(&self) -> ActorAddress;\n\n fn is_sleeping(&self) -> bool;\n\n fn is_stopped(&self) -> bool;\n\n fn wakeup(&mut self);\n\n}\n\n\n\npub struct Executor<A, P>\n\nwhere\n\n A: Actor + 'static,\n\n P: ActorFactory<A>,\n\n{\n\n actor: A,\n\n actor_props: P,\n\n actor_config: ActorConfig,\n\n mailbox: Mailbox<A>,\n\n queue: Receiver<MessageEnvelope<A>>,\n\n actor_address: ActorAddress,\n", "file_path": "src/actor/executor.rs", "rank": 34, "score": 56464.01039586513 }, { "content": "use crate::actor::actor::Actor;\n\nuse crate::actor::context::ActorContext;\n\nuse crate::message::actor_message::ActorMessage;\n\nuse crate::message::actor_stop_message::ActorStopMessage;\n\nuse crate::message::system_stop_message::SystemStopMessage;\n\n\n\n/// Defines which [ActorMessage] is supported per [Actor]\n\n///\n\n/// # Examples\n\n///\n\n/// Basic usage:\n\n///\n\n/// ```rust\n\n/// use tyra::prelude::{TyraConfig, ActorSystem, Actor, ActorFactory, ActorContext, SerializedMessage, ActorMessage, Handler};\n\n///\n\n/// struct TestActor {}\n\n/// impl Actor for TestActor {}\n\n///\n\n/// struct FooBar {}\n\n/// impl ActorMessage for FooBar {}\n\n///\n\n/// impl Handler<FooBar> for TestActor {\n\n/// fn handle(&mut self, _msg: FooBar, _context: &ActorContext<Self>) {\n\n/// }\n\n/// }\n\n/// ```\n", "file_path": "src/actor/handler.rs", "rank": 35, "score": 55370.355252736044 }, { "content": "use crate::actor::actor_wrapper::ActorWrapper;\n\nuse 
crate::prelude::Actor;\n\nuse crate::system::actor_system::ActorSystem;\n\nuse std::panic::UnwindSafe;\n\n\n\n/// Enables access to [ActorSystem] and [Actor] within [Handler](./trait.Handler.html) implementations\n\n///\n\n/// Also injected into [ActorFactory.new_actor](../prelude/trait.ActorFactory.html#tymethod.new_actor), so that it can be stored within the Actor\n\npub struct ActorContext<A>\n\nwhere\n\n Self: Send + Sync,\n\n A: Actor + 'static,\n\n{\n\n pub actor_ref: ActorWrapper<A>,\n\n pub system: ActorSystem,\n\n}\n\n\n\nimpl<A> UnwindSafe for ActorContext<A> where A: Actor + 'static {}\n\n\n\nimpl<A> Clone for ActorContext<A>\n", "file_path": "src/actor/context.rs", "rank": 36, "score": 55194.34433557923 }, { "content": "where\n\n A: Actor + 'static,\n\n{\n\n fn clone(&self) -> Self {\n\n Self {\n\n system: self.system.clone(),\n\n actor_ref: self.actor_ref.clone(),\n\n }\n\n }\n\n}\n", "file_path": "src/actor/context.rs", "rank": 37, "score": 55183.46192290395 }, { "content": "use crate::actor::actor::Actor;\n\nuse crate::actor::actor_wrapper::ActorWrapper;\n\nuse crate::message::actor_message::ActorMessage;\n\n\n\n/// Removes an Actor from the Router\n\npub struct RemoveActorMessage<A>\n\nwhere\n\n A: Actor + 'static,\n\n{\n\n pub actor: ActorWrapper<A>,\n\n}\n\n\n\nimpl<A> RemoveActorMessage<A>\n\nwhere\n\n A: Actor + 'static,\n\n{\n\n pub fn new(actor: ActorWrapper<A>) -> Self {\n\n Self { actor }\n\n }\n\n}\n\n\n\nimpl<A> ActorMessage for RemoveActorMessage<A> where A: Actor + 'static {}\n", "file_path": "src/routers/remove_actor_message.rs", "rank": 38, "score": 49575.02321158256 }, { "content": "use crate::actor::actor::Actor;\n\nuse crate::actor::actor_wrapper::ActorWrapper;\n\nuse crate::message::actor_message::ActorMessage;\n\n\n\n/// Adds an Actor to the Router\n\npub struct AddActorMessage<A>\n\nwhere\n\n A: Actor + 'static,\n\n{\n\n pub actor: ActorWrapper<A>,\n\n}\n\n\n\nimpl<A> AddActorMessage<A>\n\nwhere\n\n A: Actor + 'static,\n\n{\n\n 
pub fn new(actor: ActorWrapper<A>) -> Self {\n\n Self { actor }\n\n }\n\n}\n\n\n\nimpl<A> ActorMessage for AddActorMessage<A> where A: Actor + 'static {}\n", "file_path": "src/routers/add_actor_message.rs", "rank": 39, "score": 49575.02321158256 }, { "content": "fn main() {\n\n let actor_config = TyraConfig::new().unwrap();\n\n let actor_system = ActorSystem::new(actor_config);\n\n\n\n let hw = RemoteActorFactory {};\n\n let x = actor_system\n\n .builder()\n\n .set_mailbox_size(7)\n\n .spawn(\"hello-world\", hw).unwrap();\n\n let msg = TestMsg {\n\n content: String::from(\"Hello World!\")\n\n };\n\n let serialized = bincode::serialize(&msg).unwrap();\n\n actor_system.send_to_address(x.get_address(), SerializedMessage::new(serialized));\n\n\n\n actor_system.stop(Duration::from_secs(1));\n\n let result = actor_system.await_shutdown();\n\n\n\n exit(result);\n\n}\n", "file_path": "examples/serialize.rs", "rank": 40, "score": 47719.233698305325 }, { "content": "fn main() {\n\n let actor_config = TyraConfig::new().unwrap();\n\n let actor_system = ActorSystem::new(actor_config);\n\n\n\n let hw = ErrActorFactory {\n\n text: String::from(\"sers\"),\n\n counter: 0,\n\n };\n\n let x = actor_system\n\n .builder()\n\n .set_mailbox_size(7)\n\n .spawn(\"hello-world\", hw).unwrap();\n\n x.send(ErrMsg {\n\n text: String::from(\"sers+1\"),\n\n });\n\n x.send(ErrMsg {\n\n text: String::from(\"sers+2\"),\n\n });\n\n x.send(ErrMsg {\n\n text: String::from(\"sers+2\"),\n", "file_path": "examples/error.rs", "rank": 41, "score": 47719.233698305325 }, { "content": "fn main() {\n\n let actor_config = TyraConfig::new().unwrap();\n\n let actor_system = ActorSystem::new(actor_config);\n\n\n\n let hw = SleepActorFactory {\n\n text: String::from(\"sers\"),\n\n counter: 0,\n\n };\n\n let x = actor_system\n\n .builder()\n\n .set_mailbox_unbounded()\n\n .spawn(\"hello-world\", hw).unwrap();\n\n x.send(SleepMsg {\n\n text: String::from(\"sers+1\"),\n\n });\n\n\n\n 
sleep(Duration::from_secs(1));\n\n\n\n x.send(SleepMsg {\n\n text: String::from(\"sers+2\"),\n", "file_path": "examples/sleep.rs", "rank": 42, "score": 47719.233698305325 }, { "content": "fn main() {\n\n let actor_config = TyraConfig::new().unwrap();\n\n let actor_system = ActorSystem::new(actor_config);\n\n\n\n let hw = StopActorFactory {};\n\n let x = actor_system\n\n .builder()\n\n .set_mailbox_size(7)\n\n .spawn(\"hello-world\", hw).unwrap();\n\n // this is obviously handled, because it's the actor is still running\n\n x.send(TestMsg {});\n\n sleep(Duration::from_millis(700));\n\n\n\n x.stop();\n\n // this is still handled, because the actor has not handled the stop Message yet\n\n x.send(TestMsg {});\n\n sleep(Duration::from_millis(200));\n\n // this is no longer handled, because the actor has stopped by now\n\n x.send(TestMsg {});\n\n\n\n let result = actor_system.await_shutdown();\n\n\n\n exit(result);\n\n}\n", "file_path": "examples/stop.rs", "rank": 43, "score": 47719.233698305325 }, { "content": "fn main() {\n\n let actor_config = TyraConfig::new().unwrap();\n\n let actor_system = ActorSystem::new(actor_config);\n\n\n\n let hw = HelloWorldFactory {};\n\n let x = actor_system\n\n .builder()\n\n .set_mailbox_size(7)\n\n .spawn(\"hello-world-1\", hw.clone()).unwrap();\n\n\n\n let y = actor_system\n\n .builder()\n\n .set_mailbox_size(7)\n\n .spawn(\"hello-world-2\", hw).unwrap();\n\n\n\n let router_factory = RoundRobinRouterFactory::new();\n\n let router = actor_system.builder().spawn(\"hello-router\", router_factory).unwrap();\n\n\n\n router.send(AddActorMessage::new(x));\n\n router.send(AddActorMessage::new(y.clone()));\n", "file_path": "examples/router.rs", "rank": 44, "score": 47719.233698305325 }, { "content": "#[derive(Clone)]\n\nstruct SleepMsg {\n\n text: String,\n\n}\n\n\n\nimpl ActorMessage for SleepMsg {}\n\n\n", "file_path": "examples/sleep.rs", "rank": 45, "score": 41060.67472507508 }, { "content": "#[derive(Clone)]\n\nstruct ErrMsg {\n\n text: 
String,\n\n}\n\n\n\nimpl ActorMessage for ErrMsg {}\n\n\n", "file_path": "examples/error.rs", "rank": 46, "score": 41060.67472507508 }, { "content": "#[derive(Clone)]\n\nstruct TestMsg {}\n\n\n\nimpl ActorMessage for TestMsg {}\n\n\n", "file_path": "examples/stop.rs", "rank": 47, "score": 41060.67472507508 }, { "content": "struct HelloWorld {\n\n counter: usize,\n\n}\n\nimpl Actor for HelloWorld {}\n\n\n", "file_path": "examples/router.rs", "rank": 48, "score": 41060.67472507508 }, { "content": "#[derive(Serialize, Deserialize, PartialEq, Debug, Clone)]\n\nstruct TestMsg {\n\n content: String\n\n}\n\n\n\nimpl ActorMessage for TestMsg {}\n\n\n", "file_path": "examples/serialize.rs", "rank": 49, "score": 41060.67472507508 }, { "content": "#[derive(Clone)]\n\nstruct HelloWorldFactory {}\n\n\n\nimpl ActorFactory<HelloWorld> for HelloWorldFactory {\n\n fn new_actor(&self, _context: ActorContext<HelloWorld>) -> HelloWorld {\n\n HelloWorld { counter: 0 }\n\n }\n\n}\n\nimpl Handler<MessageA> for HelloWorld {\n\n fn handle(&mut self, _msg: MessageA, _context: &ActorContext<Self>) {\n\n self.counter += 1;\n\n println!(\"Received MSG {}\", self.counter);\n\n }\n\n}\n\n\n", "file_path": "examples/router.rs", "rank": 50, "score": 39797.95109113825 }, { "content": "use crate::message::actor_message::ActorMessage;\n\n\n\n/// For Remote message handling\n\n///\n\n/// WARNING: This is a working POC implementation and you should definitely expect changes before the 1.0.0 Release.\n\n///\n\n/// Namely this will include switching to a versioned serialization format (i.e. 
Protobuf/Flatbuffers)\n\n/// and it may also include some additional fields to make deserialization easier for Endusers\n\n///\n\n/// [ActorSystem.send_to_address](../prelude/struct.ActorSystem.html#method.send_to_address) uses this object to send serialized messages to Actors\n\npub struct SerializedMessage {\n\n pub content: Vec<u8>,\n\n}\n\n\n\nimpl SerializedMessage {\n\n pub fn new(content: Vec<u8>) -> Self {\n\n Self {\n\n content\n\n }\n\n }\n\n}\n\n\n\nimpl ActorMessage for SerializedMessage {}\n", "file_path": "src/message/serialized_message.rs", "rank": 51, "score": 35301.9555604113 }, { "content": "#[derive(PartialEq, Clone, Copy, Debug)]\n\npub enum MessageType {\n\n Other,\n\n ActorStopMessage,\n\n SystemStopMessage,\n\n}\n", "file_path": "src/message/message_type.rs", "rank": 52, "score": 35293.00157295811 }, { "content": "use crate::message::actor_message::ActorMessage;\n\n\n\npub struct SystemStopMessage {}\n\n\n\nimpl ActorMessage for SystemStopMessage {}\n", "file_path": "src/message/system_stop_message.rs", "rank": 53, "score": 34203.23644185615 }, { "content": "use crate::message::serialized_message::SerializedMessage;\n\nuse std::panic::UnwindSafe;\n\n\n\n/// Core trait to define Actors\n\n///\n\n///\n\n/// # Guaranteed Execution Order\n\n///\n\n/// 1. [ActorFactory.new_actor](../prelude/trait.ActorFactory.html#tymethod.new_actor)\n\n/// 2. [pre_start](../prelude/trait.Actor.html#method.pre_start)\n\n/// 3. Start processing [Handler Implementations](../prelude/trait.Handler.html#tymethod.handle)\n\n/// 4. [on_actor_stop](../prelude/trait.Actor.html#method.on_actor_stop)\n\n/// 5. Stops accepting new messages, but will continue to work through all existing Messages in Mailbox\n\n/// 6. 
[post_stop](../prelude/trait.Actor.html#method.post_stop)\n\n///\n\n/// # Examples\n\n///\n\n/// Basic usage:\n\n///\n\n/// ```rust\n", "file_path": "src/actor/actor.rs", "rank": 54, "score": 33282.386457188695 }, { "content": "/// use tyra::prelude::{TyraConfig, ActorSystem, Actor, ActorFactory, ActorContext, SerializedMessage};\n\n///\n\n/// struct TestActor {}\n\n///\n\n/// impl Actor for TestActor {}\n\n/// ```\n\n///\n\n/// # Architecture\n\n///\n\n/// ## Actor Lifecycle\n\n///\n\n/// ```text\n\n/// ┌──────────────────────────┐\n\n/// │ │\n\n/// │ │\n\n/// ┌──────▼──────┐ │\n\n/// │ │ │\n\n/// │ new_actor │ │\n\n/// │ │ │\n\n/// └──────┬──────┘ │\n", "file_path": "src/actor/actor.rs", "rank": 55, "score": 33280.31751753948 }, { "content": "/// │ │\n\n/// ┌──────▼──────┐ │\n\n/// │ │ │\n\n/// │ pre_start │ │\n\n/// │ │ │\n\n/// └──────┬──────┘ │\n\n/// │ │\n\n/// ┌─────────▼─────────┐ │\n\n/// │ ◄─────┐ │\n\n/// │ handle messages │ │loop │panic &&\n\n/// │ ├─────┘ │\n\n/// └──┬──────┬─────┬──▲┘ │RestartPolicy == Always\n\n/// │ │ │ │ │\n\n/// │ │ │ │ │\n\n/// │ │ │ │ │\n\n/// ┌────────────────▼┐ │ ┌▼──┴──────────────┐ │\n\n/// │ │ │ │ │ │\n\n/// │ on_actor_stop │ │ │ on_system_stop │ │\n\n/// │ │ │ │ │ │\n\n/// └────┬────────────┘ │ └──────────────────┘ │\n", "file_path": "src/actor/actor.rs", "rank": 56, "score": 33275.74366029576 }, { "content": "/// │ │ │\n\n/// │ │ │\n\n/// │ │ │\n\n/// │ │panic │\n\n/// ┌────────────▼───────┐ │ │\n\n/// │ │ ┌──────▼──────┐ │\n\n/// │ handle remaining │ │ │ │\n\n/// │ ├───► post_stop │ │\n\n/// │ messages │ │ │ │\n\n/// │ │ └──────┬──────┘ │\n\n/// └────────────────────┘ │ │\n\n/// │ │\n\n/// │ │\n\n/// └──────────────────────────┘\n\n/// ```\n", "file_path": "src/actor/actor.rs", "rank": 57, "score": 33271.1065290339 }, { "content": "use serde::{Deserialize, Serialize};\n\n\n\n#[derive(Debug, Serialize, Deserialize, Clone)]\n\npub struct ActorConfig {\n\n //pub actor_name: String,\n\n pub pool_name: String,\n\n pub 
mailbox_size: usize,\n\n pub message_throughput: usize,\n\n pub restart_policy: RestartPolicy,\n\n}\n\n\n\n/// Defines behavior of [Actor](../prelude/trait.Actor.html) in case of a panic when handling a message\n\n#[derive(Debug, Serialize, Deserialize, Clone, Copy, PartialEq)]\n\npub enum RestartPolicy {\n\n Never,\n\n Always,\n\n}\n", "file_path": "src/actor/actor_config.rs", "rank": 58, "score": 32219.754942983313 }, { "content": " pub fn set_restart_policy(mut self, restart_policy: RestartPolicy) -> ActorBuilder<A> {\n\n self.actor_config.restart_policy = restart_policy;\n\n self\n\n }\n\n\n\n pub fn set_pool_name(mut self, pool_name: impl Into<String>) -> ActorBuilder<A> {\n\n self.actor_config.pool_name = pool_name.into();\n\n self\n\n }\n\n\n\n pub fn set_message_throughput(mut self, message_throughput: usize) -> ActorBuilder<A> {\n\n self.actor_config.message_throughput = message_throughput;\n\n self\n\n }\n\n\n\n pub fn set_mailbox_unbounded(self) -> ActorBuilder<A> {\n\n self.set_mailbox_size(0)\n\n }\n\n\n\n pub fn set_mailbox_size(mut self, mailbox_size: usize) -> ActorBuilder<A> {\n", "file_path": "src/actor/actor_builder.rs", "rank": 59, "score": 32219.463557627412 }, { "content": "use crate::actor::actor::Actor;\n\nuse crate::actor::actor_address::ActorAddress;\n\nuse crate::actor::handler::Handler;\n\nuse crate::actor::mailbox::Mailbox;\n\nuse crate::message::actor_message::ActorMessage;\n\nuse crate::message::actor_stop_message::ActorStopMessage;\n\nuse crate::system::wakeup_manager::WakeupManager;\n\nuse std::panic::UnwindSafe;\n\n\n\n/// Wrapper used to interact with [Actor]\n\npub struct ActorWrapper<A>\n\nwhere\n\n A: Actor + 'static,\n\n{\n\n mailbox: Mailbox<A>,\n\n address: ActorAddress,\n\n wakeup_manager: WakeupManager,\n\n}\n\n\n\nimpl<A> UnwindSafe for ActorWrapper<A> where A: Actor + 'static {}\n", "file_path": "src/actor/actor_wrapper.rs", "rank": 60, "score": 32216.60493944943 }, { "content": "\n\nimpl<A> ActorWrapper<A>\n\nwhere\n\n 
A: Actor + UnwindSafe,\n\n{\n\n /// Automatically called by the [ActorBuilder.build](../prelude/struct.ActorBuilder.html#method.build)\n\n pub fn new(mailbox: Mailbox<A>, address: ActorAddress, wakeup_manager: WakeupManager) -> Self {\n\n Self {\n\n mailbox,\n\n address,\n\n wakeup_manager,\n\n }\n\n }\n\n\n\n pub fn send<M>(&self, msg: M)\n\n where\n\n A: Handler<M>,\n\n M: ActorMessage + 'static,\n\n {\n\n if self.mailbox.is_stopped() {\n", "file_path": "src/actor/actor_wrapper.rs", "rank": 61, "score": 32215.41585649581 }, { "content": "use crate::actor::actor::Actor;\n\nuse crate::actor::context::ActorContext;\n\nuse std::panic::UnwindSafe;\n\n\n\n/// [Actor] can only be created from a Factory\n\n///\n\n/// This factory approach is necessary because of the restart behavior.\n\n/// Without this factory we'd need to keep a `.clone()` of the initial Actor, which would force all Actor implementations to implement `Clone`.\n\n///\n\n/// # Examples\n\n///\n\n/// Basic usage:\n\n///\n\n/// ```rust\n\n/// use tyra::prelude::{Actor, SerializedMessage, ActorFactory, ActorContext};\n\n///\n\n/// struct TestActor {}\n\n///\n\n/// impl Actor for TestActor {\n\n/// fn handle_serialized_message(&self, msg: SerializedMessage) {\n", "file_path": "src/actor/actor_factory.rs", "rank": 62, "score": 32214.79802110298 }, { "content": " self.actor_config.mailbox_size = mailbox_size;\n\n self\n\n }\n\n\n\n /// Creates the defined [Actor] on the [ActorSystem]\n\n ///\n\n /// # Returns\n\n ///\n\n /// `Some(ActorWrapper<A>)` if actor is not running in the system\n\n ///\n\n /// `Some(ActorWrapper<A>)` if the actor is running on the system AND actor was created by the same builder or a clone of it\n\n ///\n\n /// `None` if actor is running on the system AND actor was not created by the same builder or a clone of it\n\n ///\n\n pub fn spawn<P>(&self, name: impl Into<String>, props: P) -> Option<ActorWrapper<A>>\n\n where\n\n P: ActorFactory<A> + 'static,\n\n {\n\n let actor_address = 
ActorAddress {\n\n actor: name.into(),\n", "file_path": "src/actor/actor_builder.rs", "rank": 63, "score": 32212.713984858507 }, { "content": " return;\n\n }\n\n\n\n self.mailbox.send(msg);\n\n\n\n if self.mailbox.is_sleeping() {\n\n self.wakeup_manager.wakeup(self.address.clone());\n\n }\n\n }\n\n\n\n pub fn stop(&self) {\n\n self.send(ActorStopMessage {});\n\n }\n\n\n\n pub fn get_address(&self) -> &ActorAddress {\n\n &self.address\n\n }\n\n}\n\n\n\nimpl<A> Clone for ActorWrapper<A>\n", "file_path": "src/actor/actor_wrapper.rs", "rank": 64, "score": 32211.81880402179 }, { "content": "#[derive(PartialEq, Eq, PartialOrd, Ord, Hash, Clone)]\n\npub struct ActorAddress {\n\n pub remote: String,\n\n pub system: String,\n\n pub pool: String,\n\n pub actor: String,\n\n}\n", "file_path": "src/actor/actor_address.rs", "rank": 65, "score": 32211.700833143266 }, { "content": " /// This is called through [ActorSystem.builder](../prelude/struct.ActorSystem.html#method.builder)\n\n pub fn new(system: ActorSystem, system_state: SystemState, wakeup_manager: WakeupManager) -> ActorBuilder<A> {\n\n let config = system.get_config();\n\n\n\n let actor_config = ActorConfig {\n\n pool_name: String::from(DEFAULT_POOL),\n\n mailbox_size: config.general.default_mailbox_size,\n\n message_throughput: config.general.default_message_throughput,\n\n restart_policy: config.general.default_restart_policy,\n\n };\n\n\n\n ActorBuilder {\n\n existing: Arc::new(DashMap::new()),\n\n system,\n\n system_state,\n\n wakeup_manager,\n\n actor_config,\n\n }\n\n }\n\n\n", "file_path": "src/actor/actor_builder.rs", "rank": 66, "score": 32211.101476164007 }, { "content": "use crate::actor::actor::Actor;\n\nuse crate::actor::actor_config::{ActorConfig, RestartPolicy};\n\nuse crate::actor::actor_factory::ActorFactory;\n\nuse crate::actor::actor_wrapper::ActorWrapper;\n\nuse crate::config::tyra_config::DEFAULT_POOL;\n\nuse crate::system::actor_system::ActorSystem;\n\nuse std::panic::UnwindSafe;\n\nuse 
crossbeam_channel::{unbounded, bounded};\n\nuse crate::actor::mailbox::Mailbox;\n\nuse std::sync::{Arc, RwLock};\n\nuse std::sync::atomic::AtomicBool;\n\nuse crate::actor::actor_address::ActorAddress;\n\nuse crate::actor::context::ActorContext;\n\nuse crate::actor::executor::{Executor, ExecutorTrait};\n\nuse crate::system::wakeup_manager::WakeupManager;\n\nuse crate::system::system_state::SystemState;\n\nuse dashmap::DashMap;\n\n\n\n/// Used to create [Actor]s in the [ActorSystem]\n\n///\n", "file_path": "src/actor/actor_builder.rs", "rank": 67, "score": 32210.175586563888 }, { "content": "/// assert_eq!(0, msg.content.len());\n\n/// }\n\n/// }\n\n///\n\n/// struct TestFactory {}\n\n///\n\n/// impl ActorFactory<TestActor> for TestFactory {\n\n/// fn new_actor(&self, _context: ActorContext<TestActor>) -> TestActor {\n\n/// TestActor {}\n\n/// }\n\n/// }\n\n/// ```\n", "file_path": "src/actor/actor_factory.rs", "rank": 68, "score": 32210.10118138509 }, { "content": " system: String::from(self.system.get_name()),\n\n pool: self.actor_config.pool_name.clone(),\n\n remote: String::from(\"local\"),\n\n };\n\n\n\n if self.system_state.is_actor_active(&actor_address) {\n\n if !self.existing.contains_key(&actor_address) {\n\n return None\n\n }\n\n let to_return = self.existing.get(&actor_address).unwrap().value().clone();\n\n return Some(to_return)\n\n }\n\n\n\n let (sender, receiver) = if self.actor_config.mailbox_size == 0 {\n\n unbounded()\n\n } else {\n\n bounded(self.actor_config.mailbox_size)\n\n };\n\n\n\n let mailbox = Mailbox {\n", "file_path": "src/actor/actor_builder.rs", "rank": 69, "score": 32209.967448689964 }, { "content": " is_stopped: Arc::new(AtomicBool::new(false)),\n\n is_sleeping: Arc::new(AtomicBool::new(true)),\n\n msg_in: sender,\n\n };\n\n\n\n let actor_ref = ActorWrapper::new(\n\n mailbox.clone(),\n\n actor_address.clone(),\n\n self.wakeup_manager.clone(),\n\n );\n\n\n\n let context = ActorContext {\n\n system: self.system.clone(),\n\n actor_ref: 
actor_ref.clone(),\n\n };\n\n let actor = props.new_actor(context);\n\n let actor_handler = Executor::new(\n\n props,\n\n actor_address.clone(),\n\n self.actor_config.clone(),\n", "file_path": "src/actor/actor_builder.rs", "rank": 70, "score": 32209.271237755012 }, { "content": "/// Each builder keeps a clone-safe storage of already created Actors.\n\n///\n\n/// In case the same `ActorAddress` is used multiple times with the same builder for an already running actor, it will simply return the `ActorWrapper<A>` without creating the actor a second time.\n\n/// See [.spawn()](#method.spawn) for a detailed explanation\n\n#[derive(Clone)]\n\npub struct ActorBuilder<A>\n\nwhere\n\n A: Actor + UnwindSafe + 'static,\n\n{\n\n existing: Arc<DashMap<ActorAddress, ActorWrapper<A>>>,\n\n system: ActorSystem,\n\n system_state: SystemState,\n\n wakeup_manager: WakeupManager,\n\n actor_config: ActorConfig,\n\n}\n\n\n\nimpl<A> ActorBuilder<A>\n\n where\n\n A: Actor + UnwindSafe + 'static,\n\n{\n", "file_path": "src/actor/actor_builder.rs", "rank": 71, "score": 32209.057128385906 }, { "content": " mailbox.clone(),\n\n receiver,\n\n self.system.clone(),\n\n actor_ref.clone(),\n\n );\n\n\n\n self.system_state.add_actor(actor_address.clone(), Arc::new(actor));\n\n self.wakeup_manager.add_sleeping_actor(\n\n actor_handler.get_address(),\n\n Arc::new(RwLock::new(actor_handler)),\n\n );\n\n\n\n self.existing.insert(actor_address, actor_ref.clone());\n\n Some(actor_ref)\n\n }\n\n}\n", "file_path": "src/actor/actor_builder.rs", "rank": 72, "score": 32207.312884999457 }, { "content": "where\n\n A: Actor + UnwindSafe,\n\n{\n\n fn clone(&self) -> Self {\n\n Self {\n\n wakeup_manager: self.wakeup_manager.clone(),\n\n mailbox: self.mailbox.clone(),\n\n address: self.address.clone(),\n\n }\n\n }\n\n}\n", "file_path": "src/actor/actor_wrapper.rs", "rank": 73, "score": 32207.093002261907 }, { "content": "#[derive(PartialEq, Clone, Copy, Debug)]\n\npub enum ActorState {\n\n Running,\n\n 
Sleeping,\n\n Stopped,\n\n}\n", "file_path": "src/actor/actor_state.rs", "rank": 74, "score": 32206.539590198387 }, { "content": " }\n\n}\n\n\n\nimpl Actor for Benchmark {\n\n fn on_system_stop(&mut self) {\n\n self.ctx.actor_ref.stop();\n\n }\n\n}\n\n\n\nimpl Handler<MessageA> for Benchmark {\n\n fn handle(&mut self, _msg: MessageA, context: &ActorContext<Self>) {\n\n if self.count == 0 {\n\n println!(\"Sleep 3 now\");\n\n sleep(Duration::from_secs((3) as u64));\n\n println!(\"Sleep 3 end\");\n\n self.start = Instant::now();\n\n }\n\n self.count += 1;\n\n let wip_print = self.total_msgs / 10;\n\n if self.count % wip_print == 0 {\n", "file_path": "examples/benchmark.rs", "rank": 75, "score": 30167.147683810945 }, { "content": " println!(\"{} Counter: {}\", self.name, self.count)\n\n }\n\n if self.count % self.total_msgs == 0 {\n\n let duration = self.start.elapsed();\n\n println!(\n\n \"{} It took {:?} to process {} messages\",\n\n self.name, duration, self.total_msgs\n\n );\n\n }\n\n if self.count == self.total_msgs {\n\n context.system.stop(Duration::from_secs(60));\n\n }\n\n }\n\n}\n\n\n", "file_path": "examples/benchmark.rs", "rank": 76, "score": 30159.0134226382 }, { "content": "use std::process::exit;\n\nuse std::thread::sleep;\n\nuse std::time::{Duration, Instant};\n\nuse tyra::prelude::{\n\n Actor, ActorFactory, ActorMessage, ActorSystem, ActorContext, Handler, TyraConfig,\n\n};\n\n\n", "file_path": "examples/benchmark.rs", "rank": 77, "score": 30152.980701181696 }, { "content": "use crate::actor::actor::Actor;\n\nuse crate::actor::context::ActorContext;\n\nuse crate::actor::handler::Handler;\n\nuse crate::message::actor_message::ActorMessage;\n\nuse crate::message::actor_stop_message::ActorStopMessage;\n\nuse crate::message::message_type::MessageType;\n\nuse crate::message::system_stop_message::SystemStopMessage;\n\nuse std::any::{Any, TypeId};\n\n\n", "file_path": "src/message/envelope.rs", "rank": 78, "score": 28619.42900506932 }, { "content": "where\n\n 
A: Actor,\n\n{\n\n fn handle(&mut self, act: &mut A, context: &ActorContext<A>) -> MessageType {\n\n self.0.handle(act, context)\n\n }\n\n}\n\n\n\npub struct SyncMessageEnvelope<M>\n\nwhere\n\n M: ActorMessage + Send + Sync,\n\n{\n\n msg: Option<M>,\n\n}\n\n\n\nimpl<A, M> MessageEnvelopeTrait<A> for SyncMessageEnvelope<M>\n\nwhere\n\n M: ActorMessage + Send + 'static,\n\n A: Handler<M> + Actor,\n\n{\n", "file_path": "src/message/envelope.rs", "rank": 79, "score": 28619.16599036584 }, { "content": "pub mod actor_message;\n\npub mod actor_stop_message;\n\npub mod envelope;\n\npub mod message_type;\n\npub mod serialized_message;\n\npub mod system_stop_message;\n\n\n\npub mod prelude {\n\n pub use crate::message::actor_message::ActorMessage;\n\n pub use crate::message::serialized_message::SerializedMessage;\n\n}\n", "file_path": "src/message/mod.rs", "rank": 80, "score": 28616.604750696413 }, { "content": " fn handle(&mut self, act: &mut A, context: &ActorContext<A>) -> MessageType {\n\n if let Some(msg) = self.msg.take() {\n\n let msg_type_id = msg.type_id();\n\n act.handle(msg, context);\n\n if msg_type_id == TypeId::of::<ActorStopMessage>() {\n\n return MessageType::ActorStopMessage;\n\n } else if msg_type_id == TypeId::of::<SystemStopMessage>() {\n\n return MessageType::SystemStopMessage;\n\n }\n\n }\n\n MessageType::Other\n\n }\n\n}\n", "file_path": "src/message/envelope.rs", "rank": 81, "score": 28612.771289837645 }, { "content": "use std::process::exit;\n\nuse std::time::Duration;\n\nuse tyra::prelude::{\n\n Actor, ActorFactory, ActorMessage, ActorSystem, ActorContext, Handler, TyraConfig,\n\n};\n\n\n", "file_path": "examples/actor.rs", "rank": 88, "score": 27554.320265724695 }, { "content": " text: String::from(\"sers+2\"),\n\n });\n\n x.send(MessageB {\n\n text: String::from(\"sers-1\"),\n\n });\n\n x.send(MessageA {\n\n text: String::from(\"sers+3\"),\n\n });\n\n x.send(MessageA {\n\n text: String::from(\"sers+4\"),\n\n });\n\n x.send(MessageA {\n\n text: 
String::from(\"sers+5\"),\n\n });\n\n\n\n //x.send(MessageUnsupported{text: String::from(\"sers\")});\n\n\n\n actor_system.stop(Duration::from_secs(1));\n\n exit(actor_system.await_shutdown());\n\n}\n", "file_path": "examples/actor.rs", "rank": 89, "score": 27547.051665419476 }, { "content": "use crate::message::actor_message::ActorMessage;\n\n\n\n/// Wraps any [ActorMessage](../prelude/trait.ActorMessage.html) to be sent to a Router\n\npub struct RouterMessage<M>\n\nwhere\n\n M: ActorMessage + 'static,\n\n{\n\n pub msg: M,\n\n}\n\n\n\nimpl<M> ActorMessage for RouterMessage<M> where M: ActorMessage + 'static {}\n\n\n\nimpl<M> RouterMessage<M>\n\nwhere\n\n M: ActorMessage + 'static,\n\n{\n\n pub fn new(msg: M) -> Self {\n\n Self { msg }\n\n }\n\n}\n", "file_path": "src/routers/router_message.rs", "rank": 90, "score": 27207.071441937474 }, { "content": "use crate::actor::actor::Actor;\n\nuse crate::actor::handler::Handler;\n\nuse crate::message::actor_message::ActorMessage;\n\nuse crate::message::envelope::MessageEnvelope;\n\nuse crossbeam_channel::Sender;\n\nuse std::panic::UnwindSafe;\n\nuse std::sync::atomic::{AtomicBool, Ordering};\n\nuse std::sync::Arc;\n\n\n\npub struct Mailbox<A> {\n\n pub is_stopped: Arc<AtomicBool>,\n\n pub is_sleeping: Arc<AtomicBool>,\n\n pub msg_in: Sender<MessageEnvelope<A>>,\n\n}\n\n\n\nimpl<A> Clone for Mailbox<A>\n\nwhere\n\n A: Actor + UnwindSafe,\n\n{\n\n fn clone(&self) -> Self {\n", "file_path": "src/actor/mailbox.rs", "rank": 91, "score": 26122.56635444836 }, { "content": "use crate::actor::actor::Actor;\n\nuse crate::actor::actor_address::ActorAddress;\n\nuse crate::actor::actor_config::{ActorConfig, RestartPolicy};\n\nuse crate::actor::actor_factory::ActorFactory;\n\nuse crate::actor::actor_state::ActorState;\n\nuse crate::actor::actor_wrapper::ActorWrapper;\n\nuse crate::actor::context::ActorContext;\n\nuse crate::actor::handler::Handler;\n\nuse crate::actor::mailbox::Mailbox;\n\nuse 
crate::message::actor_message::ActorMessage;\n\nuse crate::message::envelope::{MessageEnvelope, MessageEnvelopeTrait};\n\nuse crate::message::message_type::MessageType;\n\nuse crate::message::system_stop_message::SystemStopMessage;\n\nuse crate::system::actor_system::ActorSystem;\n\nuse crossbeam_channel::Receiver;\n\nuse std::panic::{catch_unwind, AssertUnwindSafe, UnwindSafe};\n\nuse std::sync::atomic::Ordering;\n\nuse std::time::{Duration, Instant};\n\n\n", "file_path": "src/actor/executor.rs", "rank": 92, "score": 26120.533767683748 }, { "content": "pub mod actor;\n\npub mod actor_address;\n\npub mod actor_builder;\n\npub mod actor_config;\n\npub mod actor_factory;\n\npub mod actor_state;\n\npub mod actor_wrapper;\n\npub mod context;\n\npub mod executor;\n\npub mod handler;\n\npub mod mailbox;\n\n\n\npub mod prelude {\n\n pub use crate::actor::actor::Actor;\n\n pub use crate::actor::actor_config::RestartPolicy;\n\n pub use crate::actor::actor_wrapper::ActorWrapper;\n\n pub use crate::actor::actor_builder::ActorBuilder;\n\n pub use crate::actor::actor_factory::ActorFactory;\n\n pub use crate::actor::context::ActorContext;\n\n pub use crate::actor::handler::Handler;\n\n}\n", "file_path": "src/actor/mod.rs", "rank": 93, "score": 26119.729410493437 }, { "content": " mailbox,\n\n queue: receiver,\n\n actor_address,\n\n is_startup: true,\n\n system_triggered_stop: false,\n\n last_wakeup: Instant::now(),\n\n context,\n\n }\n\n }\n\n pub fn send<M>(&self, msg: M)\n\n where\n\n A: Handler<M>,\n\n M: ActorMessage + 'static,\n\n {\n\n self.mailbox.msg_in.send(MessageEnvelope::new(msg)).unwrap();\n\n }\n\n}\n", "file_path": "src/actor/executor.rs", "rank": 94, "score": 26118.198080030634 }, { "content": " Self {\n\n msg_in: self.msg_in.clone(),\n\n is_stopped: self.is_stopped.clone(),\n\n is_sleeping: self.is_sleeping.clone(),\n\n }\n\n }\n\n}\n\n\n\nimpl<A> Mailbox<A>\n\nwhere\n\n A: Actor,\n\n{\n\n pub fn send<M>(&self, msg: M)\n\n where\n\n A: Handler<M>,\n\n M: 
ActorMessage + 'static,\n\n {\n\n self.msg_in.send(MessageEnvelope::new(msg)).unwrap();\n\n }\n\n\n\n pub fn is_sleeping(&self) -> bool {\n\n self.is_sleeping.load(Ordering::Relaxed)\n\n }\n\n\n\n pub fn is_stopped(&self) -> bool {\n\n self.is_stopped.load(Ordering::Relaxed)\n\n }\n\n}\n", "file_path": "src/actor/mailbox.rs", "rank": 95, "score": 26118.113469385262 }, { "content": "{\n\n pub fn new(\n\n actor_props: P,\n\n actor_address: ActorAddress,\n\n actor_config: ActorConfig,\n\n mailbox: Mailbox<A>,\n\n receiver: Receiver<MessageEnvelope<A>>,\n\n system: ActorSystem,\n\n actor_ref: ActorWrapper<A>,\n\n ) -> Self {\n\n\n\n let context = ActorContext {\n\n actor_ref,\n\n system: system.clone(),\n\n };\n\n\n\n Self {\n\n actor: actor_props.new_actor(context.clone()),\n\n actor_props,\n\n actor_config,\n", "file_path": "src/actor/executor.rs", "rank": 96, "score": 26116.997216551405 }, { "content": " is_startup: bool,\n\n system_triggered_stop: bool,\n\n last_wakeup: Instant,\n\n context: ActorContext<A>,\n\n}\n\n\n\nunsafe impl<A, P> Send for Executor<A, P>\n\nwhere\n\n A: Actor + UnwindSafe + 'static,\n\n P: ActorFactory<A>,\n\n{\n\n}\n\nunsafe impl<A, P> Sync for Executor<A, P>\n\nwhere\n\n A: Actor + UnwindSafe + 'static,\n\n P: ActorFactory<A>,\n\n{\n\n}\n\n\n\nimpl<A, P> ExecutorTrait for Executor<A, P>\n", "file_path": "src/actor/executor.rs", "rank": 97, "score": 26115.114134056246 }, { "content": " self.mailbox.is_sleeping.store(true, Ordering::Relaxed);\n\n let duration = self.last_wakeup.elapsed();\n\n if duration >= Duration::from_secs(5) {\n\n return ActorState::Sleeping;\n\n }\n\n self.mailbox.is_sleeping.store(false, Ordering::Relaxed);\n\n return ActorState::Running;\n\n }\n\n\n\n let mut msg = m.unwrap();\n\n let result = catch_unwind(AssertUnwindSafe(|| {\n\n msg.handle(&mut self.actor, &self.context)\n\n }));\n\n if result.is_err() {\n\n println!(\"ACTOR PANIC\");\n\n self.actor.post_stop();\n\n\n\n if self.actor_config.restart_policy == 
RestartPolicy::Never || self.is_stopped() {\n\n self.mailbox.is_stopped.store(true, Ordering::Relaxed);\n\n return ActorState::Stopped;\n", "file_path": "src/actor/executor.rs", "rank": 98, "score": 26113.52233385133 }, { "content": " }\n\n self.actor = self.actor_props.new_actor(self.context.clone());\n\n self.is_startup = true;\n\n return ActorState::Running;\n\n }\n\n let message_type = result.unwrap();\n\n if message_type == MessageType::ActorStopMessage {\n\n self.mailbox.is_stopped.store(true, Ordering::Relaxed);\n\n return ActorState::Running;\n\n }\n\n\n\n ActorState::Running\n\n }\n\n\n\n fn get_config(&self) -> &ActorConfig {\n\n &self.actor_config\n\n }\n\n\n\n fn get_address(&self) -> ActorAddress {\n\n self.actor_address.clone()\n", "file_path": "src/actor/executor.rs", "rank": 99, "score": 26112.297939883454 } ]
Rust
solver/src/lib.rs
MattWhelan/words
ca0788715a3af47e4b09157b0f60aa36af513f6a
use std::collections::{HashMap, HashSet}; use strsim::hamming; use wordlib::{char_freq, freq_scored_guesses, Knowledge}; use crate::LetterOutcome::{ABSENT, HIT, MISS}; pub trait GuessStrategy { fn next_guess(&self, knowledge: &Knowledge) -> &str; } pub struct FreqStrategy<'a> { words: &'a [&'a str], } impl<'a> FreqStrategy<'a> { pub fn new(words: &'a [&'a str]) -> FreqStrategy<'a> { FreqStrategy { words, } } } impl<'a> GuessStrategy for FreqStrategy<'a> { fn next_guess(&self, knowledge: &Knowledge) -> &'a str { let candidates = knowledge.filter(&self.words); if candidates.len() < 100 { println!("Candidates ({}):", candidates.len()); for w in candidates.iter() { println!(" {}", w); } if candidates.len() < 3 { return &candidates[0]; } } else { println!("Candidates: {}", candidates.len()); } let freq = char_freq(&candidates); let mut coverage = HashSet::new(); coverage.extend(knowledge.get_covered()); let word_scores = freq_scored_guesses(self.words, &freq, &coverage); let top_score = word_scores[0].1; let mut guesses: Vec<&str> = word_scores .iter() .take_while(|(_, s)| *s == top_score) .map(|(w, _)| *w) .collect(); guesses.sort_by_cached_key(|w| { candidates .iter() .map(|c| hamming(w, c).unwrap()) .min() .unwrap() }); println!("Guesses:"); for guess in guesses.iter().take(5) { println!(" {}", guess); } guesses[0] } } pub fn valid_words(target_word_len: usize, all_words: &[String], disallowed: HashSet<String>) -> Vec<&str> { all_words .iter() .filter(|s| s.chars().all(|ch| ch.is_lowercase())) .filter(|s| s.len() == target_word_len) .filter(|s| !disallowed.contains(s.as_str())) .map(|s| s.as_str()) .collect() } pub struct EntropyStrategy<'a> { words: &'a [&'a str], } impl<'a> EntropyStrategy<'a> { pub fn new(words: &'a [&'a str]) -> EntropyStrategy<'a> { EntropyStrategy { words, } } fn entropy_of_guess(candidates: &[&str], w: &str) -> f32 { let num_candidates = candidates.len() as f32; let pattern_counts = candidates.iter() .map(|c| (LetterOutcome::pattern(w, c), 
1i32)) .fold(HashMap::with_capacity(300), |mut acc, (p, c)| { *acc.entry(p).or_default() += c; acc }); let word_entropy = pattern_counts.values() .map(|c: &i32| { let p = *c as f32 / num_candidates; p * -p.log2() }) .sum::<f32>(); word_entropy } } #[derive(Debug, Copy, Clone, Hash, Eq, PartialEq)] enum LetterOutcome { HIT, MISS, ABSENT, } impl LetterOutcome { fn _each() -> [LetterOutcome; 3] { [HIT, MISS, ABSENT] } fn pattern(guess: &str, target: &str) -> Vec<LetterOutcome> { let mut ret: Vec<LetterOutcome> = Vec::with_capacity(guess.len()); ret.extend( guess.chars() .zip(target.chars()) .map(|(g, t)| { if g == t { HIT } else if target.chars().any(|ch| ch == g) { MISS } else { ABSENT } }) ); ret } } impl<'a> GuessStrategy for EntropyStrategy<'a> { fn next_guess(&self, knowledge: &Knowledge) -> &'a str { let candidates = knowledge.filter(&self.words); println!("Candidates: {}; H = {}", candidates.len(), (candidates.len() as f32).log2()); if candidates.len() < 3 { return &candidates[0]; } if candidates.len() == self.words.len() { let ret = "raise"; let word_entropy = Self::entropy_of_guess(&candidates, ret); println!("{}: ∆H = {}", ret, word_entropy); return "raise" } let (guess, entropy) = self.words.iter() .map(|w| { let word_entropy = Self::entropy_of_guess(&candidates, w); (w, word_entropy) }) .max_by(|(_, l), (_, r)| l.partial_cmp(r).unwrap()) .unwrap(); println!("{}: H = {}", guess, entropy); guess } } #[cfg(test)] mod test { use std::collections::HashSet; use wordlib::{Knowledge, words_from_file}; use crate::{EntropyStrategy, GuessStrategy, valid_words}; #[test] fn test() { let all_words: Vec<String> = words_from_file("/usr/share/dict/words").unwrap(); let target_words: Vec<&str> = valid_words(5, &all_words, HashSet::new()); let guesser = EntropyStrategy::new(&target_words); let knowledge = Knowledge::from_tries(".a.se", "aseup", &["raise", "croup"]); let guess = guesser.next_guess(&knowledge); assert_eq!("pause", guess); } }
use std::collections::{HashMap, HashSet}; use strsim::hamming; use wordlib::{char_freq, freq_scored_guesses, Knowledge}; use crate::LetterOutcome::{ABSENT, HIT, MISS}; pub trait GuessStrategy { fn next_guess(&self, knowledge: &Knowledge) -> &str; } pub struct FreqStrategy<'a> { words: &'a [&'a str], } impl<'a> FreqStrategy<'a> { pub fn new(words: &'a [&'a str]) -> FreqStrategy<'a> { FreqStrategy { words, } } } impl<'a> GuessStrategy for FreqStrategy<'a> { fn next_guess(&self, knowledge: &Knowledge) -> &'a str { let candidates = knowledge.filter(&self.words); if candidates.len() < 100 { println!("Candidates ({}):", candidates.len()); for w in candidates.iter() { println!(" {}", w); } if candidates.len() < 3 { return &candidates[0]; } } else { println!("Candidates: {}", candidates.len()); } let freq = char_freq(&candidates); let mut coverage = HashSet::new(); coverage.extend(knowledge.get_covered()); let word_scores = freq_scored_guesses(self.words, &freq, &coverage); let top_score = word_scores[0].1; let mut guesses: Vec<&str> = word_scores .iter() .take_while(|(_, s)| *s == top_score) .map(|(w, _)| *w) .collect(); guesses.sort_by_cached_key(|w| { candidates .iter() .map(|c| hamming(w, c).unwrap()) .m
return &candidates[0]; } if candidates.len() == self.words.len() { let ret = "raise"; let word_entropy = Self::entropy_of_guess(&candidates, ret); println!("{}: ∆H = {}", ret, word_entropy); return "raise" } let (guess, entropy) = self.words.iter() .map(|w| { let word_entropy = Self::entropy_of_guess(&candidates, w); (w, word_entropy) }) .max_by(|(_, l), (_, r)| l.partial_cmp(r).unwrap()) .unwrap(); println!("{}: H = {}", guess, entropy); guess } } #[cfg(test)] mod test { use std::collections::HashSet; use wordlib::{Knowledge, words_from_file}; use crate::{EntropyStrategy, GuessStrategy, valid_words}; #[test] fn test() { let all_words: Vec<String> = words_from_file("/usr/share/dict/words").unwrap(); let target_words: Vec<&str> = valid_words(5, &all_words, HashSet::new()); let guesser = EntropyStrategy::new(&target_words); let knowledge = Knowledge::from_tries(".a.se", "aseup", &["raise", "croup"]); let guess = guesser.next_guess(&knowledge); assert_eq!("pause", guess); } }
in() .unwrap() }); println!("Guesses:"); for guess in guesses.iter().take(5) { println!(" {}", guess); } guesses[0] } } pub fn valid_words(target_word_len: usize, all_words: &[String], disallowed: HashSet<String>) -> Vec<&str> { all_words .iter() .filter(|s| s.chars().all(|ch| ch.is_lowercase())) .filter(|s| s.len() == target_word_len) .filter(|s| !disallowed.contains(s.as_str())) .map(|s| s.as_str()) .collect() } pub struct EntropyStrategy<'a> { words: &'a [&'a str], } impl<'a> EntropyStrategy<'a> { pub fn new(words: &'a [&'a str]) -> EntropyStrategy<'a> { EntropyStrategy { words, } } fn entropy_of_guess(candidates: &[&str], w: &str) -> f32 { let num_candidates = candidates.len() as f32; let pattern_counts = candidates.iter() .map(|c| (LetterOutcome::pattern(w, c), 1i32)) .fold(HashMap::with_capacity(300), |mut acc, (p, c)| { *acc.entry(p).or_default() += c; acc }); let word_entropy = pattern_counts.values() .map(|c: &i32| { let p = *c as f32 / num_candidates; p * -p.log2() }) .sum::<f32>(); word_entropy } } #[derive(Debug, Copy, Clone, Hash, Eq, PartialEq)] enum LetterOutcome { HIT, MISS, ABSENT, } impl LetterOutcome { fn _each() -> [LetterOutcome; 3] { [HIT, MISS, ABSENT] } fn pattern(guess: &str, target: &str) -> Vec<LetterOutcome> { let mut ret: Vec<LetterOutcome> = Vec::with_capacity(guess.len()); ret.extend( guess.chars() .zip(target.chars()) .map(|(g, t)| { if g == t { HIT } else if target.chars().any(|ch| ch == g) { MISS } else { ABSENT } }) ); ret } } impl<'a> GuessStrategy for EntropyStrategy<'a> { fn next_guess(&self, knowledge: &Knowledge) -> &'a str { let candidates = knowledge.filter(&self.words); println!("Candidates: {}; H = {}", candidates.len(), (candidates.len() as f32).log2()); if candidates.len() < 3 {
random
[ { "content": "pub fn char_freq(words: &[&str]) -> HashMap<char, usize> {\n\n words\n\n .iter()\n\n .flat_map(|w| w.chars())\n\n .fold(HashMap::new(), |mut acc, ch| {\n\n *acc.entry(ch).or_default() += 1;\n\n acc\n\n })\n\n}\n\n\n", "file_path": "wordlib/src/lib.rs", "rank": 0, "score": 89005.66664026173 }, { "content": "fn word_score(word: &str, freq: &HashMap<char, usize>, covered: &HashSet<char>) -> usize {\n\n let uniques: HashSet<char> = word.chars().collect();\n\n uniques\n\n .into_iter()\n\n .map(|ch| {\n\n if covered.contains(&ch) {\n\n 0\n\n } else {\n\n *freq.get(&ch).unwrap_or(&0)\n\n }\n\n })\n\n .sum()\n\n}\n\n\n", "file_path": "wordlib/src/lib.rs", "rank": 1, "score": 86753.33253137566 }, { "content": "pub fn coverage_guesses<'a>(\n\n words: &'a [&str],\n\n freq: &HashMap<char, usize>,\n\n covered: &mut HashSet<char>,\n\n) -> Vec<&'a str> {\n\n //Score each word by freq-points\n\n let best = words\n\n .iter()\n\n .max_by_key(|w| word_score(w, freq, covered))\n\n .unwrap();\n\n covered.extend(best.chars());\n\n\n\n let mut ret = if covered.len() < freq.len() {\n\n coverage_guesses(words, freq, covered)\n\n } else {\n\n Vec::new()\n\n };\n\n ret.insert(0, best);\n\n\n\n ret\n\n}\n\n\n", "file_path": "wordlib/src/lib.rs", "rank": 2, "score": 86035.89181256513 }, { "content": "pub fn freq_scored_guesses<'a>(\n\n words: &'a [&str],\n\n freq: &HashMap<char, usize>,\n\n covered: &HashSet<char>,\n\n) -> Vec<(&'a str, usize)> {\n\n //Score each word by freq-points\n\n let mut words: Vec<(&str, usize)> = words\n\n .iter()\n\n .map(|&w| (w, word_score(w, freq, covered)))\n\n .collect();\n\n words.sort_by_key(|(_, s)| *s);\n\n words.reverse();\n\n\n\n words\n\n}\n\n\n", "file_path": "wordlib/src/lib.rs", "rank": 3, "score": 82666.27442737386 }, { "content": "pub fn words_from_file<T: FromIterator<String>>(filename: &str) -> io::Result<T> {\n\n let mut file = File::open(filename)?;\n\n let mut contents = String::new();\n\n file.read_to_string(&mut contents)?;\n\n 
Ok(contents.lines()\n\n .map(|s| s.to_string())\n\n .collect())\n\n}\n\n\n\n#[derive(Clone, Hash, Debug)]\n\npub struct Knowledge {\n\n /// Represents the length and known character positions\n\n pattern: Vec<Option<char>>,\n\n /// All characters known to be present\n\n present: BTreeSet<char>,\n\n /// Characters known absent from the word\n\n absent: BTreeSet<char>,\n\n /// Characters present but not hits\n\n misses: Vec<BTreeSet<char>>,\n\n}\n", "file_path": "wordlib/src/lib.rs", "rank": 4, "score": 81004.11124332533 }, { "content": "pub fn show_freq(freq: &HashMap<char, usize>) {\n\n let mut by_freq = Vec::from_iter(freq.iter());\n\n by_freq.sort_by_key(|(_, v)| *v);\n\n by_freq.reverse();\n\n println!(\"Letter | Count\");\n\n for (letter, count) in by_freq {\n\n println!(\"{:7}| {}\", letter, count);\n\n }\n\n println!()\n\n}\n\n\n", "file_path": "wordlib/src/lib.rs", "rank": 7, "score": 72788.41412123948 }, { "content": "#[test]\n\nfn average_guesses() {\n\n const LEN: usize = 5;\n\n let all_words: Vec<String> = words_from_file(\"/usr/share/dict/words\").unwrap();\n\n let target_words: Vec<&str> = valid_words(LEN, &all_words, HashSet::new());\n\n\n\n let guesser = EntropyStrategy::new(&target_words);\n\n // let guesser = FreqStrategy::new(&target_words);\n\n\n\n let mut total_attempts: u64 = 0;\n\n let mut failures = 0;\n\n\n\n let mut rng = &mut rand::thread_rng();\n\n let puzzles: Vec<&str> = target_words.choose_multiple(&mut rng, 500).copied().collect();\n\n\n\n let start = time::Instant::now();\n\n for answer in puzzles.iter().copied() {\n\n let mut knowledge = Knowledge::new(LEN);\n\n\n\n let mut num_attempts: Option<u64> = None;\n\n for attempt in 1..7 {\n", "file_path": "solver/tests/score_guesses.rs", "rank": 8, "score": 48733.08145869901 }, { "content": "#[derive(Parser, Debug)]\n\n#[clap(author, version, about, long_about = None)]\n\nstruct Args {\n\n /// The word list to draw guesses from\n\n #[clap(short, default_value = \"/usr/share/dict/words\", 
env = \"WORDLIST\")]\n\n wordlist: String,\n\n /// The list of known disallowed words in the word list\n\n #[clap(short, default_value = \"disallowed\")]\n\n disallowed: String,\n\n /// A pattern describing the known positions of letters, and the length of the word.\n\n /// Use '.' for unknown positions. E.g. \"f....\" for a five-letter word known to start with 'f'.\n\n pattern: String,\n\n /// Characters that are known to appear in the word, whether or not their positions are known.\n\n /// Order doesn't matter.\n\n #[clap(default_value = \"\")]\n\n chars: String,\n\n /// Previous guesses. Used to derive information about what letters are not present, and what\n\n /// positions letters are not in.\n\n tried: Vec<String>,\n\n}\n\n\n", "file_path": "solver/src/main.rs", "rank": 9, "score": 35792.43179049197 }, { "content": "Example output:\n\n\n\n Letter | Count\n\n a | 4467\n\n e | 4254\n\n r | 3043\n\n o | 2798\n\n i | 2581\n\n s | 2383\n\n t | 2381\n\n l | 2368\n\n n | 2214\n\n u | 1881\n\n y | 1605\n\n c | 1546\n\n d | 1399\n\n h | 1323\n\n m | 1299\n\n p | 1291\n\n b | 1160\n\n g | 1102\n\n k | 882\n\n w | 685\n\n f | 661\n\n v | 466\n\n z | 250\n\n x | 189\n\n j | 163\n\n q | 84\n\n \n\n arose\n\n until\n\n duchy\n\n plumb\n\n gawky\n\n vifda\n\n azoxy\n\n jiqui\n\n\n", "file_path": "guesses.md", "rank": 10, "score": 24587.200305271865 }, { "content": "fn main() -> Result<(), anyhow::Error> {\n\n let arguments: Vec<String> = args().collect();\n\n if arguments.len() < 2 {\n\n println!(\"Usage: {} <file> [disallowed_words ...]\", arguments[0]);\n\n process::exit(1);\n\n } else {\n\n // Parse args\n\n let input_filename = arguments[1].as_str();\n\n let disallowed: HashSet<String> = arguments[2..].iter().cloned().collect();\n\n\n\n // Read the word list\n\n let mut file = File::open(input_filename)?;\n\n let mut contents = String::new();\n\n file.read_to_string(&mut contents)?;\n\n let all_words: Vec<&str> = contents.lines().collect();\n\n\n\n // Filter out proper 
names, words of the wrong length, and disallowed words\n\n let target_words: Vec<&str> = all_words\n\n .iter()\n\n .filter(|s| s.chars().all(|ch| ch.is_lowercase()))\n", "file_path": "static/src/main.rs", "rank": 11, "score": 23596.20237993217 }, { "content": "fn main() -> Result<(), anyhow::Error> {\n\n let args = Args::parse();\n\n\n\n let target_word_len = args.pattern.len();\n\n\n\n let tries: Vec<&str> = args.tried.iter().map(|s| s.as_str()).collect();\n\n let knowledge = Knowledge::from_tries(\n\n args.pattern.as_str(),\n\n args.chars.as_str(),\n\n tries.as_slice()\n\n );\n\n\n\n // Read the word list\n\n let all_words: Vec<String> = words_from_file(args.wordlist.as_str())?;\n\n let disallowed: HashSet<String> = words_from_file(args.disallowed.as_str())\n\n .unwrap_or(HashSet::new());\n\n\n\n // Filter out proper names, words of the wrong length, and disallowed words\n\n let target_words: Vec<&str> = solver::valid_words(target_word_len, &all_words, disallowed);\n\n\n\n let guesser = EntropyStrategy::new(&target_words);\n\n let guess = guesser.next_guess(&knowledge);\n\n\n\n println!(\"Next guess: {}\", guess);\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "solver/src/main.rs", "rank": 12, "score": 23596.20237993217 }, { "content": "use std::collections::HashSet;\n\nuse std::time;\n\n\n\nuse solver::{EntropyStrategy, GuessStrategy, valid_words};\n\nuse wordlib::{Knowledge, words_from_file};\n\n\n\nuse rand::seq::SliceRandom;\n\n\n\n#[test]\n", "file_path": "solver/tests/score_guesses.rs", "rank": 13, "score": 17860.105145782876 }, { "content": " let guess = guesser.next_guess(&knowledge);\n\n\n\n if guess == answer {\n\n num_attempts = Some(attempt);\n\n break;\n\n }\n\n knowledge.learn(guess, answer);\n\n }\n\n if let Some(x) = num_attempts {\n\n total_attempts += x;\n\n } else {\n\n total_attempts += 7;\n\n failures += 1;\n\n }\n\n }\n\n let end = time::Instant::now();\n\n\n\n let word_count = puzzles.len() as f32;\n\n let average_attempts: f32 = total_attempts 
as f32 / word_count;\n\n println!(\"{} words. Average attempts {}. {} failures.\", word_count, average_attempts, failures);\n", "file_path": "solver/tests/score_guesses.rs", "rank": 14, "score": 17858.273438193024 }, { "content": "\n\n let duration = end.duration_since(start);\n\n let guess_time = duration.div_f64(total_attempts as f64);\n\n\n\n println!(\"{} attempts in {}ms. {} µs/attempt\", total_attempts, duration.as_millis(), guess_time.as_micros());\n\n\n\n assert!(average_attempts < 4.1);\n\n assert!(failures as f32 / word_count < 0.001);\n\n}\n", "file_path": "solver/tests/score_guesses.rs", "rank": 15, "score": 17856.22197913767 }, { "content": "# Words\n\n\n\nWords takes a word list, and produces a sequence of optimal guesses for word games similar to Wordle by Josh Wardle.\n\n\n\nIn this case, \"optimal\" means maximizing the information that the guess reveals about the puzzle. The guesses produced cover the entire alphabet, and focus on the most frequent letters first. \n\n\n\nYou may also pass additional arguments, which are words from the word list you disallow. This is useful if your word list contains words that the puzzle's word list does not. \n\n\n\nObviously, you won't win by just playing these guesses, as they depend only on the word list, not the puzzle. This is not a wordle solver.\n", "file_path": "README.md", "rank": 16, "score": 12418.119893413426 }, { "content": " .filter(|s| s.len() == TARGET_WORD_LEN)\n\n .filter(|s| !disallowed.contains(**s))\n\n .copied()\n\n .collect();\n\n\n\n // Frequency analysis. 
Characters are worth points equal to their number of appearances in the word list\n\n let freq = char_freq(&target_words);\n\n\n\n show_freq(&freq);\n\n\n\n // Find the words which give you the best character coverage\n\n let mut coverage = HashSet::new();\n\n let guesses = coverage_guesses(&target_words, &freq, &mut coverage);\n\n for guess in guesses {\n\n println!(\"{}\", guess);\n\n }\n\n\n\n Ok(())\n\n }\n\n}\n", "file_path": "static/src/main.rs", "rank": 19, "score": 11.065798149883472 }, { "content": " self.pattern[at] = Some(ch);\n\n self.present.insert(ch);\n\n }\n\n\n\n fn add_miss(&mut self, ch: char, at: usize) {\n\n self.misses[at].insert(ch);\n\n self.present.insert(ch);\n\n }\n\n\n\n fn add_absent(&mut self, ch: char) {\n\n self.absent.insert(ch);\n\n }\n\n\n\n pub fn get_covered(&self) -> HashSet<char> {\n\n self.present.iter().chain(self.absent.iter()).copied().collect()\n\n }\n\n\n\n pub fn learn(&mut self, guess: &str, answer: &str) {\n\n let answer_set: HashSet<char> = answer.chars().collect();\n\n guess.chars().zip(answer.chars())\n", "file_path": "wordlib/src/lib.rs", "rank": 22, "score": 9.70549596774142 }, { "content": "use std::collections::HashSet;\n\nuse std::env::args;\n\nuse std::fs::File;\n\nuse std::io::Read;\n\nuse std::process;\n\nuse wordlib::{char_freq, coverage_guesses, show_freq};\n\n\n\nconst TARGET_WORD_LEN: usize = 5;\n\n\n", "file_path": "static/src/main.rs", "rank": 24, "score": 8.789077472376663 }, { "content": " if w.len() == self.pattern.len() {\n\n if w.chars()\n\n .enumerate()\n\n .all(|(i, ch)| self.fits(&ch, i)) {\n\n //Each character fits (is plausible), but does w cover all the things we know?\n\n let word_chars: BTreeSet<char> = w.chars().collect();\n\n word_chars.is_superset(&self.present)\n\n } else {\n\n false\n\n }\n\n } else {\n\n false\n\n }\n\n }\n\n\n\n pub fn filter<'a>(&self, words: &[&'a str]) -> Vec<&'a str> {\n\n words.iter().filter(|w| self.check_word(w)).copied().collect()\n\n }\n\n\n\n fn 
add_hit(&mut self, ch: char, at: usize) {\n", "file_path": "wordlib/src/lib.rs", "rank": 25, "score": 8.7555626429618 }, { "content": "\n\nimpl Knowledge {\n\n pub fn new(length: usize) -> Knowledge {\n\n Knowledge {\n\n pattern: vec![None; length],\n\n present: BTreeSet::new(),\n\n absent: BTreeSet::new(),\n\n misses: vec![BTreeSet::new(); length]\n\n }\n\n }\n\n\n\n pub fn from_tries(pattern: &str, present_chs: &str, tries: &[&str]) -> Knowledge {\n\n let length = pattern.len();\n\n let mut ret = Knowledge::new(length);\n\n for (i, ch) in pattern.chars().enumerate() {\n\n if ch != '.' {\n\n ret.add_hit(ch, i);\n\n }\n\n }\n\n\n", "file_path": "wordlib/src/lib.rs", "rank": 27, "score": 7.990747310212283 }, { "content": " }\n\n\n\n pub fn is_empty(&self) -> bool {\n\n self.pattern.iter().all(|x| x.is_none()) &&\n\n self.absent.is_empty() &&\n\n self.present.is_empty() &&\n\n self.misses.iter().all(|s| s.is_empty())\n\n }\n\n\n\n pub fn fits(&self, ch: &char, at: usize) -> bool {\n\n if let Some(known) = self.pattern[at] {\n\n return known == *ch\n\n } else if self.present.contains(&ch){\n\n !self.misses[at].contains(&ch)\n\n } else {\n\n !self.absent.contains(&ch)\n\n }\n\n }\n\n\n\n pub fn check_word(&self, w: &str) -> bool {\n", "file_path": "wordlib/src/lib.rs", "rank": 28, "score": 7.493644957841539 }, { "content": "use std::collections::HashSet;\n\n\n\nuse clap::Parser;\n\nuse solver::{EntropyStrategy, GuessStrategy};\n\n\n\nuse wordlib::{Knowledge, words_from_file};\n\n\n\n#[derive(Parser, Debug)]\n\n#[clap(author, version, about, long_about = None)]\n", "file_path": "solver/src/main.rs", "rank": 30, "score": 7.06266576526245 }, { "content": " let present: HashSet<char> = present_chs.chars().collect();\n\n\n\n for &w in tries {\n\n for (i, ch) in w.chars().enumerate() {\n\n if present.contains(&ch) {\n\n match &ret.pattern[i] {\n\n None => ret.add_miss(ch, i),\n\n Some(hit) => {\n\n if *hit != ch {\n\n ret.add_miss(ch, i);\n\n }\n\n }\n\n }\n\n } else {\n\n 
ret.absent.insert(ch);\n\n }\n\n }\n\n }\n\n\n\n ret\n", "file_path": "wordlib/src/lib.rs", "rank": 31, "score": 4.578368015294248 }, { "content": "use std::collections::{BTreeSet, HashMap, HashSet};\n\nuse std::fs::File;\n\nuse std::io;\n\nuse std::io::Read;\n\n\n", "file_path": "wordlib/src/lib.rs", "rank": 32, "score": 3.787836702360317 }, { "content": " .enumerate()\n\n .for_each(|(i, (ch, answer_char))| {\n\n if ch == answer_char {\n\n self.add_hit(ch, i);\n\n } else if answer_set.contains(&ch) {\n\n self.add_miss(ch, i);\n\n } else {\n\n self.add_absent(ch);\n\n }\n\n });\n\n }\n\n}\n", "file_path": "wordlib/src/lib.rs", "rank": 33, "score": 3.059695177832593 } ]
Rust
bfffs/src/bin/bfffs.rs
fkorotkov/bfffs
e0f3fddae49a19bcbe5593058cd2ef54b2496572
use bfffs::common::{ database::TreeID, device_manager::DevManager, property::Property }; use clap::crate_version; use futures::future; use std::{ path::Path, process::exit, sync::Arc }; use tokio::{ executor::current_thread::TaskExecutor, runtime::current_thread::Runtime }; mod check { use super::*; pub fn main(args: &clap::ArgMatches) { let poolname = args.value_of("name").unwrap().to_owned(); let disks = args.values_of("disks").unwrap(); let dev_manager = DevManager::default(); for dev in disks.map(str::to_string) { dev_manager.taste(dev); } let mut rt = tokio_io_pool::Runtime::new(); let handle = rt.handle().clone(); let db = Arc::new(rt.block_on(future::lazy(move || { dev_manager.import_by_name(poolname, handle) .unwrap_or_else(|_e| { eprintln!("Error: pool not found"); exit(1); }) })).unwrap()); rt.block_on(future::lazy(move || { db.check() })).unwrap(); } } mod debug { use super::*; use tokio::runtime::current_thread::Runtime; fn dump_fsm<P: AsRef<Path>, S: AsRef<str>>(poolname: S, disks: &[P]) { let dev_manager = DevManager::default(); for disk in disks { dev_manager.taste(disk); } let uuid = dev_manager.importable_pools().iter() .filter(|(name, _uuid)| { *name == poolname.as_ref() }).nth(0).unwrap().1; let mut rt = Runtime::new().unwrap(); let clusters = rt.block_on(future::lazy(move || { dev_manager.import_clusters(uuid) })).unwrap(); for c in clusters { println!("{}", c.dump_fsm()); } } fn dump_tree<P: AsRef<Path>>(poolname: String, disks: &[P]) { let poolname2 = poolname.to_owned(); let dev_manager = DevManager::default(); for disk in disks { dev_manager.taste(disk); } let mut rt = tokio_io_pool::Runtime::new(); let handle = rt.handle().clone(); let db = Arc::new(rt.block_on(future::lazy(move || { dev_manager.import_by_name(poolname2, handle) .unwrap_or_else(|_e| { eprintln!("Error: pool not found"); exit(1); }) })).unwrap()); let tree_id = TreeID::Fs(0); db.dump(&mut std::io::stdout(), tree_id).unwrap() } pub fn main(args: &clap::ArgMatches) { match 
args.subcommand() { ("dump", Some(args)) => { let poolname = args.value_of("name").unwrap(); let disks = args.values_of("disks").unwrap().collect::<Vec<&str>>(); if args.is_present("fsm") { dump_fsm(&poolname, &disks[..]); } if args.is_present("tree") { dump_tree(poolname.to_string(), &disks[..]); } }, _ => { println!("Error: subcommand required\n{}", args.usage()); std::process::exit(2); }, } } } mod pool { use bfffs::common::BYTES_PER_LBA; use bfffs::common::cache::Cache; use bfffs::common::database::*; use bfffs::common::ddml::DDML; use bfffs::common::idml::IDML; use bfffs::common::pool::{ClusterProxy, Pool}; use futures::Future; use std::{ convert::TryFrom, num::NonZeroU64, str::FromStr, sync::Mutex }; use super::*; fn create(args: &clap::ArgMatches) { let rt = Runtime::new().unwrap(); let name = args.value_of("name").unwrap().to_owned(); let zone_size = args.value_of("zone_size") .map(|s| { let lbas = u64::from_str(s) .expect("zone_size must be a decimal integer") * 1024 * 1024 / (BYTES_PER_LBA as u64); NonZeroU64::new(lbas).expect("zone_size may not be zero") }); let propstrings = if let Some(it) = args.values_of("property") { it.collect::<Vec<_>>() } else { Vec::new() }; let mut builder = Builder::new(name, propstrings, zone_size, rt); let mut vdev_tokens = args.values_of("vdev").unwrap(); let mut cluster_type = None; let mut devs = vec![]; loop { let next = vdev_tokens.next(); match next { None => { if !devs.is_empty() { match cluster_type { Some("mirror") => builder.create_mirror(&devs[..]), Some("raid") => builder.create_raid(&devs[..]), None => assert!(devs.is_empty()), _ => unreachable!() } } break; }, Some("mirror") => { if !devs.is_empty() { builder.create_cluster(cluster_type.as_ref().unwrap(), &devs[..]); } devs.clear(); cluster_type = Some("mirror") }, Some("raid") => { if !devs.is_empty() { builder.create_cluster(cluster_type.as_ref().unwrap(), &devs[..]); } devs.clear(); cluster_type = Some("raid") }, Some(ref dev) => { if cluster_type == None { 
builder.create_single(dev); } else { devs.push(dev); } } } } builder.format() } struct Builder { clusters: Vec<ClusterProxy>, name: String, properties: Vec<Property>, rt: Runtime, zone_size: Option<NonZeroU64> } impl Builder { pub fn new(name: String, propstrings: Vec<&str>, zone_size: Option<NonZeroU64>, rt: Runtime) -> Self { let clusters = Vec::new(); let properties = propstrings.into_iter() .map(|ps| { Property::try_from(ps).unwrap_or_else(|_e| { eprintln!("Invalid property specification {}", ps); std::process::exit(2); }) }) .collect::<Vec<_>>(); Builder{clusters, name, properties, rt, zone_size} } pub fn create_cluster(&mut self, vtype: &str, devs: &[&str]) { match vtype { "mirror" => self.create_mirror(devs), "raid" => self.create_raid(devs), _ => panic!("Unsupported vdev type {}", vtype) } } pub fn create_mirror(&mut self, devs: &[&str]) { let k = devs.len() as i16; let f = devs.len() as i16 - 1; self.do_create_cluster(k, f, &devs[2..]) } pub fn create_raid(&mut self, devs: &[&str]) { let k = i16::from_str_radix(devs[0], 10) .expect("Disks per stripe must be an integer"); let f = i16::from_str_radix(devs[1], 10) .expect("Disks per stripe must be an integer"); self.do_create_cluster(k, f, &devs[2..]) } pub fn create_single(&mut self, dev: &str) { self.do_create_cluster(1, 0, &[&dev]) } fn do_create_cluster(&mut self, k: i16, f: i16, devs: &[&str]) { let zone_size = self.zone_size; let c = self.rt.block_on(future::lazy(move || { Pool::create_cluster(None, k, zone_size, f, devs) })).unwrap(); self.clusters.push(c); } pub fn format(&mut self) { let name = self.name.clone(); let clusters = self.clusters.drain(..).collect(); let db = self.rt.block_on(future::lazy(|| { Pool::create(name, clusters) .map(|pool| { let cache = Arc::new(Mutex::new(Cache::with_capacity(1000))); let ddml = Arc::new(DDML::new(pool, cache.clone())); let idml = Arc::new(IDML::create(ddml, cache)); let task_executor = TaskExecutor::current(); Database::create(idml, task_executor) }) 
})).unwrap(); let props = self.properties.clone(); self.rt.block_on(future::lazy(|| { db.new_fs(props) .and_then(|_tree_id| db.sync_transaction()) })).unwrap(); } } pub fn main(args: &clap::ArgMatches) { match args.subcommand() { ("create", Some(create_args)) => create(create_args), _ => { println!("Error: subcommand required\n{}", args.usage()); std::process::exit(2); }, } } } fn main() { let app = clap::App::new("bfffs") .version(crate_version!()) .subcommand(clap::SubCommand::with_name("check") .about("Consistency check") .arg(clap::Arg::with_name("name") .help("Pool name") .required(true) ).arg(clap::Arg::with_name("disks") .multiple(true) .required(true) ) ).subcommand(clap::SubCommand::with_name("debug") .about("Debugging tools") .subcommand(clap::SubCommand::with_name("dump") .about("Dump internal filesystem information") .arg(clap::Arg::with_name("fsm") .help("Dump the Free Space Map") .long("fsm") .short("f") ).arg(clap::Arg::with_name("tree") .help("Dump the file system tree") .long("tree") .short("t") ).arg(clap::Arg::with_name("name") .help("Pool name") .required(true) ).arg(clap::Arg::with_name("disks") .multiple(true) .required(true) ) ) ).subcommand(clap::SubCommand::with_name("pool") .about("create, destroy, and modify storage pools") .subcommand(clap::SubCommand::with_name("create") .about("create a new storage pool") .arg(clap::Arg::with_name("zone_size") .help("Simulated Zone size in MB") .long("zone_size") .takes_value(true) ).arg(clap::Arg::with_name("property") .help("Dataset properties, comma delimited") .short("o") .takes_value(true) .multiple(true) .require_delimiter(true) ).arg(clap::Arg::with_name("name") .help("Pool name") .required(true) ).arg(clap::Arg::with_name("vdev") .multiple(true) .required(true) ) ) ); let matches = app.get_matches(); match matches.subcommand() { ("check", Some(args)) => check::main(args), ("debug", Some(args)) => debug::main(args), ("pool", Some(args)) => pool::main(args), _ => { println!("Error: subcommand 
required\n{}", matches.usage()); std::process::exit(2); }, } }
use bfffs::common::{ database::TreeID, device_manager::DevManager, property::Property }; use clap::crate_version; use futures::future; use std::{ path::Path, process::exit, sync::Arc }; use tokio::{ executor::current_thread::TaskExecutor, runtime::current_thread::Runtime }; mod check { use super::*; pub fn main(args: &clap::ArgMatches) { let poolname = args.value_of("name").unwrap().to_owned(); let disks = args.values_of("disks").unwrap(); let dev_manager = DevManager::default(); for dev in disks.map(str::to_string) { dev_manager.taste(dev); } let mut rt = tokio_io_pool::Runtime::new(); let handle = rt.handle().clone(); let db = Arc::new(rt.block_on(future::lazy(move || { dev_manager.import_by_name(poolname, handle) .unwrap_or_else(|_e| { eprintln!("Error: pool not found"); exit(1); }) })).unwrap()); rt.block_on(future::lazy(move || { db.check() })).unwrap(); } } mod debug { use super::*; use tokio::runtime::current_thread::Runtime; fn dump_fsm<P: AsRef<Path>, S: AsRef<str>>(poolname: S, disks: &[P]) { let dev_manager = DevManager::default(); for disk in disks { dev_manager.taste(disk); } let uuid = dev_manager.importable_pools().iter() .filter(|(name, _uuid)| { *name == poolname.as_ref() }).nth(0).unwrap().1; let mut rt = Runtime::new().unwrap(); let clusters = rt.block_on(future::lazy(move || { dev_manager.import_clusters(uuid) })).unwrap(); for c in clusters { println!("{}", c.dump_fsm()); } } fn dump_tree<P: AsRef<Path>>(poolname: String, disks: &[P]) { let poolname2 = poolname.to_owned(); let dev_manager = DevManager::default(); for disk in disks { dev_manager.taste(disk); } let mut rt = tokio_io_pool::Runtime::new(); let handle = rt.handle().clone(); let db = Arc::new(rt.block_on(future::lazy(move || { dev_manager.import_by_name(poolname2, handle) .unwrap_or_else(|_e| { eprintln!("Error: pool not found"); exit(1); }) })).unwrap()); let tree_id = TreeID::Fs(0); db.dump(&mut std::io::stdout(), tree_id).unwrap() } pub fn main(args: &clap::ArgMatches) { match 
args.subcommand() { ("dump", Some(args)) => { let poolname = args.value_of("name").unwrap(); let disks = args.values_of("disks").unwrap().collect::<Vec<&str>>(); if args.is_present("fsm") { dump_fsm(&poolname, &disks[..]); } if args.is_present("tree") { dump_tree(poolname.to_string(), &disks[..]); } }, _ => { println!("Error: subcommand required\n{}", args.usage()); std::process::exit(2); }, } } } mod pool { use bfffs::common::BYTES_PER_LBA; use bfffs::common::cache::Cache; use bfffs::common::database::*; use bfffs::common::ddml::DDML; use bfffs::common::idml::IDML; use bfffs::common::pool::{ClusterProxy, Pool}; use futures::Future; use std::{ convert::TryFrom, num::NonZeroU64, str::FromStr, sync::Mutex }; use super::*; fn create(args: &clap::ArgMatches) { let rt = Runtime::new().unwrap(); let name = args.value_of("name").unwrap().to_owned(); let zone_size = args.value_of("zone_size") .map(|s| { let lbas = u64::from_str(s) .expect("zone_size must be a decimal integer") * 1024 * 1024 / (BYTES_PER_LBA as u64); NonZeroU64::new(lbas).expect("zone_size may not be zero") }); let propstrings = if let Some(it) = args.values_of("property") { it.collect::<Vec<_>>() } else { Vec::new() }; let mut builder = Builder::new(name, propstrings, zone_size, rt); let mut vdev_tokens = args.values_of("vdev").unwrap(); let mut cluster_type = None; let mut devs = vec![]; loop { let next = vdev_tokens.next(); match next { None => { if !devs.is_empty() { match cluster_type { Some("mirror") => builder.create_mirror(&devs[..]), Some("raid") => builder.create_raid(&devs[..]), None => assert!(devs.is_empty()), _ => unreachable!() } } break; }, Some("mirror") => { if !devs.is_empty() { builder.create_cluster(cluster_type.as_ref().unwrap(), &devs[..]); } devs.clear(); cluster_type = Some("mirror") }, Some("raid") => { if !devs.is_empty() { builder.create_cluster(cluster_type.as_ref().unwrap(), &devs[..]); } devs.clear(); cluster_type = Some("raid") }, Some(ref dev) => { if cluster_type == None { 
builder.create_single(dev); } else { devs.push(dev); } } } } builder.format() } struct Builder { clusters: Vec<ClusterProxy>, name: String, properties: Vec<Property>, rt: Runtime, zone_size: Option<NonZeroU64> } impl Builder { pub fn new(name: String, propstrings: Vec<&str>, zone_size: Option<NonZeroU64>, rt: Runtime) -> Self { let clusters = Vec::new(); let properties = propstrings.into_iter() .map(|ps| { Property::try_from(ps).unwrap_or_else(|_e| { eprintln!("Invalid property specification {}", ps); std::process::exit(2); }) }) .collect::<Vec<_>>(); Builder{clusters, name, properties, rt, zone_size} } pub fn create_cluster(&mut self, vtype: &str, devs: &[&str]) { match vtype { "mirror" => self.create_mirror(devs), "raid" => self.create_raid(devs), _ => panic!("Unsupported vdev type {}", vtype) } } pub fn create_mirror(&mut self, devs: &[&str]) { let k = devs.len() as i16; let f = devs.len() as i16 - 1; self.do_create_cluster(k, f, &devs[2..]) } pub fn create_raid(&mut self, devs: &[&str]) { let k = i16::from_str_radix(devs[0], 10) .expect("Disks per stripe must be an integer"); let f = i16::from_str_radix(devs[1], 10) .expect("Disks per stripe must be an integer"); self.do_create_cluster(k, f, &devs[2..]) } pub fn create_single(&mut self, dev: &str) { self.do_create_cluster(1, 0, &[&dev]) } fn do_create_cluster(&mut self, k: i16, f: i16, devs: &[&str]) { let zone_size = self.zone_size; let c = self.rt.block_on(future::lazy(move || { Pool::create_cluster(None, k, zone_size, f, devs) })).unwrap(); self.clusters.push(c); } pub fn format(&mut self) { let name = self.name.clone(); let clusters = self.clusters.drain(..).collect(); let db = self.rt.block_on(future::lazy(|| { Pool::create(name, clusters) .map(|pool| { let cache = Arc::new(Mutex::new(Cache::with_capacity(1000))); let ddml = Arc::new(DDML::new(pool, cache.clone())); let idml = Arc::new(IDML::create(ddml, cache)); let task_executor = TaskExecutor::current(); Database::create(idml, task_executor) }) 
})).unwrap(); let props = self.properties.clone(); self.rt.block_on(future::lazy(|| { db.new_fs(props) .and_then(|_tree_id| db.sync_transaction()) })).unwrap(); } } pub fn main(args: &clap::ArgMatches) { match args.subcommand() { ("create", Some(create_args)) => create(create_args), _ => { println!("Error: subcommand required\n{}", args.usage()); std::process::exit(2); }, } } } fn main() { let app = clap::App::new("bfffs") .version(crate_version!()) .subcommand(clap::SubCommand::with_name("check") .about("Consistency check") .arg(clap::Arg::with_name("name") .help("Pool name") .required(true) ).arg(clap::Arg::with_name("disks") .multiple(true) .required(true) ) ).subcommand(clap::SubCommand::with_name("debug") .about("Debugging tools") .subcommand(clap::SubCommand::with_name("dump") .about("Dump internal filesystem information") .arg(clap::Arg::with_name("fsm") .help("Dump the Free Space Map") .long("fsm") .short("f") ).arg(clap::Arg::with_name("tree") .help("Dump the file system tree") .long("tree") .short("t") ).arg(clap::Arg::with_name("name") .help("Pool name") .required(true) ).arg(clap::Arg::with_name("disks") .multiple(true) .required(true) ) ) ).subcommand(clap::SubCommand::with_name("pool") .about("create, destroy, and modify storage pools") .subcommand(clap::SubCommand::with_name("create") .about("create a new storage pool") .arg(clap::Arg::with_name("zone_size") .help("Simulated Zone size in MB") .long("zone_size") .takes_value(true) ).arg(clap::Arg::with_name("property") .help("Dataset properties, comma delimited") .short("o") .takes_value(true) .multiple(true) .require_delimiter(true) ).arg(clap::Arg::with_name("name") .help("Pool name") .required(true) ).arg(clap::Arg::with_name("vdev") .multiple(true) .required(true) ) ) ); let matches = app.get_matches();
}
match matches.subcommand() { ("check", Some(args)) => check::main(args), ("debug", Some(args)) => debug::main(args), ("pool", Some(args)) => pool::main(args), _ => { println!("Error: subcommand required\n{}", matches.usage()); std::process::exit(2); }, }
if_condition
[ { "content": "/// Create a raid-like `Vdev` from its components.\n\n///\n\n///\n\n/// * `chunksize`: RAID chunksize in LBAs, if specified. This is the\n\n/// largest amount of data that will be read/written to\n\n/// a single device before the `Locator` switches to the\n\n/// next device.\n\n/// * `disks_per_stripe`: Number of data plus parity chunks in each\n\n/// self-contained RAID stripe. Must be less than or\n\n/// equal to the number of disks in `paths`.\n\n/// * `lbas_per_zone`: If specified, this many LBAs will be assigned to\n\n/// simulated zones on devices that don't have\n\n/// native zones.\n\n/// * `redundancy`: Degree of RAID redundancy. Up to this many\n\n/// disks may fail before the array becomes\n\n/// inoperable.\n\n/// * `paths`: Slice of pathnames of files and/or devices\n\npub fn create<P>(chunksize: Option<NonZeroU64>, disks_per_stripe: i16,\n\n lbas_per_zone: Option<NonZeroU64>, redundancy: i16,\n\n mut paths: Vec<P>) -> Rc<dyn VdevRaidApi>\n\n where P: AsRef<Path> + 'static\n\n{\n\n if paths.len() == 1 {\n\n assert_eq!(disks_per_stripe, 1);\n\n assert_eq!(redundancy, 0);\n\n Rc::new(VdevOneDisk::create(lbas_per_zone, paths.pop().unwrap()))\n\n } else {\n\n Rc::new(VdevRaid::create(chunksize, disks_per_stripe, lbas_per_zone,\n\n redundancy, paths))\n\n }\n\n}\n\n\n", "file_path": "bfffs/src/common/raid/mod.rs", "rank": 0, "score": 478027.1991485404 }, { "content": "/// Open some kind of RAID `Vdev` from its components `Vdev`s.\n\n///\n\n/// # Parameters\n\n///\n\n/// * `uuid`: Uuid of the desired `Vdev`, if present. If `None`,\n\n/// then it will not be verified.\n\n/// * `combined`: An array of pairs of `VdevBlock`s and their\n\n/// associated `LabelReader`. 
The labels of each will be\n\n/// verified.\n\npub fn open(uuid: Option<Uuid>, combined: Vec<(VdevBlock, LabelReader)>)\n\n -> (Rc<dyn VdevRaidApi>, LabelReader)\n\n{\n\n let mut label_pair = None;\n\n let all_blockdevs = combined.into_iter()\n\n .map(|(vdev_block, mut label_reader)| {\n\n let label: Label = label_reader.deserialize().unwrap();\n\n if let Some(u) = uuid {\n\n assert_eq!(u, label.uuid(), \"Opening disk from wrong cluster\");\n\n }\n\n if label_pair.is_none() {\n\n label_pair = Some((label, label_reader));\n\n }\n\n (vdev_block.uuid(), vdev_block)\n\n }).collect::<BTreeMap<Uuid, VdevBlock>>();\n\n let (label, label_reader) = label_pair.unwrap();\n\n let vdev = match label {\n\n Label::Raid(l) => {\n\n Rc::new(VdevRaid::open(l, all_blockdevs)) as Rc<dyn VdevRaidApi>\n\n },\n\n Label::OneDisk(l) => {\n\n Rc::new(VdevOneDisk::open(l, all_blockdevs)) as Rc<dyn VdevRaidApi>\n\n },\n\n };\n\n (vdev, label_reader)\n\n}\n\n\n\n#[cfg(test)]\n\nmock!{\n\n pub VdevRaid {}\n", "file_path": "bfffs/src/common/raid/mod.rs", "rank": 1, "score": 334820.7671662166 }, { "content": " let tree: Tree<DRP, DDML, u32, f32> = Tree::from_str(ddml, false, r#\"\n\n---\n\nheight: 3\n\nlimits:\n\n min_int_fanout: 2\n\n max_int_fanout: 5\n\n min_leaf_fanout: 2\n\n max_leaf_fanout: 5\n\n _max_size: 4194304\n\nroot:\n\n key: 0\n\n txgs:\n\n start: 8\n\n end: 42\n\n ptr:\n\n Mem:\n\n Int:\n\n children:\n\n - key: 0\n\n txgs:\n", "file_path": "bfffs/src/common/tree/tree/tests/clean_zone.rs", "rank": 2, "score": 275498.2781188216 }, { "content": "/// Private trait bound for functions that can be used as callbacks for\n\n/// Fs::create\n\ntype CreateCallback = fn(&Arc<ReadWriteFilesystem>, u64, u64)\n\n -> Box<dyn Future<Item=(), Error=Error> + Send + 'static>;\n\n\n", "file_path": "bfffs/src/common/fs.rs", "rank": 3, "score": 275133.8043138485 }, { "content": "#[test]\n\nfn open_zone_zero_fill_wasted_stripes() {\n\n let k = 5;\n\n let f = 1;\n\n const CHUNKSIZE : LbaT = 1;\n\n let zl0 
= (1, 32);\n\n let zl1 = (32, 64);\n\n\n\n let mut blockdevs = Vec::<VdevBlock>::new();\n\n\n\n let bd = |gap_chunks: LbaT| {\n\n let mut bd = VdevBlock::default();\n\n bd.expect_size()\n\n .return_const(262_144u64);\n\n bd.expect_lba2zone()\n\n .with(eq(1))\n\n .return_const(Some(0));\n\n bd.expect_zone_limits()\n\n .with(eq(0))\n\n .return_const(zl0);\n\n bd.expect_zone_limits()\n", "file_path": "bfffs/src/common/raid/vdev_raid.rs", "rank": 4, "score": 272048.8265811781 }, { "content": "/// How many LBAs should be reserved for each spacemap?\n\npub fn spacemap_space(nzones: u64) -> LbaT {\n\n div_roundup(nzones, SPACEMAP_ZONES_PER_LBA as u64)\n\n}\n\n\n\n/// Used to read successive structs out of the label\n\npub struct LabelReader {\n\n cursor: io::Cursor<DivBuf>\n\n}\n\n\n\nimpl<'de> LabelReader {\n\n /// Attempt to read a `T` out of the label\n\n pub fn deserialize<T>(&mut self) -> bincode::Result<T>\n\n where T: DeserializeOwned\n\n {\n\n bincode::deserialize_from(&mut self.cursor)\n\n }\n\n\n\n /// Construct a `LabelReader` using the raw buffer read from disk\n\n pub fn from_dbs(buffer: DivBufShared) -> Result<Self, Error> {\n\n let db = buffer.try_const().unwrap();\n", "file_path": "bfffs/src/common/label.rs", "rank": 5, "score": 266390.65759095777 }, { "content": "fn experiment<F>(nelems: u64, save: bool, mut f: F)\n\n where F: FnMut(u64) -> u64 + Send + 'static\n\n{\n\n const INODE: u64 = 2;\n\n\n\n let mut rt = Runtime::new();\n\n let next_lba = Arc::new(AtomicU64::default());\n\n let alloct_ddml = Arc::new(FakeDDML::new(&\"alloct\", next_lba.clone(),\n\n save));\n\n let ridt_ddml = Arc::new(FakeDDML::new(&\"ridt\", next_lba.clone(), save));\n\n let data_ddml = Arc::new(FakeDDML::new(&\"data\", next_lba, false));\n\n let idml = Arc::new(FakeIDML::new(&\"fs\", alloct_ddml.clone(), data_ddml,\n\n ridt_ddml.clone(), save));\n\n let idml2 = idml.clone();\n\n let idml3 = idml.clone();\n\n let idml4 = idml.clone();\n\n let tree = Arc::new(\n\n Tree::<RID, 
FakeIDML, FSKey, FSValue<RID>>::create(idml2, false, 9.00,\n\n 1.61)\n\n );\n", "file_path": "bfffs/examples/fanout.rs", "rank": 6, "score": 258257.23148352158 }, { "content": "#[test]\n\nfn debug() {\n\n let label = Label {\n\n uuid: Uuid::new_v4(),\n\n chunksize: 1,\n\n disks_per_stripe: 2,\n\n redundancy: 1,\n\n layout_algorithm: LayoutAlgorithm::PrimeS,\n\n children: vec![Uuid::new_v4(), Uuid::new_v4(), Uuid::new_v4()]\n\n };\n\n format!(\"{:?}\", label);\n\n}\n\n\n\ntest_suite! {\n\n // Test basic layout properties\n\n name basic;\n\n\n\n use super::*;\n\n use mockall::PredicateBooleanExt;\n\n use pretty_assertions::assert_eq;\n\n\n", "file_path": "bfffs/src/common/raid/vdev_raid.rs", "rank": 7, "score": 255560.19021762558 }, { "content": "#[derive(Debug)]\n\nstruct FreeSpaceMap {\n\n /// Which Zones have been modified since the last Cluster::flush?\n\n dirty: FixedBitSet,\n\n\n\n /// Stores the set of empty zones with id less than zones.len(). All zones\n\n /// with id greater than or equal to zones.len() are implicitly empty\n\n empty_zones: BTreeSet<ZoneT>,\n\n\n\n /// Currently open zones\n\n open_zones: BTreeMap<ZoneT, OpenZone>,\n\n\n\n /// Total number of zones in the vdev\n\n total_zones: ZoneT,\n\n\n\n /// `Vec` of all zones in the Vdev. Any zones past the end of the `Vec` are\n\n /// implicitly Empty. Any zones whose index is present in `empty_zones` are\n\n /// also Empty. Any zones whose index is also present in `open_zones` are\n\n /// implicitly open. 
All other zones are Closed.\n\n zones: Vec<Zone>,\n\n}\n", "file_path": "bfffs/src/common/cluster.rs", "rank": 8, "score": 254040.1109552788 }, { "content": "/// Result type of `Tree::clean_zone`\n\nstruct CleanZonePass1<D, K, V>\n\n where D: DML<Addr=ddml::DRP>,\n\n K: Key,\n\n V: Value\n\n{\n\n inner: RefCell<CleanZonePass1Inner<D, K, V>>\n\n}\n\n\n", "file_path": "bfffs/src/common/tree/tree/mod.rs", "rank": 9, "score": 252416.85400634748 }, { "content": "struct CleanZonePass1Inner<D, K, V>\n\n where D: DML<Addr=ddml::DRP>,\n\n K: Key,\n\n V: Value\n\n{\n\n /// If Some, then there are more nodes in the Tree to query\n\n cursor: Option<K>,\n\n\n\n /// Data that can be returned immediately\n\n data: VecDeque<NodeId<K>>,\n\n\n\n /// Level of the Tree that this object is meant to clean. Leaves are 0.\n\n echelon: u8,\n\n\n\n /// Used when an operation must block\n\n last_fut: Option<Box<dyn Future<Item=(VecDeque<NodeId<K>>, Option<K>),\n\n Error=Error> + Send>>,\n\n\n\n /// Range of addresses to move\n\n pbas: Range<PBA>,\n\n\n\n /// Range of transactions that may contain PBAs of interest\n\n txgs: Range<TxgT>,\n\n\n\n /// Handle to the tree's inner struct\n\n inner: Arc<Inner<ddml::DRP, D, K, V>>,\n\n}\n\n\n", "file_path": "bfffs/src/common/tree/tree/mod.rs", "rank": 10, "score": 247699.02605459443 }, { "content": "/// Tokio-File requires boxed `DivBufMuts`, but the upper layers of BFFFS don't.\n\n/// Take care of the mismatch here, by wrapping `DivBufMut` in a new struct\n\nstruct IoVecMutContainer(IoVecMut);\n\nimpl Borrow<[u8]> for IoVecMutContainer {\n\n fn borrow(&self) -> &[u8] {\n\n self.0.as_ref()\n\n }\n\n}\n\nimpl BorrowMut<[u8]> for IoVecMutContainer {\n\n fn borrow_mut(&mut self) -> &mut [u8] {\n\n self.0.as_mut()\n\n }\n\n}\n\n\n\nimpl Vdev for VdevFile {\n\n fn lba2zone(&self, lba: LbaT) -> Option<ZoneT> {\n\n if lba >= self.reserved_space() {\n\n Some((lba / (self.lbas_per_zone as u64)) as ZoneT)\n\n } else {\n\n None\n\n }\n\n }\n", "file_path": 
"bfffs/src/common/vdev_file.rs", "rank": 11, "score": 247110.89632488322 }, { "content": "fn open_db(rt: &mut Runtime, path: PathBuf) -> Database {\n\n rt.block_on(future::lazy(|| {\n\n VdevFile::open(path)\n\n .and_then(|(leaf, reader)| {\n\n let block = VdevBlock::new(leaf);\n\n let (vr, lr) = raid::open(None, vec![(block, reader)]);\n\n cluster::Cluster::open(vr)\n\n .map(move |cluster| (cluster, lr))\n\n }).and_then(move |(cluster, reader)|{\n\n let proxy = ClusterProxy::new(cluster);\n\n Pool::open(None, vec![(proxy, reader)])\n\n }).map(|(pool, reader)| {\n\n let cache = Cache::with_capacity(1_000_000);\n\n let arc_cache = Arc::new(Mutex::new(cache));\n\n let ddml = Arc::new(DDML::open(pool, arc_cache.clone()));\n\n let (idml, reader) = IDML::open(ddml, arc_cache, reader);\n\n let te = TaskExecutor::current();\n\n Database::open(Arc::new(idml), te, reader)\n\n })\n\n })).unwrap()\n", "file_path": "bfffs/tests/common/database.rs", "rank": 12, "score": 246343.02212243804 }, { "content": " let tree: Tree<DRP, DDML, u32, u32> = Tree::from_str(ddml, false, r#\"\n\n---\n\nheight: 1\n\nlimits:\n\n min_int_fanout: 2\n\n max_int_fanout: 5\n\n min_leaf_fanout: 2\n\n max_leaf_fanout: 5\n\n _max_size: 4194304\n\nroot:\n\n key: 0\n\n txgs:\n\n start: 0\n\n end: 42\n\n ptr:\n\n Addr:\n\n pba:\n\n cluster: 2\n\n lba: 0x0102030405060708\n\n compressed: true\n\n lsize: 78\n\n csize: 36\n\n checksum: 0x0807060504030201\n\n\"#);\n\n\n\n assert_eq!(expected, tree.serialize().unwrap())\n\n}\n\n\n\n// If the tree isn't dirty, then there's nothing to do\n", "file_path": "bfffs/src/common/tree/tree/tests/io.rs", "rank": 13, "score": 245689.74768375437 }, { "content": "#[test]\n\nfn debug() {\n\n let label = Label {\n\n uuid: Uuid::new_v4(),\n\n child: Uuid::new_v4()\n\n };\n\n format!(\"{:?}\", label);\n\n}\n\n\n\n}\n", "file_path": "bfffs/src/common/raid/vdev_onedisk.rs", "rank": 14, "score": 243422.60954846285 }, { "content": "#[test]\n\nfn flush_zone_empty_stripe_buffer() {\n\n 
let k = 3;\n\n let f = 1;\n\n const CHUNKSIZE: LbaT = 2;\n\n let zl0 = (1, 60_000);\n\n let zl1 = (60_000, 120_000);\n\n\n\n let mut blockdevs = Vec::<VdevBlock>::new();\n\n\n\n let bd = || {\n\n let mut bd = VdevBlock::default();\n\n bd.expect_size()\n\n .return_const(262_144u64);\n\n bd.expect_lba2zone()\n\n .with(eq(60_000))\n\n .return_const(Some(1));\n\n bd.expect_zone_limits()\n\n .with(eq(0))\n\n .return_const(zl0);\n\n bd.expect_zone_limits()\n", "file_path": "bfffs/src/common/raid/vdev_raid.rs", "rank": 15, "score": 239226.26533686792 }, { "content": "#[test]\n\nfn open_zone_zero_fill_wasted_chunks() {\n\n let k = 5;\n\n let f = 1;\n\n const CHUNKSIZE : LbaT = 5;\n\n let zl0 = (1, 32);\n\n let zl1 = (32, 64);\n\n\n\n let mut blockdevs = Vec::<VdevBlock>::new();\n\n\n\n let bd = || {\n\n let mut bd = VdevBlock::default();\n\n bd.expect_size()\n\n .return_const(262_144u64);\n\n bd.expect_lba2zone()\n\n .with(eq(1))\n\n .return_const(Some(0));\n\n bd.expect_zone_limits()\n\n .with(eq(0))\n\n .return_const(zl0);\n\n bd.expect_zone_limits()\n", "file_path": "bfffs/src/common/raid/vdev_raid.rs", "rank": 16, "score": 234279.21673733654 }, { "content": "/// The return type of `Tree::check_r`\n\ntype CheckR<K> = Box<dyn Future<Item=(bool, RangeInclusive<K>, Range<TxgT>),\n\n Error=Error> + Send>;\n\n\n\n\n\n/// In-memory representation of a COW B+-Tree\n\n///\n\n/// # Generic Parameters\n\n///\n\n/// *`K`: Key type. 
Must be ordered and copyable; should be compact\n\n/// *`V`: Value type in the leaves.\n\npub struct Tree<A: Addr, D: DML<Addr=A>, K: Key, V: Value> {\n\n i: Arc<Inner<A, D, K, V>>\n\n}\n\n\n\nimpl<A, D, K, V> Tree<A, D, K, V>\n\n where A: Addr,\n\n D: DML<Addr=A> + 'static,\n\n K: Key,\n\n V: Value\n\n{\n", "file_path": "bfffs/src/common/tree/tree/mod.rs", "rank": 17, "score": 222920.31076263182 }, { "content": "/// Arguments to Tree::get_dirty_nodes which are static\n\nstruct GetDirtyNodeParams<K: Key> {\n\n /// Starting point for search\n\n key: K,\n\n /// Range of PBAs considered dirty\n\n pbas: Range<PBA>,\n\n /// Range of transactions in which the dirty PBAs were written\n\n txgs: Range<TxgT>,\n\n /// Level of the tree to search\n\n echelon: u8\n\n}\n\n\n\nimpl<D, K, V> CleanZonePass1<D, K, V>\n\n where D: DML<Addr=ddml::DRP>,\n\n K: Key,\n\n V: Value\n\n {\n\n\n\n fn new(inner: Arc<Inner<ddml::DRP, D, K, V>>, pbas: Range<PBA>,\n\n txgs: Range<TxgT>, echelon: u8)\n\n -> CleanZonePass1<D, K, V>\n", "file_path": "bfffs/src/common/tree/tree/mod.rs", "rank": 18, "score": 219969.7431212337 }, { "content": "#[test]\n\nfn debug() {\n\n format!(\"{:?}\", Cache::with_capacity(100));\n\n let dbs = DivBufShared::from(Vec::new());\n\n let entry = LruEntry{buf: Box::new(dbs), lru: None, mru: None};\n\n assert_eq!(\"LruEntry { lru: None, mru: None }\", format!(\"{:?}\", entry));\n\n}\n\n\n", "file_path": "bfffs/src/common/cache/cache.rs", "rank": 19, "score": 217258.53445483895 }, { "content": "/// In-memory cache of data that has not yet been flushed the Block devices.\n\n///\n\n/// Typically there will be one of these for each open zone.\n\nstruct StripeBuffer {\n\n /// Cache of `IoVec`s that haven't yet been flushed\n\n buf: SGList,\n\n\n\n /// The LBA of the beginning of the cached stripe\n\n lba: LbaT,\n\n\n\n /// Amount of data in a full stripe, in bytes\n\n stripesize: usize,\n\n}\n\n\n\nimpl StripeBuffer {\n\n /// Store more data into this `StripeBuffer`.\n\n 
///\n\n /// Don't overflow one row. Do zero-pad up to the next full LBA. Return\n\n /// the unused part of the `IoVec`\n\n pub fn fill(&mut self, mut iovec: IoVec) -> IoVec {\n\n let want_bytes = self.stripesize - self.len();\n\n let have_bytes = iovec.len();\n\n let get_bytes = cmp::min(want_bytes, have_bytes);\n", "file_path": "bfffs/src/common/raid/vdev_raid.rs", "rank": 20, "score": 216653.036002849 }, { "content": "#[derive(Serialize, Deserialize, Debug, Hash)]\n\nstruct ZoneOnDisk {\n\n /// The number of blocks that have been allocated in each Zone. If zero,\n\n /// then the zone is empty. If `u32::max_value()`, then the zone is closed.\n\n allocated_blocks: u32,\n\n\n\n /// Number of LBAs that have been freed from this `Zone` since it was\n\n /// opened.\n\n freed_blocks: u32,\n\n\n\n /// The range of transactions that have been written to this Zone. The\n\n /// start is inclusive, and the end is exclusive\n\n ///\n\n /// The end is invalid for open zones, and both start and end are invalid\n\n /// for empty zones.\n\n txgs: Range<TxgT>\n\n}\n\n\n\n/// Persists the `FreeSpaceMap` in the reserved region of the disk. 
Each one of\n\n/// these structures stores the allocations of as many zones as can fit into\n\n/// 4KB.\n", "file_path": "bfffs/src/common/cluster.rs", "rank": 21, "score": 215820.9771206997 }, { "content": " let mut tree: Tree<u32, MockDML, u32, u32> = Tree::from_str(dml, false, r#\"\n\n---\n\nheight: 2\n\nlimits:\n\n min_int_fanout: 2\n\n max_int_fanout: 5\n\n min_leaf_fanout: 2\n\n max_leaf_fanout: 5\n\n _max_size: 4194304\n\nroot:\n\n key: 0\n\n txgs:\n\n start: 30\n\n end: 42\n\n ptr:\n\n Mem:\n\n Int:\n\n children:\n\n - key: 0\n\n txgs:\n", "file_path": "bfffs/src/common/tree/tree/tests/txg.rs", "rank": 22, "score": 214369.55093559378 }, { "content": " let mut tree: Tree<u32, MockDML, u32, u32> = Tree::from_str(dml, false, r#\"\n\n---\n\nheight: 1\n\nlimits:\n\n min_int_fanout: 2\n\n max_int_fanout: 5\n\n min_leaf_fanout: 2\n\n max_leaf_fanout: 5\n\n _max_size: 4194304\n\nroot:\n\n key: 0\n\n txgs:\n\n start: 0\n\n end: 1\n\n ptr:\n\n Mem:\n\n Leaf:\n\n items:\n\n 0: 100\n\n 1: 200\n", "file_path": "bfffs/src/common/tree/tree/tests/io.rs", "rank": 23, "score": 214369.55093559378 }, { "content": "#[test]\n\nfn erase_zone() {\n\n let k = 3;\n\n let f = 1;\n\n const CHUNKSIZE: LbaT = 2;\n\n let zl0 = (1, 60_000);\n\n let zl1 = (60_000, 120_000);\n\n\n\n let mut blockdevs = Vec::<VdevBlock>::new();\n\n\n\n let bd = || {\n\n let mut bd = VdevBlock::default();\n\n bd.expect_size()\n\n .return_const(262_144u64);\n\n bd.expect_lba2zone()\n\n .with(eq(60_000))\n\n .return_const(Some(1));\n\n bd.expect_zone_limits()\n\n .with(eq(0))\n\n .return_const(zl0);\n\n bd.expect_zone_limits()\n", "file_path": "bfffs/src/common/raid/vdev_raid.rs", "rank": 24, "score": 213549.65633257298 }, { "content": "#[derive(Clone, Debug)]\n\nstruct Zone {\n\n /// Number of LBAs that have been freed from this `Zone` since it was\n\n /// opened.\n\n pub freed_blocks: u32,\n\n /// Total number of LBAs in the `Zone`. 
It may never change while the\n\n /// `Zone` is open or full.\n\n pub total_blocks: u32,\n\n /// The range of transactions that have been written to this Zone. The\n\n /// start is inclusive, and the end is exclusive\n\n ///\n\n /// The end is invalid for open zones, and both start and end are invalid\n\n /// for empty zones.\n\n pub txgs: Range<TxgT>\n\n}\n\n\n\nimpl Default for Zone {\n\n fn default() -> Self {\n\n let txgs = TxgT::from(0)..TxgT::from(0);\n\n Zone{freed_blocks: 0, total_blocks: 0, txgs}\n\n }\n", "file_path": "bfffs/src/common/cluster.rs", "rank": 25, "score": 212441.17299390252 }, { "content": "#[test]\n\n#[should_panic]\n\nfn create_stripesize_too_big() {\n\n // VdevRaid::create should panic if the stripesize is greater than the\n\n // number of disks\n\n let len = 1 << 30; // 1 GB\n\n let num_disks = 3;\n\n let stripesize = 4;\n\n let redundancy = 1;\n\n let tempdir = t!(TempDir::new(\"create_stripesize_too_big\"));\n\n let paths = (0..num_disks).map(|i| {\n\n let fname = format!(\"{}/vdev.{}\", tempdir.path().display(), i);\n\n let file = t!(fs::File::create(&fname));\n\n t!(file.set_len(len));\n\n fname\n\n }).collect::<Vec<_>>();\n\n VdevRaid::create(None, stripesize, None, redundancy, paths);\n\n}\n\n\n\ntest_suite! 
{\n\n // These tests use real VdevBlock and VdevLeaf objects\n\n name vdev_raid;\n", "file_path": "bfffs/tests/common/raid/vdev_raid.rs", "rank": 26, "score": 209026.12965711422 }, { "content": "#[test]\n\n#[should_panic]\n\nfn create_redundancy_too_big() {\n\n // VdevRaid::create should panic if the stripesize is greater than the\n\n // number of disks\n\n let len = 1 << 30; // 1 GB\n\n let num_disks = 5;\n\n let stripesize = 3;\n\n let redundancy = 3;\n\n let tempdir = t!(TempDir::new(\"create_redundancy_too_big\"));\n\n let paths = (0..num_disks).map(|i| {\n\n let fname = format!(\"{}/vdev.{}\", tempdir.path().display(), i);\n\n let file = t!(fs::File::create(&fname));\n\n t!(file.set_len(len));\n\n fname\n\n }).collect::<Vec<_>>();\n\n VdevRaid::create(None, stripesize, None, redundancy, paths);\n\n}\n\n\n", "file_path": "bfffs/tests/common/raid/vdev_raid.rs", "rank": 27, "score": 209026.12965711422 }, { "content": "#[test]\n\nfn read_at_one_stripe() {\n\n let k = 3;\n\n let f = 1;\n\n const CHUNKSIZE : LbaT = 2;\n\n\n\n let mut blockdevs = Vec::<VdevBlock>::new();\n\n\n\n let mut m0 = VdevBlock::default();\n\n m0.expect_size()\n\n .return_const(262_144u64);\n\n m0.expect_open_zone()\n\n .once()\n\n .with(eq(65536))\n\n .return_once(|_| Box::new(future::ok::<(), Error>(())));\n\n m0.expect_optimum_queue_depth()\n\n .return_const(10u32);\n\n m0.expect_zone_limits()\n\n .with(eq(0))\n\n .return_const((1, 65536));\n\n m0.expect_zone_limits()\n", "file_path": "bfffs/src/common/raid/vdev_raid.rs", "rank": 28, "score": 209010.7246267319 }, { "content": "#[test]\n\nfn stripe_buffer_pad() {\n\n let zero_region_lbas = (ZERO_REGION.len() / BYTES_PER_LBA) as LbaT;\n\n let stripesize = 2 * zero_region_lbas + 1;\n\n let mut sb = StripeBuffer::new(102, stripesize);\n\n let dbs = DivBufShared::from(vec![0; BYTES_PER_LBA]);\n\n let db = dbs.try_const().unwrap();\n\n assert!(sb.fill(db).is_empty());\n\n assert!(sb.pad() == stripesize - 1);\n\n let sglist = sb.pop();\n\n 
assert_eq!(sglist.len(), 3);\n\n assert_eq!(sglist.iter().map(|v| v.len()).sum::<usize>(),\n\n stripesize as usize * BYTES_PER_LBA);\n\n}\n\n\n", "file_path": "bfffs/src/common/raid/vdev_raid.rs", "rank": 29, "score": 209010.7246267319 }, { "content": "#[test]\n\nfn stripe_buffer_reset() {\n\n let mut sb = StripeBuffer::new(96, 6);\n\n assert_eq!(sb.lba(), 96);\n\n sb.reset(108);\n\n assert_eq!(sb.lba(), 108);\n\n}\n\n\n", "file_path": "bfffs/src/common/raid/vdev_raid.rs", "rank": 30, "score": 209010.7246267319 }, { "content": "#[test]\n\nfn stripe_buffer_empty() {\n\n let mut sb = StripeBuffer::new(96, 6);\n\n assert!(!sb.is_full());\n\n assert!(sb.is_empty());\n\n assert_eq!(sb.lba(), 96);\n\n assert_eq!(sb.next_lba(), 96);\n\n assert_eq!(sb.len(), 0);\n\n assert!(sb.peek().is_empty());\n\n let sglist = sb.pop();\n\n assert!(sglist.is_empty());\n\n // Adding an empty iovec should change nothing, but add a useless sender\n\n let dbs = DivBufShared::from(vec![0; 4096]);\n\n let db = dbs.try_const().unwrap();\n\n let db0 = db.slice(0, 0);\n\n assert!(sb.fill(db0).is_empty());\n\n assert!(!sb.is_full());\n\n assert!(sb.is_empty());\n\n assert_eq!(sb.lba(), 96);\n\n assert_eq!(sb.next_lba(), 96);\n\n assert_eq!(sb.len(), 0);\n\n assert!(sb.peek().is_empty());\n\n let sglist = sb.pop();\n\n assert!(sglist.is_empty());\n\n}\n\n\n", "file_path": "bfffs/src/common/raid/vdev_raid.rs", "rank": 31, "score": 209010.7246267319 }, { "content": "#[test]\n\nfn write_at_one_stripe() {\n\n let k = 3;\n\n let f = 1;\n\n const CHUNKSIZE : LbaT = 2;\n\n\n\n let mut blockdevs = Vec::<VdevBlock>::new();\n\n let mut m0 = VdevBlock::default();\n\n m0.expect_size()\n\n .return_const(262_144u64);\n\n m0.expect_lba2zone()\n\n .with(eq(65536))\n\n .return_const(Some(1));\n\n m0.expect_open_zone()\n\n .with(eq(65536))\n\n .once()\n\n .return_once(|_| Box::new(future::ok::<(), Error>(())));\n\n m0.expect_optimum_queue_depth()\n\n .return_const(10u32);\n\n m0.expect_zone_limits()\n\n 
.with(eq(0))\n", "file_path": "bfffs/src/common/raid/vdev_raid.rs", "rank": 32, "score": 209010.7246267319 }, { "content": "fn setup() -> (tokio_io_pool::Runtime, Database, TreeID) {\n\n let mut rt = tokio_io_pool::Builder::default()\n\n .pool_size(1)\n\n .build()\n\n .unwrap();\n\n let mut ds = ReadOnlyFilesystem::default();\n\n ds.expect_last_key()\n\n .once()\n\n .returning(|| {\n\n let root_inode_key = FSKey::new(1, ObjKey::Inode);\n\n Box::new(Ok(Some(root_inode_key)).into_future())\n\n });\n\n let mut db = Database::default();\n\n db.expect_new_fs()\n\n .once()\n\n .returning(|_| Box::new(Ok(TreeID::Fs(0)).into_future()));\n\n db.expect_fsread_inner()\n\n .once()\n\n .return_once(move |_| ds);\n\n db.expect_get_prop()\n\n .times(2)\n\n .returning(|_tree_id, propname| {\n\n let prop = Property::default_value(propname);\n\n let source = PropertySource::Default;\n\n Box::new(Ok((prop, source)).into_future())\n\n });\n\n let tree_id = rt.block_on(db.new_fs(Vec::new())).unwrap();\n\n (rt, db, tree_id)\n\n}\n\n\n", "file_path": "bfffs/src/common/fs.rs", "rank": 33, "score": 208997.46207464815 }, { "content": "#[test]\n\nfn open_zone_reopen() {\n\n let k = 2;\n\n let f = 1;\n\n const CHUNKSIZE: LbaT = 1;\n\n let zl0 = (1, 4096);\n\n let zl1 = (4096, 8192);\n\n\n\n let mut blockdevs = Vec::<VdevBlock>::new();\n\n let bd = || {\n\n let mut bd = VdevBlock::default();\n\n bd.expect_size()\n\n .return_const(262_144u64);\n\n bd.expect_lba2zone()\n\n .with(eq(1))\n\n .return_const(Some(0));\n\n bd.expect_lba2zone()\n\n .with(eq(4196))\n\n .return_const(Some(1));\n\n bd.expect_zone_limits()\n\n .with(eq(0))\n", "file_path": "bfffs/src/common/raid/vdev_raid.rs", "rank": 34, "score": 208946.61751825418 }, { "content": "#[test]\n\nfn flush_zone_closed() {\n\n let k = 3;\n\n let f = 1;\n\n const CHUNKSIZE: LbaT = 2;\n\n let zl0 = (1, 60_000);\n\n\n\n let mut blockdevs = Vec::<VdevBlock>::new();\n\n\n\n let bd = || {\n\n let mut bd = VdevBlock::default();\n\n 
bd.expect_size()\n\n .return_const(262_144u64);\n\n bd.expect_lba2zone()\n\n .with(eq(60_000))\n\n .return_const(Some(1));\n\n bd.expect_zone_limits()\n\n .with(eq(0))\n\n .return_const(zl0);\n\n bd.expect_optimum_queue_depth()\n\n .return_const(10u32);\n", "file_path": "bfffs/src/common/raid/vdev_raid.rs", "rank": 35, "score": 208946.61751825418 }, { "content": "#[test]\n\nfn write_at_and_flush_zone() {\n\n let k = 3;\n\n let f = 1;\n\n const CHUNKSIZE: LbaT = 2;\n\n let zl0 = (1, 60_000);\n\n let zl1 = (60_000, 120_000);\n\n\n\n let mut blockdevs = Vec::<VdevBlock>::new();\n\n\n\n let bd = || {\n\n let mut bd = VdevBlock::default();\n\n bd.expect_size()\n\n .return_const(262_144u64);\n\n bd.expect_lba2zone()\n\n .with(eq(60_000))\n\n .return_const(Some(1));\n\n bd.expect_zone_limits()\n\n .with(eq(0))\n\n .return_const(zl0);\n\n bd.expect_zone_limits()\n", "file_path": "bfffs/src/common/raid/vdev_raid.rs", "rank": 36, "score": 208946.61751825418 }, { "content": "#[test]\n\nfn dump() {\n\n let mut mock = MockDML::new();\n\n let addrl0 = 0;\n\n let addrl1 = 1;\n\n let addrl2 = 2;\n\n let addri0 = 3;\n\n let addri2 = 4;\n\n let addrl4 = 5;\n\n let addrl5 = 6;\n\n\n\n let children0 = vec![\n\n IntElem::new(0u32, TxgT::from(8)..TxgT::from(9), TreePtr::Addr(addrl0)),\n\n IntElem::new(2u32, TxgT::from(8)..TxgT::from(9), TreePtr::Addr(addrl1)),\n\n ];\n\n let intnode0 = Arc::new(Node::new(NodeData::Int(IntData::new(children0))));\n\n\n\n let mut ld0 = LeafData::default();\n\n ld0.insert(0, 0.0);\n\n ld0.insert(1, 1.0);\n\n let leafnode0 = Arc::new(Node::new(NodeData::Leaf(ld0)));\n", "file_path": "bfffs/src/common/tree/tree/tests/io.rs", "rank": 37, "score": 206631.67248255733 }, { "content": "#[allow(clippy::len_without_is_empty)]\n\npub trait Cacheable: Any + Debug + Send + Sync {\n\n /// Deserialize a buffer into Self. 
Will panic if deserialization fails.\n\n fn deserialize(dbs: DivBufShared) -> Self where Self: Sized;\n\n\n\n /// Returns true if the two `Cacheable`s' contents are equal\n\n // This doesn't implement PartialEq because the rhs is &Cacheable instead of\n\n // &Self.\n\n fn eq(&self, other: &dyn Cacheable) -> bool;\n\n\n\n /// How much space does this object use in the Cache?\n\n fn len(&self) -> usize;\n\n\n\n /// Return a read-only handle to this object.\n\n ///\n\n /// As long as this handle is alive, the object will not be evicted from\n\n /// cache.\n\n fn make_ref(&self) -> Box<dyn CacheRef>;\n\n}\n\n\n\ndowncast!(dyn Cacheable);\n\n\n", "file_path": "bfffs/src/common/cache/mod.rs", "rank": 38, "score": 205088.13027047904 }, { "content": "#[test]\n\n#[should_panic(expected = \"A StripeBuffer with data cannot be moved\")]\n\nfn stripe_buffer_reset_nonempty() {\n\n let mut sb = StripeBuffer::new(96, 6);\n\n let dbs = DivBufShared::from(vec![0; 4096]);\n\n let db = dbs.try_const().unwrap();\n\n let _ = sb.fill(db);\n\n sb.reset(108);\n\n}\n\n\n", "file_path": "bfffs/src/common/raid/vdev_raid.rs", "rank": 39, "score": 204648.43530749998 }, { "content": "#[test]\n\nfn stripe_buffer_one_iovec() {\n\n let mut sb = StripeBuffer::new(96, 6);\n\n let dbs = DivBufShared::from(vec![0; 4096]);\n\n let db = dbs.try_const().unwrap();\n\n assert!(sb.fill(db).is_empty());\n\n assert!(!sb.is_full());\n\n assert!(!sb.is_empty());\n\n assert_eq!(sb.lba(), 96);\n\n assert_eq!(sb.next_lba(), 97);\n\n assert_eq!(sb.len(), 4096);\n\n {\n\n let sglist = sb.peek();\n\n assert_eq!(sglist.len(), 1);\n\n assert_eq!(&sglist[0][..], &vec![0; 4096][..]);\n\n }\n\n let sglist = sb.pop();\n\n assert_eq!(sglist.len(), 1);\n\n assert_eq!(&sglist[0][..], &vec![0; 4096][..]);\n\n}\n\n\n\n// Pad a StripeBuffer that is larger than the ZERO_REGION\n", "file_path": "bfffs/src/common/raid/vdev_raid.rs", "rank": 40, "score": 204643.06812803954 }, { "content": "#[test]\n\nfn stripe_buffer_fill_when_full() 
{\n\n let dbs0 = DivBufShared::from(vec![0; 24576]);\n\n let db0 = dbs0.try_const().unwrap();\n\n let dbs1 = DivBufShared::from(vec![1; 4096]);\n\n let db1 = dbs1.try_const().unwrap();\n\n {\n\n let mut sb = StripeBuffer::new(96, 6);\n\n assert!(sb.fill(db0).is_empty());\n\n assert_eq!(sb.fill(db1).len(), 4096);\n\n assert!(sb.is_full());\n\n assert_eq!(sb.lba(), 96);\n\n assert_eq!(sb.next_lba(), 102);\n\n assert_eq!(sb.len(), 24576);\n\n }\n\n}\n\n\n", "file_path": "bfffs/src/common/raid/vdev_raid.rs", "rank": 41, "score": 204643.06812803954 }, { "content": "#[test]\n\nfn stripe_buffer_two_iovecs() {\n\n let mut sb = StripeBuffer::new(96, 6);\n\n let dbs0 = DivBufShared::from(vec![0; 8192]);\n\n let db0 = dbs0.try_const().unwrap();\n\n assert!(sb.fill(db0).is_empty());\n\n let dbs1 = DivBufShared::from(vec![1; 4096]);\n\n let db1 = dbs1.try_const().unwrap();\n\n assert!(sb.fill(db1).is_empty());\n\n assert!(!sb.is_full());\n\n assert!(!sb.is_empty());\n\n assert_eq!(sb.lba(), 96);\n\n assert_eq!(sb.next_lba(), 99);\n\n assert_eq!(sb.len(), 12288);\n\n {\n\n let sglist = sb.peek();\n\n assert_eq!(sglist.len(), 2);\n\n assert_eq!(&sglist[0][..], &vec![0; 8192][..]);\n\n assert_eq!(&sglist[1][..], &vec![1; 4096][..]);\n\n }\n\n let sglist = sb.pop();\n\n assert_eq!(sglist.len(), 2);\n\n assert_eq!(&sglist[0][..], &vec![0; 8192][..]);\n\n assert_eq!(&sglist[1][..], &vec![1; 4096][..]);\n\n}\n\n\n", "file_path": "bfffs/src/common/raid/vdev_raid.rs", "rank": 42, "score": 204643.06812803954 }, { "content": "/// Tokio-File requires boxed `DivBufs`, but the upper layers of BFFFS don't.\n\n/// Take care of the mismatch here, by wrapping `DivBuf` in a new struct\n\nstruct IoVecContainer(IoVec);\n\nimpl Borrow<[u8]> for IoVecContainer {\n\n fn borrow(&self) -> &[u8] {\n\n self.0.as_ref()\n\n }\n\n}\n\n\n", "file_path": "bfffs/src/common/vdev_file.rs", "rank": 43, "score": 204390.8984071039 }, { "content": "/// The public interface for all RAID Vdevs. 
All Vdevs that slot beneath a\n\n/// cluster must implement this API.\n\npub trait VdevRaidApi : Vdev + 'static {\n\n /// Asynchronously erase a zone on a RAID device\n\n ///\n\n /// # Parameters\n\n /// - `zone`: The target zone ID\n\n fn erase_zone(&self, zone: ZoneT) -> BoxVdevFut;\n\n\n\n /// Asynchronously finish a zone on a RAID device\n\n ///\n\n /// # Parameters\n\n /// - `zone`: The target zone ID\n\n fn finish_zone(&self, zone: ZoneT) -> BoxVdevFut;\n\n\n\n /// Asynchronously flush any data cached in the RAID device\n\n ///\n\n /// # Returns\n\n ///\n\n /// The number of LBAs that were zero-filled, and `Future` that will\n\n /// complete when the zone's contents are fully written\n\n fn flush_zone(&self, zone: ZoneT) -> (LbaT, BoxVdevFut);\n", "file_path": "bfffs/src/common/raid/vdev_raid_api.rs", "rank": 44, "score": 202860.13994791495 }, { "content": "#[test]\n\nfn debug() {\n\n assert_eq!(\"DirEntry(0)\", format!(\"{:?}\", ObjKey::DirEntry(0)));\n\n assert_eq!(\"Extent(0)\", format!(\"{:?}\", ObjKey::Extent(0)));\n\n assert_eq!(\"ExtAttr(0)\", format!(\"{:?}\", ObjKey::ExtAttr(0)));\n\n assert_eq!(\"Property(Atime)\",\n\n format!(\"{:?}\", ObjKey::Property(PropertyName::Atime)));\n\n}\n\n\n", "file_path": "bfffs/src/common/fs_tree.rs", "rank": 45, "score": 202512.81856092738 }, { "content": "#[test]\n\nfn debug() {\n\n let items: BTreeMap<u32, u32> = BTreeMap::new();\n\n let node: Arc<Node<DRP, u32, u32>> =\n\n Arc::new(Node(RwLock::new(NodeData::Leaf(LeafData{items}))));\n\n format!(\"{:?}\", node);\n\n\n\n let mut children: Vec<IntElem<u32, u32, u32>> = Vec::new();\n\n let txgs = TxgT(1)..TxgT(3);\n\n children.push(IntElem::new(0, txgs, TreePtr::Addr(4)));\n\n format!(\"{:?}\", NodeData::Int(IntData{children}));\n\n}\n\n\n", "file_path": "bfffs/src/common/tree/node.rs", "rank": 46, "score": 202512.81856092738 }, { "content": "#[test]\n\nfn stripe_buffer_two_iovecs_overflow() {\n\n let mut sb = StripeBuffer::new(96, 6);\n\n let dbs0 = 
DivBufShared::from(vec![0; 16384]);\n\n let db0 = dbs0.try_const().unwrap();\n\n assert!(sb.fill(db0).is_empty());\n\n let dbs1 = DivBufShared::from(vec![1; 16384]);\n\n let db1 = dbs1.try_const().unwrap();\n\n assert_eq!(sb.fill(db1).len(), 8192);\n\n assert!(sb.is_full());\n\n assert!(!sb.is_empty());\n\n assert_eq!(sb.lba(), 96);\n\n assert_eq!(sb.next_lba(), 102);\n\n assert_eq!(sb.len(), 24576);\n\n {\n\n let sglist = sb.peek();\n\n assert_eq!(sglist.len(), 2);\n\n assert_eq!(&sglist[0][..], &vec![0; 16384][..]);\n\n assert_eq!(&sglist[1][..], &vec![1; 8192][..]);\n\n }\n\n let sglist = sb.pop();\n", "file_path": "bfffs/src/common/raid/vdev_raid.rs", "rank": 47, "score": 200494.41339299167 }, { "content": "#[derive(Debug, Deserialize, PartialEq, Serialize)]\n\n#[serde(bound(deserialize = \"K: DeserializeOwned\"))]\n\n#[cfg(test)]\n\nstruct InnerLiteral<A: Addr, K: Key, V: Value> {\n\n height: u64,\n\n limits: Limits,\n\n root: IntElem<A, K, V>\n\n}\n\n\n", "file_path": "bfffs/src/common/tree/tree/mod.rs", "rank": 48, "score": 199584.4736122784 }, { "content": "/// Checksum an `SGList`.\n\n///\n\n/// Unfortunately, hashing a slice is not the same thing as hashing that slice's\n\n/// contents. The former includes the length of the hash. That is deliberate\n\n/// so that, for example, the tuples `([0, 1], [2, 3])` and `([0], [1, 2, 3])`\n\n/// have different hashes. That property is desirable for example when storing\n\n/// tuples in a hash table. 
But for our purposes, we *want* such tuples to\n\n/// compare the same so that a record will have the same hash whether it's\n\n/// written as a single `iovec` or an `SGList`.\n\n///\n\n/// Ideally we would just `impl Hash for SGList`, but that's not allowed on type\n\n/// aliases.\n\n///\n\n/// See Also [Rust issue 5237](https://github.com/rust-lang/rust/issues/5257)\n\npub fn checksum_sglist<T, H>(sglist: &[T], hasher: &mut H)\n\n where T: AsRef<[u8]>, H: Hasher {\n\n\n\n for buf in sglist {\n\n let s: &[u8] = buf.as_ref();\n\n hasher.write(s);\n\n }\n\n}\n\n\n", "file_path": "bfffs/src/common/mod.rs", "rank": 49, "score": 197571.56200613175 }, { "content": "#[derive(Clone, Debug, Deserialize, PartialEq, Serialize)]\n\n#[serde(bound(deserialize = \"A: DeserializeOwned\"))]\n\nstruct InnerOnDisk<A: Addr> {\n\n height: u64,\n\n limits: Limits,\n\n root: A,\n\n txgs: Range<TxgT>,\n\n}\n\n\n\n#[cfg(test)]\n\nimpl<A: Addr + Default> Default for InnerOnDisk<A> {\n\n fn default() -> Self {\n\n InnerOnDisk {\n\n height: u64::default(),\n\n limits: Limits::default(),\n\n root: A::default(),\n\n txgs: TxgT(0)..TxgT(1),\n\n }\n\n }\n\n}\n\n\n\n/// The serialized, on-disk representation of a `Tree`\n", "file_path": "bfffs/src/common/tree/mod.rs", "rank": 50, "score": 195384.28158500814 }, { "content": "/// Initialize tables for fast Erasure Code encode and decode.\n\n///\n\n/// Generates the expanded tables needed for fast encode or decode for erasure\n\n/// codes on blocks of data. 32bytes is generated for each input coefficient.\n\n///\n\n/// # Parameters\n\n///\n\n/// - `k`: The number of vector sources or rows in the generator matrix for\n\n/// coding.\n\n/// - `f`: The number of output vectors to concurrently encode/decode.\n\n/// - `a`: Pointer to sets of arrays of input coefficients used to encode\n\n/// or decode data. Must be of size `\n\n/// - `gftbls`: Pointer to start of space for concatenated output tables\n\n/// generated from input coefficients. 
Must be of size `32×k×f`.\n\npub fn ec_init_tables(k: u32, f: u32, a: &[u8], gftbls: &mut [u8]) {\n\n assert_eq!(a.len(), (f * k) as usize);\n\n assert_eq!(gftbls.len(), (32 * f * k) as usize);\n\n unsafe {\n\n // Note: isa-l defines a as non-const, even though the implementation\n\n // doesn't modify it.\n\n ffi::ec_init_tables(k as c_int, f as c_int, a.as_ptr() as *mut c_uchar,\n\n gftbls.as_mut_ptr() as *mut c_uchar);\n\n }\n\n}\n\n\n", "file_path": "isa-l/src/lib.rs", "rank": 51, "score": 192319.44041715903 }, { "content": "#[derive(Debug)]\n\n#[derive(Serialize)]\n\nstruct Inner<A: Addr, D: DML, K: Key, V: Value> {\n\n /// Tree height. 1 if the Tree consists of a single Leaf node.\n\n // Use atomics so it can be modified from an immutable reference. Accesses\n\n // should be very rare, so performance is not a concern.\n\n #[serde(with = \"atomic_u64_serializer\")]\n\n height: AtomicU64,\n\n limits: Limits,\n\n /// Root node\n\n #[serde(with = \"tree_root_serializer\")]\n\n root: RwLock<IntElem<A, K, V>>,\n\n #[serde(skip)]\n\n dml: Arc<D>,\n\n /// Compression function used for interior nodes\n\n #[serde(skip)]\n\n int_compressor: Compression,\n\n /// Compression function used for leaves\n\n #[serde(skip)]\n\n leaf_compressor: Compression,\n\n /// Should tree operations assume that access will be mostly sequential in\n\n /// increasing order?\n", "file_path": "bfffs/src/common/tree/tree/mod.rs", "rank": 52, "score": 190767.4404468976 }, { "content": " Tree::from_str(idml, false, r#\"\n\n---\n\nheight: 1\n\nlimits:\n\n min_int_fanout: 2\n\n max_int_fanout: 5\n\n min_leaf_fanout: 2\n\n max_leaf_fanout: 5\n\n _max_size: 4194304\n\nroot:\n\n key: 0\n\n txgs:\n\n start: 0\n\n end: 42\n\n ptr:\n\n Addr:\n\n 1\n\n\"#);\n\n\n\n let typical_tod = typical_tree.serialize().unwrap();\n\n assert_eq!(TreeOnDisk::<RID>::TYPICAL_SIZE,\n\n bincode::serialized_size(&typical_tod).unwrap() as usize);\n\n}\n\n\n\n// Tree::serialize should serialize the Tree::Inner object\n", 
"file_path": "bfffs/src/common/tree/tree/tests/io.rs", "rank": 53, "score": 188574.7423967147 }, { "content": "/// Generate a matrix of coefficients to be used for encoding.\n\n///\n\n/// Vandermonde matrix example of encoding coefficients where high portion of\n\n/// matrix is identity matrix I and lower portion is constructed as 2^{i*(j-k+1)}\n\n/// i:{0,k-1} j:{k,m-1}. Commonly used method for choosing coefficients in\n\n/// erasure encoding but does not guarantee invertable for every sub matrix. For\n\n/// large pairs of m and k it is possible to find cases where the decode matrix\n\n/// chosen from sources and parity is not invertable. Users may want to adjust\n\n/// for certain pairs m and k. If m and k satisfy one of the following\n\n/// inequalities, no adjustment is required:\n\n///\n\n/// - `k <= 3`\n\n/// - `k = 4, m <= 25`\n\n/// - `k = 5, m <= 10`\n\n/// - `k <= 21, m-k = 4`\n\n/// - `m - k <= 3`\n\n///\n\n/// # Parameters\n\n/// - `a`: `[m × k]` array to hold coefficients\n\n/// - `m`: number of rows in matrix corresponding to srcs + parity.\n\n/// - `k`: number of columns in matrix corresponding to srcs.\n\npub fn gf_gen_rs_matrix(a: &mut [u8], m: u32, k: u32) {\n\n assert_eq!(a.len(), (m * k) as usize);\n\n assert!( ( k <= 3 ) ||\n\n ( k == 4 && m <= 25 ) ||\n\n ( k == 5 && m <= 10 ) ||\n\n ( k <= 21 && m - k == 4) ||\n\n ( m - k <= 3 ), \"Matrix not guaranteed to be invertible!\");\n\n unsafe {\n\n ffi::gf_gen_rs_matrix(a.as_mut_ptr() as *mut c_uchar,\n\n m as c_int, k as c_int);\n\n }\n\n}\n\n\n", "file_path": "isa-l/src/lib.rs", "rank": 54, "score": 186484.5344227219 }, { "content": "/// Generate a Cauchy matrix of coefficients to be used for encoding.\n\n///\n\n/// Cauchy matrix example of encoding coefficients where high portion of matrix\n\n/// is identity matrix I and lower portion is constructed as 1/(i + j) | i != j,\n\n/// i:{0,k-1} j:{k,m-1}. 
Any sub-matrix of a Cauchy matrix should be invertable.\n\n///\n\n/// # Parameters\n\n///\n\n/// - `a`: `[m × k]` array to hold coefficients\n\n/// - `m`: number of rows in matrix corresponding to srcs + parity.\n\n/// - `k`: number of columns in matrix corresponding to srcs.\n\npub fn gf_gen_cauchy1_matrix(a: &mut [u8], m: u32, k: u32) {\n\n assert_eq!(a.len(), (m * k) as usize);\n\n unsafe {\n\n ffi::gf_gen_cauchy1_matrix(a.as_mut_ptr() as *mut c_uchar,\n\n m as c_int, k as c_int);\n\n }\n\n}\n\n\n", "file_path": "isa-l/src/lib.rs", "rank": 55, "score": 186480.96903662072 }, { "content": "/// Container for the IDML's private trees\n\nstruct Trees {\n\n /// Allocation table. The reverse of `ridt`.\n\n ///\n\n /// Maps disk addresses back to record IDs. Used for operations like\n\n /// garbage collection and defragmentation.\n\n // TODO: consider a lazy delete strategy to reduce the amount of tree\n\n // activity on pop/delete by deferring alloct removals to the cleaner.\n\n alloct: DTree<PBA, RID>,\n\n\n\n /// Record indirection table. Maps record IDs to disk addresses.\n\n ridt: DTree<RID, RidtEntry>,\n\n}\n\n\n\n/// Indirect Data Management Layer for a single `Pool`\n\npub struct IDML {\n\n cache: Arc<Mutex<Cache>>,\n\n\n\n ddml: Arc<DDML>,\n\n\n\n /// Holds the next RID to allocate. 
They are never reused.\n", "file_path": "bfffs/src/common/idml/idml.rs", "rank": 56, "score": 180348.16475047736 }, { "content": "/// Checksum an `IoVec`\n\n///\n\n/// See also [`checksum_sglist`](fn.checksum_sglist.html) for an explanation of\n\n/// why this function is necessary.\n\npub fn checksum_iovec<T: AsRef<[u8]>, H: Hasher>(iovec: &T, hasher: &mut H) {\n\n hasher.write(iovec.as_ref());\n\n}\n\n\n", "file_path": "bfffs/src/common/mod.rs", "rank": 57, "score": 179048.90762160852 }, { "content": "/// Helper that creates a mock RangeQuery from the vec of items that it should\n\n/// return\n\nfn mock_range_query<K, T, V>(items: Vec<(K, V)>) -> RangeQuery<K, T, V>\n\n where K: Key + Borrow<T>,\n\n T: Ord + Clone + Send,\n\n V: Value\n\n{\n\n let mut rq = RangeQuery::new();\n\n let mut seq = Sequence::new();\n\n for item in items.into_iter() {\n\n rq.expect_poll()\n\n .once()\n\n .in_sequence(&mut seq)\n\n .return_once(|| Ok(Async::Ready(Some(item))));\n\n }\n\n rq.expect_poll()\n\n .once()\n\n .in_sequence(&mut seq)\n\n .return_once(|| Ok(Async::Ready(None)));\n\n rq\n\n}\n\n\n", "file_path": "bfffs/src/common/fs.rs", "rank": 58, "score": 178292.93180881834 }, { "content": "/// A simple primality tester. Optimized for size, not speed\n\nfn is_prime(n: i16) -> bool {\n\n if n <= 1 {\n\n return false;\n\n } else if n <= 3 {\n\n return true;\n\n } else if n % 2 == 0 || n % 3 == 0 {\n\n return false;\n\n }\n\n let mut i = 5;\n\n while i * i <= n {\n\n if n % i == 0 || n % (i + 2) == 0 {\n\n return false;\n\n }\n\n i += 6;\n\n }\n\n true\n\n}\n\n\n", "file_path": "bfffs/src/common/raid/prime_s.rs", "rank": 59, "score": 177887.04071491899 }, { "content": "struct VdevFileFut(AioFut);\n\n\n\nimpl Future for VdevFileFut {\n\n type Item = ();\n\n type Error = Error;\n\n\n\n // aio_write and friends will sometimes return an error synchronously (like\n\n // EAGAIN). 
VdevBlock handles those errors synchronously by calling poll()\n\n // once before spawning the future into the event loop. But that results in\n\n // calling poll again after it returns an error, which is incompatible with\n\n // FuturesExt::{map, map_err}'s implementations. So we have to define a\n\n // custom poll method here, with map's and map_err's functionality inlined.\n\n fn poll(&mut self) -> Poll<(), Error> {\n\n match self.0.poll() {\n\n Ok(Async::Ready(_aio_result)) => Ok(Async::Ready(())),\n\n Ok(Async::NotReady) => Ok(Async::NotReady),\n\n Err(e) => Err(Error::from(e))\n\n }\n\n }\n\n}\n\n\n", "file_path": "bfffs/src/common/vdev_file.rs", "rank": 60, "score": 177101.85033945995 }, { "content": "/// Invert a matrix in GF(2^8)\n\n///\n\n/// # Parameters\n\n///\n\n/// - `input`: input matrix. A two-dimensional square array.\n\n/// - `output`: output matrix such that `[input] × [output] = [I]`\n\n/// - `n`: size of matrix `[n × n]`\n\n///\n\n/// # Returns\n\n///\n\n/// `()` on success, or one of these errors on failure:\n\n/// - `InvalidData`: The input matrix was singular\n\npub fn gf_invert_matrix(input: &[u8], output: &mut [u8],\n\n n: u32) -> Result<(), Error> {\n\n assert_eq!(input.len(), (n * n) as usize);\n\n assert_eq!(output.len(), (n * n) as usize);\n\n if 0 == unsafe {\n\n ffi::gf_invert_matrix(input.as_ptr() as *mut c_uchar,\n\n output.as_mut_ptr() as *mut c_uchar,\n\n n as c_int)\n\n } {\n\n Ok(())\n\n } else {\n\n Err(Error::new(ErrorKind::InvalidData, \"Singular matrix\"))\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod t {\n\n\n\nuse super::*;\n\n\n", "file_path": "isa-l/src/lib.rs", "rank": 61, "score": 175386.35304519022 }, { "content": "#[test]\n\nfn sync_all() {\n\n let k = 3;\n\n let f = 1;\n\n const CHUNKSIZE: LbaT = 2;\n\n let zl0 = (1, 60_000);\n\n\n\n let mut blockdevs = Vec::<VdevBlock>::default();\n\n\n\n let bd = || {\n\n let mut bd = VdevBlock::default();\n\n bd.expect_size().return_const(262_144u64);\n\n bd.expect_zone_limits()\n\n 
.with(eq(0))\n\n .return_const(zl0);\n\n bd.expect_sync_all()\n\n .return_once(|| Box::new(future::ok::<(), Error>(())));\n\n bd.expect_optimum_queue_depth()\n\n .return_const(10u32);\n\n bd\n\n };\n", "file_path": "bfffs/src/common/raid/vdev_raid.rs", "rank": 62, "score": 174972.92129812465 }, { "content": "struct VdevFileLioFut(LioFut);\n\n\n\nimpl Future for VdevFileLioFut {\n\n type Item = ();\n\n type Error = Error;\n\n\n\n // See comments for VdevFileFut::poll\n\n fn poll(&mut self) -> Poll<(), Error>{\n\n match self.0.poll() {\n\n Ok(Async::Ready(mut lio_result_iter)) => {\n\n // We must drain the iterator to free the AioCb resources\n\n lio_result_iter.find(|ref r| r.value.is_err())\n\n .map(|r| Err(Error::from(r.value.unwrap_err())))\n\n .unwrap_or(Ok(Async::Ready(())))\n\n },\n\n Ok(Async::NotReady) => Ok(Async::NotReady),\n\n Err(e) => Err(Error::from(e))\n\n }\n\n }\n\n}\n", "file_path": "bfffs/src/common/vdev_file.rs", "rank": 63, "score": 174134.28064841183 }, { "content": "#[test]\n\n#[cfg(debug_assertions)]\n\n#[should_panic(expected = \"Must call flush_zone before sync_all\")]\n\nfn sync_all_unflushed() {\n\n let k = 3;\n\n let f = 1;\n\n const CHUNKSIZE: LbaT = 2;\n\n let zl0 = (1, 60_000);\n\n let zl1 = (60_000, 120_000);\n\n\n\n let mut blockdevs = Vec::<VdevBlock>::new();\n\n\n\n let bd = || {\n\n let mut bd = VdevBlock::default();\n\n bd.expect_lba2zone()\n\n .with(eq(60_000))\n\n .return_const(Some(1));\n\n bd.expect_size()\n\n .return_const(262_144u64);\n\n bd.expect_zone_limits()\n\n .with(eq(0))\n\n .return_const(zl0);\n\n bd.expect_zone_limits()\n", "file_path": "bfffs/src/common/raid/vdev_raid.rs", "rank": 64, "score": 171407.9681539393 }, { "content": "#[derive(Serialize, Deserialize, Debug)]\n\nstruct SpacemapOnDisk {\n\n /// MetroHash64 self-checksum. 
Includes the index of this `SpacemapOnDisk`\n\n /// within the overall spacemap, to detect misdirected writes.\n\n checksum: u64,\n\n zones: Vec<ZoneOnDisk>\n\n}\n\n\n\nimpl SpacemapOnDisk {\n\n fn deserialize(block: LbaT, buf: &DivBuf)\n\n -> bincode::Result<Result<Self, Error>>\n\n {\n\n bincode::deserialize::<SpacemapOnDisk>(&buf[..])\n\n .map(|sod| {\n\n let mut hasher = MetroHash64::new();\n\n hasher.write_u64(block);\n\n sod.zones.hash(&mut hasher);\n\n if hasher.finish() == sod.checksum {\n\n Ok(sod)\n\n } else {\n\n Err(Error::ECKSUM)\n", "file_path": "bfffs/src/common/cluster.rs", "rank": 65, "score": 171023.10514661775 }, { "content": "#[derive(Clone, Copy, Debug)]\n\nstruct OpenZone {\n\n /// First LBA of the `Zone`. It may never change while the `Zone` is open\n\n /// or full.\n\n pub start: LbaT,\n\n /// Number of LBAs that have been allocated within this `Zone` so far.\n\n pub allocated_blocks: u32,\n\n}\n\n\n\nimpl OpenZone {\n\n /// Returns the next LBA within this `Zone` that should be allocated\n\n fn write_pointer(&self) -> LbaT {\n\n self.start + LbaT::from(self.allocated_blocks)\n\n }\n\n\n\n /// Mark some space in this Zone as wasted, usually because `VdevRaid`\n\n /// zero-filled them.\n\n fn waste_space(&mut self, space: LbaT) {\n\n self.allocated_blocks += space as u32;\n\n }\n\n}\n\n\n\n/// In-core representation of the free-space map. Used for deciding when to\n\n/// open new zones, close old ones, and reclaim full ones.\n\n// Common operations include:\n\n// * Choose an open zone to write X bytes, or open a new one\n\n// * Choose a zone to reclaim\n\n// * Find a zone by Zone ID, to rebuild it\n\n// * Find all zones modified in a certain txg range\n", "file_path": "bfffs/src/common/cluster.rs", "rank": 66, "score": 170943.34723367594 }, { "content": "/// RPC server for `Cluster` objects\n\n///\n\n/// As `Cluster` is neither `Send` nor `Sync` it cannot be directly accessed\n\n/// from other threads. The `ClusterServer` fixes that. 
Bound to a single\n\n/// thread, it owns a `Cluster` and serves RPC requests from its own and other\n\n/// threads.\n\nstruct ClusterServer {\n\n cluster: Cluster\n\n}\n\n\n\nimpl ClusterServer {\n\n fn new(cluster: Cluster) -> Self {\n\n ClusterServer{cluster}\n\n }\n\n\n\n /// Start the `ClusterServer` in the background, in the current thread\n\n fn run(cs: Rc<ClusterServer>, rx: mpsc::UnboundedReceiver<Rpc>) {\n\n let fut = future::lazy(move || {\n\n // In Futures 0.2, use for_each_concurrent instead\n\n rx.for_each(move |rpc| cs.dispatch(rpc))\n\n // If we get here, the ClusterProxy was dropped\n\n });\n\n executor::current_thread::TaskExecutor::current().spawn_local(\n\n Box::new(fut)\n\n ).unwrap();\n\n }\n", "file_path": "bfffs/src/common/pool.rs", "rank": 67, "score": 170885.6130845645 }, { "content": " let tree: Tree<u32, MockDML, u32, f32> = Tree::from_str(dml, false, r#\"\n\n---\n\nheight: 2\n\nlimits:\n\n min_int_fanout: 2\n\n max_int_fanout: 5\n\n min_leaf_fanout: 2\n\n max_leaf_fanout: 5\n\n _max_size: 4194304\n\nroot:\n\n key: 0\n\n txgs:\n\n start: 41\n\n end: 42\n\n ptr:\n\n Mem:\n\n Int:\n\n children:\n\n - key: 0\n\n txgs:\n", "file_path": "bfffs/src/common/tree/tree/tests/io.rs", "rank": 68, "score": 170507.5905605415 }, { "content": " let tree: Tree<u32, MockDML, u32, f32> = Tree::from_str(dml, false, r#\"\n\n---\n\nheight: 2\n\nlimits:\n\n min_int_fanout: 2\n\n max_int_fanout: 5\n\n min_leaf_fanout: 2\n\n max_leaf_fanout: 5\n\n _max_size: 4194304\n\nroot:\n\n key: 0\n\n txgs:\n\n start: 41\n\n end: 42\n\n ptr:\n\n Mem:\n\n Int:\n\n children:\n\n - key: 1\n\n txgs:\n", "file_path": "bfffs/src/common/tree/tree/tests/in_mem.rs", "rank": 69, "score": 170507.5905605415 }, { "content": " let tree: Tree<u32, MockDML, u32, u32> = Tree::from_str(dml, false, r#\"\n\n---\n\nheight: 1\n\nlimits:\n\n min_int_fanout: 2\n\n max_int_fanout: 5\n\n min_leaf_fanout: 2\n\n max_leaf_fanout: 5\n\n _max_size: 4194304\n\nroot:\n\n key: 0\n\n txgs:\n\n start: 0\n\n 
end: 42\n\n ptr:\n\n Addr: 0\n\n\"#);\n\n\n\n let r = tree.flush(TxgT::from(42)).wait();\n\n assert!(r.is_ok());\n\n}\n\n\n\n/// Sync a Tree with both dirty Int nodes and dirty Leaf nodes\n", "file_path": "bfffs/src/common/tree/tree/tests/io.rs", "rank": 70, "score": 170507.5905605415 }, { "content": " let tree: Tree<u32, MockDML, u32, u32> = Tree::from_str(dml, false, r#\"\n\n---\n\nheight: 3\n\nlimits:\n\n min_int_fanout: 3\n\n max_int_fanout: 7\n\n min_leaf_fanout: 3\n\n max_leaf_fanout: 7\n\n _max_size: 4194304\n\nroot:\n\n key: 0\n\n txgs:\n\n start: 2\n\n end: 14\n\n ptr:\n\n Mem:\n\n Int:\n\n children:\n\n - key: 2762\n\n txgs:\n", "file_path": "bfffs/src/common/tree/tree/tests/in_mem.rs", "rank": 71, "score": 170507.5905605415 }, { "content": " {\n\n let file = t!(fs::File::create(&filename));\n\n t!(file.set_len(len));\n\n }\n\n let paths = [filename.clone()];\n\n let mut rt = Runtime::new().unwrap();\n\n let pool = rt.block_on(future::lazy(|| {\n\n let cs = NonZeroU64::new(1);\n\n let cluster = Pool::create_cluster(cs, 1, None, 0, &paths);\n\n let clusters = vec![cluster];\n\n future::join_all(clusters)\n\n .map_err(|_| unreachable!())\n\n .and_then(|clusters|\n\n Pool::create(POOLNAME.to_string(), clusters)\n\n )\n\n })).unwrap();\n\n let cache = Arc::new(Mutex::new(Cache::with_capacity(1000)));\n\n let ddml = Arc::new(DDML::new(pool, cache.clone()));\n\n let idml = Arc::new(IDML::create(ddml, cache));\n\n let db = rt.block_on(future::lazy(|| {\n", "file_path": "bfffs/tests/common/database.rs", "rank": 72, "score": 80.17406427720765 }, { "content": " let mut rt = Runtime::new();\n\n let handle = rt.handle().clone();\n\n let len = 1 << 30; // 1GB\n\n let tempdir = t!(TempDir::new(\"test_fs\"));\n\n let filename = tempdir.path().join(\"vdev\");\n\n let file = t!(fs::File::create(&filename));\n\n t!(file.set_len(len));\n\n drop(file);\n\n let zone_size = NonZeroU64::new(*self.zone_size);\n\n let db = rt.block_on(future::lazy(move || {\n\n 
Pool::create_cluster(None, 1, zone_size, 0, &[filename])\n\n .map_err(|_| unreachable!())\n\n .and_then(|cluster| {\n\n Pool::create(String::from(\"test_fs\"), vec![cluster])\n\n .map(|pool| {\n\n let cache = Arc::new(\n\n Mutex::new(\n\n Cache::with_capacity(32_000_000)\n\n )\n\n );\n", "file_path": "bfffs/tests/common/fs.rs", "rank": 73, "score": 79.94609532959166 }, { "content": " let db = rt.block_on(future::lazy(move || {\n\n Pool::create_cluster(None, 1, zone_size, 0, &[filename])\n\n .map_err(|_| unreachable!())\n\n .and_then(|cluster| {\n\n Pool::create(String::from(\"test_fs\"), vec![cluster])\n\n .map(|pool| {\n\n let cache = Arc::new(\n\n Mutex::new(\n\n Cache::with_capacity(32_000_000)\n\n )\n\n );\n\n let ddml = Arc::new(DDML::new(pool, cache.clone()));\n\n let idml = IDML::create(ddml, cache);\n\n Arc::new(Database::create(Arc::new(idml), handle))\n\n })\n\n })\n\n })).unwrap();\n\n let handle = rt.handle().clone();\n\n let (db, fs) = rt.block_on(future::lazy(move || {\n\n db.new_fs(Vec::new())\n", "file_path": "bfffs/tests/common/clean_zone.rs", "rank": 74, "score": 78.71921419594874 }, { "content": " let cs = NonZeroU64::new(1);\n\n let lpz = NonZeroU64::new(LBA_PER_ZONE);\n\n let cluster = Pool::create_cluster(cs, 1, lpz, 0, &paths);\n\n let clusters = vec![cluster];\n\n future::join_all(clusters)\n\n .map_err(|_| unreachable!())\n\n .and_then(|clusters|\n\n Pool::create(POOLNAME.to_string(), clusters)\n\n )\n\n })).unwrap();\n\n let cache = Arc::new(Mutex::new(Cache::with_capacity(1_000_000)));\n\n let ddml = Arc::new(DDML::new(pool, cache.clone()));\n\n let idml = IDML::create(ddml, cache);\n\n (rt, idml, tempdir)\n\n }\n\n });\n\n\n\n // When moving the last record from a zone, the allocator should not reopen\n\n // the same zone for its destination\n\n test move_last_record(objects()) {\n", "file_path": "bfffs/tests/common/idml.rs", "rank": 75, "score": 77.60039928025083 }, { "content": " /// * `disks_per_stripe`: Number of data plus parity 
chunks in each\n\n /// self-contained RAID stripe. Must be less than\n\n /// or equal to the number of disks in `paths`.\n\n /// * `lbas_per_zone`: If specified, this many LBAs will be assigned to\n\n /// simulated zones on devices that don't have\n\n /// native zones.\n\n /// * `redundancy`: Degree of RAID redundancy. Up to this many\n\n /// disks may fail before the array becomes\n\n /// inoperable.\n\n /// * `paths`: Slice of pathnames of files and/or devices\n\n pub fn create<P>(chunksize: Option<NonZeroU64>, disks_per_stripe: i16,\n\n lbas_per_zone: Option<NonZeroU64>, redundancy: i16, paths: Vec<P>)\n\n -> Self\n\n where P: AsRef<Path> + 'static\n\n {\n\n let vdev = raid::create(chunksize, disks_per_stripe, lbas_per_zone,\n\n redundancy, paths);\n\n let total_zones = vdev.zones();\n\n let fsm = FreeSpaceMap::new(total_zones);\n\n Cluster::new((fsm, vdev))\n", "file_path": "bfffs/src/common/cluster.rs", "rank": 76, "score": 75.59924605547397 }, { "content": " let mut rt = Runtime::new();\n\n let handle = rt.handle().clone();\n\n let len = 1 << 30; // 1GB\n\n let tempdir = t!(TempDir::new(\"test_fs\"));\n\n let filename = tempdir.path().join(\"vdev\");\n\n let file = t!(fs::File::create(&filename));\n\n t!(file.set_len(len));\n\n drop(file);\n\n let cache = Arc::new(Mutex::new(Cache::with_capacity(1_000_000)));\n\n let cache2 = cache.clone();\n\n let db = rt.block_on(future::lazy(move || {\n\n Pool::create_cluster(None, 1, None, 0, &[filename])\n\n .map_err(|_| unreachable!())\n\n .and_then(|cluster| {\n\n Pool::create(String::from(\"test_fs\"), vec![cluster])\n\n .map(|pool| {\n\n let ddml = Arc::new(DDML::new(pool, cache2.clone()));\n\n let idml = IDML::create(ddml, cache2);\n\n Arc::new(Database::create(Arc::new(idml), handle))\n\n })\n", "file_path": "bfffs/tests/common/fs.rs", "rank": 77, "score": 73.78421260878886 }, { "content": " .collect::<Vec<PathBuf>>();\n\n DefaultExecutor::current().spawn(Box::new(future::lazy(move || {\n\n let c = 
Cluster::create(chunksize, disks_per_stripe,\n\n lbas_per_zone, redundancy, owned_paths);\n\n tx.send(ClusterProxy::new(c)).unwrap();\n\n Ok(())\n\n }))).unwrap();\n\n rx.map_err(|_| panic!(\"Closed Runtime while creating Cluster?\"))\n\n }\n\n\n\n /// Create a new `Pool` from some freshly created `Cluster`s.\n\n ///\n\n /// Must be called from within the context of a Tokio Runtime.\n\n pub fn create(name: String, clusters: Vec<ClusterProxy>)\n\n -> impl Future<Item=Self, Error=Error>\n\n {\n\n Pool::new(name, Uuid::new_v4(), clusters)\n\n }\n\n\n\n pub fn flush(&self, idx: u32) -> impl Future<Item=(), Error=Error> + Send {\n", "file_path": "bfffs/src/common/pool.rs", "rank": 78, "score": 73.32053434071237 }, { "content": " let pool = rt.block_on(future::lazy(|| {\n\n let cs = NonZeroU64::new(1);\n\n let cluster = Pool::create_cluster(cs, 1, None, 0, &paths);\n\n let clusters = vec![cluster];\n\n future::join_all(clusters)\n\n .map_err(|_| unreachable!())\n\n .and_then(|clusters|\n\n Pool::create(POOLNAME.to_string(), clusters)\n\n )\n\n })).unwrap();\n\n let cache = Arc::new(Mutex::new(Cache::with_capacity(1000)));\n\n let ddml = Arc::new(DDML::new(pool, cache.clone()));\n\n let idml = Arc::new(IDML::create(ddml, cache));\n\n (rt, idml, tempdir, filename)\n\n }\n\n });\n\n\n\n // Testing IDML::open with golden labels is too hard, because we need to\n\n // store separate golden labels for each VdevLeaf. 
Instead, we'll just\n\n // check that we can open-after-write\n", "file_path": "bfffs/tests/common/idml.rs", "rank": 79, "score": 72.44409130231575 }, { "content": " let f = *self.f;\n\n let mut rt = Runtime::new().unwrap();\n\n let len = 1 << 30; // 1GB\n\n let tempdir = t!(TempDir::new(\"test_device_manager\"));\n\n let paths = (0..n).map(|i| {\n\n let fname = format!(\"{}/vdev.{}\", tempdir.path().display(), i);\n\n let file = t!(fs::File::create(&fname));\n\n t!(file.set_len(len));\n\n fname\n\n }).collect::<Vec<_>>();\n\n let pathsclone = paths.clone();\n\n let db = rt.block_on(future::lazy(move || {\n\n Pool::create_cluster(None, k, None, f, &paths)\n\n .map_err(|_| unreachable!())\n\n .and_then(|cluster| {\n\n Pool::create(String::from(\"test_device_manager\"),\n\n vec![cluster])\n\n }).map(|pool| {\n\n let cache = Arc::new(Mutex::new(Cache::with_capacity(1000)));\n\n let ddml = Arc::new(DDML::new(pool, cache.clone()));\n", "file_path": "bfffs/tests/common/device_manager.rs", "rank": 80, "score": 69.37242817316807 }, { "content": " ];\n\n\n\n fixture!( objects() -> (Runtime, Pool, TempDir, Vec<String>) {\n\n setup(&mut self) {\n\n let num_disks = 2;\n\n let len = 1 << 26; // 64 MB\n\n let tempdir = t!(TempDir::new(\"test_pool_persistence\"));\n\n let paths = (0..num_disks).map(|i| {\n\n let fname = format!(\"{}/vdev.{}\", tempdir.path().display(), i);\n\n let file = t!(fs::File::create(&fname));\n\n t!(file.set_len(len));\n\n fname\n\n }).collect::<Vec<_>>();\n\n let mut rt = Runtime::new().unwrap();\n\n let pool = rt.block_on(future::lazy(|| {\n\n let clusters = paths.iter().map(|p| {\n\n let cs = NonZeroU64::new(1);\n\n Pool::create_cluster(cs, 1, None, 0, &[p][..])\n\n }).collect::<Vec<_>>();\n\n future::join_all(clusters)\n", "file_path": "bfffs/tests/common/pool.rs", "rank": 81, "score": 69.2747052835941 }, { "content": " path::Path,\n\n sync::{Arc, Mutex}\n\n };\n\n use tempdir::TempDir;\n\n use tokio::runtime::current_thread::Runtime;\n\n\n\n 
fixture!( objects() -> (Runtime, DDML) {\n\n setup(&mut self) {\n\n let len = 1 << 26; // 64 MB\n\n let tempdir = t!(TempDir::new(\"ddml\"));\n\n let filename = tempdir.path().join(\"vdev\");\n\n let file = t!(fs::File::create(&filename));\n\n t!(file.set_len(len));\n\n let mut rt = Runtime::new().unwrap();\n\n let pool = rt.block_on(future::lazy(|| {\n\n let cs = NonZeroU64::new(1);\n\n let clusters = vec![\n\n Pool::create_cluster(cs, 1, None, 0, &[filename][..])\n\n ];\n\n future::join_all(clusters)\n", "file_path": "bfffs/tests/common/ddml.rs", "rank": 82, "score": 68.85138313734873 }, { "content": " fixture!( objects() -> (Runtime, Database, TempDir, PathBuf) {\n\n setup(&mut self) {\n\n let len = 1 << 26; // 64 MB\n\n let tempdir = t!(TempDir::new(\"test_database_persistence\"));\n\n let filename = tempdir.path().join(\"vdev\");\n\n {\n\n let file = t!(fs::File::create(&filename));\n\n t!(file.set_len(len));\n\n }\n\n let paths = [filename.clone()];\n\n let mut rt = Runtime::new().unwrap();\n\n let pool = rt.block_on(future::lazy(|| {\n\n let cs = NonZeroU64::new(1);\n\n let cluster = Pool::create_cluster(cs, 1, None, 0, &paths);\n\n let clusters = vec![cluster];\n\n future::join_all(clusters)\n\n .map_err(|_| unreachable!())\n\n .and_then(|clusters|\n\n Pool::create(POOLNAME.to_string(), clusters)\n\n )\n", "file_path": "bfffs/tests/common/database.rs", "rank": 83, "score": 68.80346463089683 }, { "content": " }\n\n })\n\n }\n\n\n\n fn new(i: u64, v: Vec<ZoneOnDisk>) -> Self {\n\n debug_assert!(v.len() <= SPACEMAP_ZONES_PER_LBA);\n\n let mut hasher = MetroHash64::new();\n\n hasher.write_u64(i);\n\n v.hash(&mut hasher);\n\n SpacemapOnDisk {\n\n checksum: hasher.finish(),\n\n zones: v\n\n }\n\n }\n\n}\n\n\n\n/// A `Cluster` is BFFFS's equivalent of ZFS's top-level Vdev. 
It is the\n\n/// highest level `Vdev` that has its own LBA space.\n\npub struct Cluster {\n\n fsm: RefCell<FreeSpaceMap>,\n", "file_path": "bfffs/src/common/cluster.rs", "rank": 84, "score": 67.35623147968111 }, { "content": " sync::{Arc, Mutex}\n\n };\n\n use tempdir::TempDir;\n\n use tokio::runtime::current_thread::Runtime;\n\n\n\n const LBA_PER_ZONE: LbaT = 256;\n\n const POOLNAME: &str = &\"TestPool\";\n\n\n\n fixture!( objects() -> (Runtime, IDML, TempDir) {\n\n setup(&mut self) {\n\n let len = 1 << 26; // 64 MB\n\n let tempdir = t!(TempDir::new(\"test_idml_persistence\"));\n\n let filename = tempdir.path().join(\"vdev\");\n\n {\n\n let file = t!(fs::File::create(&filename));\n\n t!(file.set_len(len));\n\n }\n\n let paths = [filename.clone()];\n\n let mut rt = Runtime::new().unwrap();\n\n let pool = rt.block_on(future::lazy(|| {\n", "file_path": "bfffs/tests/common/idml.rs", "rank": 85, "score": 66.96612627771319 }, { "content": " .and_then(|cluster| {\n\n Pool::create(String::from(\"database::shutdown\"), vec![cluster])\n\n .map(|pool| {\n\n let cache = Arc::new(\n\n Mutex::new(\n\n Cache::with_capacity(1_000_000)\n\n )\n\n );\n\n let ddml = Arc::new(DDML::new(pool, cache.clone()));\n\n let idml = IDML::create(ddml, cache);\n\n Database::create(Arc::new(idml), handle)\n\n })\n\n })\n\n })).unwrap();\n\n rt.block_on(db.shutdown()).unwrap();\n\n rt.shutdown_on_idle();\n\n }\n\n}\n", "file_path": "bfffs/tests/common/database.rs", "rank": 86, "score": 66.89988749171071 }, { "content": " self.clusters[pba.cluster as usize].free(pba.lba, length)\n\n }\n\n\n\n /// Construct a new `Pool` from some already constructed\n\n /// [`Cluster`](struct.Cluster.html)s.\n\n ///\n\n /// Must be called from within the context of a Tokio Runtime.\n\n #[allow(clippy::new_ret_no_self)]\n\n fn new(name: String, uuid: Uuid, clusters: Vec<ClusterProxy>)\n\n -> impl Future<Item=Self, Error=Error>\n\n {\n\n let size_fut = future::join_all(clusters.iter()\n\n .map(ClusterProxy::size)\n\n 
.collect::<Vec<_>>()\n\n );\n\n let allocated_fut = future::join_all(clusters.iter()\n\n .map(|cluster| cluster.allocated()\n\n .map(AtomicU64::new)\n\n ).collect::<Vec<_>>()\n\n );\n", "file_path": "bfffs/src/common/pool.rs", "rank": 87, "score": 64.59756203148068 }, { "content": " /// or equal to the number of disks in `paths`.\n\n /// * `lbas_per_zone`: If specified, this many LBAs will be assigned to\n\n /// simulated zones on devices that don't have\n\n /// native zones.\n\n /// * `redundancy`: Degree of RAID redundancy. Up to this many\n\n /// disks may fail before the array becomes\n\n /// inoperable.\n\n /// * `paths`: Slice of pathnames of files and/or devices\n\n // Hide from docs. The public API should just be raid::create, but this\n\n // function technically needs to be public for testing purposes.\n\n #[doc(hidden)]\n\n pub fn create<P>(chunksize: Option<NonZeroU64>, disks_per_stripe: i16,\n\n lbas_per_zone: Option<NonZeroU64>, redundancy: i16, paths: Vec<P>)\n\n -> Self\n\n where P: AsRef<Path> + 'static\n\n {\n\n let num_disks = paths.len() as i16;\n\n let (layout, chunksize) = VdevRaid::choose_layout(num_disks,\n\n disks_per_stripe, redundancy, chunksize);\n\n let uuid = Uuid::new_v4();\n", "file_path": "bfffs/src/common/raid/vdev_raid.rs", "rank": 88, "score": 63.530726184035366 }, { "content": " let (idml, label_reader) = idml::IDML::open(ddml, arc_cache,\n\n label_reader);\n\n database::Database::open(Arc::new(idml), handle, label_reader)\n\n })\n\n }\n\n\n\n /// Import all of the clusters from a Pool. 
For debugging purposes only.\n\n #[doc(hidden)]\n\n pub fn import_clusters(&self, uuid: Uuid)\n\n -> impl Future<Item = Vec<Cluster>, Error = Error>\n\n {\n\n let inner = self.inner.lock().unwrap();\n\n let (_pool, raids, mut leaves) = self.open_labels(uuid, inner);\n\n let cfuts = raids.into_iter().map(move |raid| {\n\n let leaf_paths = leaves.remove(&raid.uuid()).unwrap();\n\n DevManager::open_cluster(leaf_paths, raid.uuid())\n\n .map(|(cluster, _reader)| cluster)\n\n });\n\n future::join_all(cfuts)\n\n }\n", "file_path": "bfffs/src/common/device_manager.rs", "rank": 89, "score": 62.93598828603529 }, { "content": " let ddml = Arc::new(DDML::new(pool, cache.clone()));\n\n let idml = IDML::create(ddml, cache);\n\n Arc::new(Database::create(Arc::new(idml), handle))\n\n })\n\n })\n\n })).unwrap();\n\n let handle = rt.handle().clone();\n\n let (db, fs) = rt.block_on(future::lazy(move || {\n\n db.new_fs(Vec::new())\n\n .map(move |tree_id| {\n\n let fs = Fs::new(db.clone(), handle, tree_id);\n\n (db, fs)\n\n })\n\n })).unwrap();\n\n let seed = self.seed.unwrap_or_else(|| {\n\n let mut seed = [0u8; 16];\n\n let mut seeder = thread_rng();\n\n seeder.fill_bytes(&mut seed);\n\n seed\n\n });\n", "file_path": "bfffs/tests/common/fs.rs", "rank": 90, "score": 62.805349450367 }, { "content": " num::NonZeroU64,\n\n sync::{Arc, Mutex},\n\n thread,\n\n time\n\n };\n\n use tempdir::TempDir;\n\n use tokio_io_pool::Runtime;\n\n\n\n fixture!( mocks(devsize: u64, zone_size: u64)\n\n -> (Arc<Database>, Fs, Runtime)\n\n {\n\n setup(&mut self) {\n\n let mut rt = Runtime::new();\n\n let handle = rt.handle().clone();\n\n let tempdir = t!(TempDir::new(\"test_fs\"));\n\n let filename = tempdir.path().join(\"vdev\");\n\n let file = t!(fs::File::create(&filename));\n\n t!(file.set_len(*self.devsize));\n\n drop(file);\n\n let zone_size = NonZeroU64::new(*self.zone_size);\n", "file_path": "bfffs/tests/common/clean_zone.rs", "rank": 91, "score": 61.8838899471807 }, { "content": " }\n\n\n\n // TODO: 
add a test for getting a non-cached property, once it's possible to\n\n // make multiple datasets\n\n\n\n // TODO: add tests for inherited properties, once it's possible to make\n\n // multiple datasets.\n\n\n\n test shutdown() {\n\n let mut rt = tokio_io_pool::Runtime::new();\n\n let handle = rt.handle().clone();\n\n let len = 1 << 30; // 1GB\n\n let tempdir = t!(TempDir::new(\"database::shutdown\"));\n\n let filename = tempdir.path().join(\"vdev\");\n\n let file = t!(fs::File::create(&filename));\n\n t!(file.set_len(len));\n\n drop(file);\n\n let mut db = rt.block_on(future::lazy(move || {\n\n Pool::create_cluster(None, 1, None, 0, &[filename])\n\n .map_err(|_| unreachable!())\n", "file_path": "bfffs/tests/common/database.rs", "rank": 92, "score": 61.32844787181061 }, { "content": " let blockdevs = paths.into_iter().map(|path| {\n\n VdevBlock::create(path, lbas_per_zone).unwrap()\n\n }).collect::<Vec<_>>();\n\n VdevRaid::new(chunksize, disks_per_stripe, redundancy, uuid,\n\n layout, blockdevs.into_boxed_slice())\n\n }\n\n\n\n fn new(chunksize: LbaT,\n\n disks_per_stripe: i16,\n\n redundancy: i16,\n\n uuid: Uuid,\n\n layout_algorithm: LayoutAlgorithm,\n\n blockdevs: Box<[VdevBlock]>) -> Self\n\n {\n\n let num_disks = blockdevs.len() as i16;\n\n let codec = Codec::new(disks_per_stripe as u32, redundancy as u32);\n\n let locator: Box<dyn Locator> = match layout_algorithm {\n\n LayoutAlgorithm::PrimeS => Box::new(\n\n PrimeS::new(num_disks, disks_per_stripe, redundancy))\n\n };\n", "file_path": "bfffs/src/common/raid/vdev_raid.rs", "rank": 93, "score": 60.44742431559898 }, { "content": "\n\n uuid: Uuid,\n\n}\n\n\n\nimpl VdevOneDisk {\n\n /// Create a new VdevOneDisk from an unused file or device\n\n ///\n\n /// * `lbas_per_zone`: If specified, this many LBAs will be assigned to\n\n /// simulated zones on devices that don't have\n\n /// native zones.\n\n /// * `path`: Pathnames of file or device\n\n // Hide from docs. 
The public API should just be raid::create, but this\n\n // function technically needs to be public for testing purposes.\n\n #[doc(hidden)]\n\n pub fn create<P>(lbas_per_zone: Option<NonZeroU64>, path: P) -> Self\n\n where P: AsRef<Path> + 'static\n\n {\n\n let uuid = Uuid::new_v4();\n\n let blockdev = VdevBlock::create(path, lbas_per_zone).unwrap();\n\n VdevOneDisk{uuid, blockdev}\n", "file_path": "bfffs/src/common/raid/vdev_onedisk.rs", "rank": 94, "score": 60.29572594222386 }, { "content": " .map_err(|_| unreachable!())\n\n .and_then(|clusters|\n\n Pool::create(\"TestPool\".to_string(), clusters)\n\n )\n\n })).unwrap();\n\n let cache = Cache::with_capacity(1_000_000_000);\n\n (rt, DDML::new(pool, Arc::new(Mutex::new(cache))))\n\n }\n\n });\n\n\n\n test basic(objects) {\n\n let (mut rt, ddml) = objects.val;\n\n let dbs = DivBufShared::from(vec![42u8; 4096]);\n\n let ddml2 = &ddml;\n\n rt.block_on(future::lazy(|| {\n\n ddml.put(dbs, Compression::None, TxgT::from(0))\n\n .and_then(move |drp| {\n\n let drp2 = &drp;\n\n ddml2.get::<DivBufShared, DivBuf>(drp2)\n\n .map(|db: Box<DivBuf>| {\n", "file_path": "bfffs/tests/common/ddml.rs", "rank": 95, "score": 59.18493568502519 }, { "content": " }).collect::<Vec<_>>();\n\n let cs = NonZeroU64::new(*self.chunksize);\n\n let vdev_raid = VdevRaid::create(cs, *self.k, None, *self.f,\n\n paths.clone());\n\n current_thread::Runtime::new().unwrap().block_on(\n\n vdev_raid.open_zone(0)\n\n ).expect(\"open_zone\");\n\n (vdev_raid, tempdir, paths)\n\n }\n\n });\n\n\n\n fn make_bufs(chunksize: LbaT, k: i16, f: i16, s: usize) ->\n\n (DivBufShared, DivBufShared) {\n\n\n\n let chunks = s * (k - f) as usize;\n\n let lbas = chunksize * chunks as LbaT;\n\n let bytes = BYTES_PER_LBA * lbas as usize;\n\n let mut wvec = vec![0u8; bytes];\n\n let mut rng = thread_rng();\n\n for x in &mut wvec {\n", "file_path": "bfffs/tests/common/raid/vdev_raid.rs", "rank": 96, "score": 58.22505608956824 }, { "content": " // Write the label, and compare to a 
golden master\n\n test write_label(fixture) {\n\n let lbas_per_zone = NonZeroU64::new(0xdead_beef_1a7e_babe);\n\n let vdev = VdevFile::create(fixture.val.0.clone(), lbas_per_zone)\n\n .unwrap();\n\n t!(current_thread::Runtime::new().unwrap().block_on(future::lazy(|| {\n\n let label_writer = LabelWriter::new(0);\n\n vdev.write_label(label_writer)\n\n })));\n\n\n\n let mut f = std::fs::File::open(fixture.val.0).unwrap();\n\n let mut v = vec![0; 4096];\n\n f.read_exact(&mut v).unwrap();\n\n // Uncomment this block to save the binary label for inspection\n\n /* {\n\n use std::fs::File;\n\n use std::io::Write;\n\n let mut df = File::create(\"/tmp/label.bin\").unwrap();\n\n df.write_all(&v[..]).unwrap();\n\n println!(\"UUID is {}\", vdev.uuid());\n\n } */\n\n // Compare against the golden master, skipping the checksum and UUID\n\n // fields\n\n assert_eq!(&v[0..16], &GOLDEN[0..16]);\n\n assert_eq!(&v[24..32], &GOLDEN[24..32]);\n\n assert_eq!(&v[48..GOLDEN.len()], &GOLDEN[48..GOLDEN.len()]);\n\n }\n\n}\n", "file_path": "bfffs/tests/common/vdev_file.rs", "rank": 97, "score": 58.08487809680813 }, { "content": " /// or equal to the size of `paths`.\n\n /// * `lbas_per_zone`: If specified, this many LBAs will be assigned to\n\n /// simulated zones on devices that don't have\n\n /// native zones.\n\n /// * `redundancy`: Degree of RAID redundancy. 
Up to this many\n\n /// disks may fail before the array becomes\n\n /// inoperable.\n\n /// * `paths`: Slice of pathnames of files and/or devices\n\n #[cfg(not(test))]\n\n pub fn create_cluster<P: AsRef<Path> + Sync>(chunksize: Option<NonZeroU64>,\n\n disks_per_stripe: i16,\n\n lbas_per_zone: Option<NonZeroU64>,\n\n redundancy: i16,\n\n paths: &[P])\n\n -> impl Future<Item=ClusterProxy, Error=()>\n\n {\n\n let (tx, rx) = oneshot::channel();\n\n // DefaultExecutor needs 'static futures; we must copy the Paths\n\n let owned_paths = paths.iter()\n\n .map(|p| p.as_ref().to_owned())\n", "file_path": "bfffs/src/common/pool.rs", "rank": 98, "score": 57.88022602307677 }, { "content": " #[cfg(not(test))]\n\n pub fn dump(&self, f: &mut dyn io::Write, tree: TreeID) -> Result<(), Error>\n\n {\n\n let mut rt = current_thread::Runtime::new().unwrap();\n\n rt.block_on(future::lazy(|| {\n\n Inner::open_filesystem(&self.inner, tree)\n\n })).unwrap();\n\n self.inner.fs_trees.try_lock()\n\n .map_err(|_| Error::EDEADLK)\n\n .map(|guard| {\n\n guard.get(&tree).unwrap()\n\n .dump(f).unwrap();\n\n })\n\n }\n\n\n\n /// Get the value of the `name` property for the dataset identified by\n\n /// `tree`.\n\n pub fn get_prop(&self, tree_id: TreeID, name: PropertyName)\n\n -> impl Future<Item=(Property, PropertySource), Error=Error> + Send\n\n {\n", "file_path": "bfffs/src/common/database/database.rs", "rank": 99, "score": 57.70064648502797 } ]
Rust
src/lib.rs
aki-akaguma/cmp_polymorphism
67da07762a02dee2aac7d3033d2b312fb2a1fedc
pub mod enum_obj; pub mod trait_obj; pub fn do_trait_obj( count: i32, ) -> anyhow::Result<( (&'static str, i32), (&'static str, i32), (&'static str, i32), (&'static str, i32), (&'static str, i32), )> { let a: Box<dyn trait_obj::Animal> = Box::new(trait_obj::Cat::new(count)); let b: Box<dyn trait_obj::Animal> = Box::new(trait_obj::Dog::new(count, count + 1)); let c: Box<dyn trait_obj::Animal> = Box::new(trait_obj::Duck::new(count, count + 1, count + 2)); let d: Box<dyn trait_obj::Animal> = Box::new(trait_obj::Crow::new(count, count + 1, count + 2, count + 3)); let e: Box<dyn trait_obj::Animal> = Box::new(trait_obj::Frog::new( count, count + 1, count + 2, count + 3, count + 4, )); Ok(( (a.talk(), a.sum()), (b.talk(), b.sum()), (c.talk(), c.sum()), (d.talk(), d.sum()), (e.talk(), e.sum()), )) } pub fn do_enum_obj( count: i32, ) -> anyhow::Result<( (&'static str, i32), (&'static str, i32), (&'static str, i32), (&'static str, i32), (&'static str, i32), )> { let a: Box<enum_obj::Animal> = Box::new(enum_obj::Animal::Cat(count)); let b: Box<enum_obj::Animal> = Box::new(enum_obj::Animal::Dog(count, count + 1)); let c: Box<enum_obj::Animal> = Box::new(enum_obj::Animal::Duck(count, count + 1, count + 2)); let d: Box<enum_obj::Animal> = Box::new(enum_obj::Animal::Crow( count, count + 1, count + 2, count + 3, )); let e: Box<enum_obj::Animal> = Box::new(enum_obj::Animal::Frog( count, count + 1, count + 2, count + 3, count + 4, )); Ok(( (a.talk(), a.sum()), (b.talk(), b.sum()), (c.talk(), c.sum()), (d.talk(), d.sum()), (e.talk(), e.sum()), )) } pub fn create_trait_objs(count: usize) -> Vec<Box<dyn trait_obj::Animal>> { let v: Vec<Box<dyn trait_obj::Animal>> = vec![ Box::new(trait_obj::Cat::new(1)) as Box<dyn trait_obj::Animal>, Box::new(trait_obj::Dog::new(1, 2)) as Box<dyn trait_obj::Animal>, Box::new(trait_obj::Duck::new(1, 2, 3)) as Box<dyn trait_obj::Animal>, Box::new(trait_obj::Crow::new(1, 2, 3, 4)) as Box<dyn trait_obj::Animal>, Box::new(trait_obj::Frog::new(1, 2, 3, 4, 
5)) as Box<dyn trait_obj::Animal>, ] .into_iter() .cycle() .take(count) .collect(); v } pub fn sum_id_trait_objs(vec: &Vec<Box<dyn trait_obj::Animal>>) -> i32 { let mut acc = 0; for a in vec { acc += a.animal_id(); } acc } pub fn sum_sum_trait_objs(vec: &Vec<Box<dyn trait_obj::Animal>>) -> i32 { let mut acc = 0; for a in vec { acc += a.sum(); } acc } pub fn sum_rem_trait_objs(vec: &Vec<Box<dyn trait_obj::Animal>>) -> i32 { let mut acc = 0; for a in vec { acc += a.rem(); } acc } pub fn create_enum_objs(count: usize) -> Vec<Box<enum_obj::Animal>> { let v: Vec<Box<enum_obj::Animal>> = vec![ Box::new(enum_obj::Animal::Cat(1)) as Box<enum_obj::Animal>, Box::new(enum_obj::Animal::Dog(1, 2)) as Box<enum_obj::Animal>, Box::new(enum_obj::Animal::Duck(1, 2, 3)) as Box<enum_obj::Animal>, Box::new(enum_obj::Animal::Crow(1, 2, 3, 4)) as Box<enum_obj::Animal>, Box::new(enum_obj::Animal::Frog(1, 2, 3, 4, 5)) as Box<enum_obj::Animal>, ] .into_iter() .cycle() .take(count) .collect(); v } pub fn sum_id_enum_objs(vec: &Vec<Box<enum_obj::Animal>>) -> i32 { let mut acc = 0; for a in vec { acc += a.animal_id(); } acc } pub fn sum_sum_enum_objs(vec: &Vec<Box<enum_obj::Animal>>) -> i32 { let mut acc = 0; for a in vec { acc += a.sum(); } acc } pub fn sum_rem_enum_objs(vec: &Vec<Box<enum_obj::Animal>>) -> i32 { let mut acc = 0; for a in vec { acc += a.rem(); } acc }
pub mod enum_obj; pub mod trait_obj; pub fn do_trait_obj( count: i32, ) -> anyhow::Result<( (&'static str, i32), (&'static str, i32), (&'static str, i32), (&'static str, i32), (&'static str, i32), )> { let a: Box<dyn trait_obj::Animal> = Box::new(trait_obj::Cat::new(count)); let b: Box<dyn trait_obj::Animal> = Box::new(trait_obj::Dog::new(count, count + 1)); let c: Box<dyn trait_obj::Animal> = Box::new(trait_obj::Duck::new(count, count + 1, count + 2)); let d: Box<dyn trait_obj::Animal> = Box::new(trait_obj::Crow::new(count, count + 1, count + 2, count + 3)); let e: Box<dyn trait_obj::Animal> = Box::new(trait_obj::Frog::new( count, count + 1, count + 2, count + 3, count + 4, )); Ok(( (a.talk(), a.sum()), (b.talk(), b.sum()), (c.talk(), c.sum()), (d.talk(), d.sum()), (e.talk(), e.sum()), )) } pub fn do_enum_obj( count: i32, ) -> anyhow::Result<( (&'static str, i32), (&'static str, i32), (&'static str, i32), (&'static str, i32), (&'static str, i32), )> { let a: Box<enum_obj::Animal> = Box::new(enum_obj::Animal::Cat(count)); let b: Box<enum_obj::Animal> = Box::new(enum_obj::Animal::Dog(count, count + 1)); let c: Box<enum_obj::Animal> = Box::new(enum_obj::Animal::Duck(count, count + 1, count + 2)); let d: Box<enum_obj::Animal> = Box::new(enum_obj::Animal::Crow( count, count + 1, count + 2, count + 3, )); let e: Box<enum_obj::Animal> = Box::new(enum_obj::Animal::Frog( count, count + 1, count + 2, count + 3, count + 4, )); Ok(( (a.talk(), a.sum()), (b.talk(), b.sum()), (c.talk(), c.sum()), (d.talk(), d.sum()), (e.talk(), e.sum()), )) } pub fn create_trait_objs(count: usize) -> Vec<Box<dyn trait_obj::Animal>> { let v: Vec<Box<dyn trait_obj::Animal>> = vec![ Box::new(trait_obj::Cat::new(1)) as Box<dyn trait_obj::Animal>, Box::new(trait_obj::Dog::new(1, 2)) as Box<dyn trait_obj::Animal>, Box::new(trait_obj::Duck::new(1, 2, 3)) as Box<dyn trait_obj::Animal>, Box::new(trait_obj::Crow::new(1, 2, 3, 4)) as Box<dyn trait_obj::Animal>, Box::new(trait_obj::Frog::new(1, 2, 3, 4, 
5)) as Box<dyn trait_obj::Animal>, ] .into_iter() .cycle() .take(count) .collect(); v } pub fn sum_id_trait_objs(vec: &Vec<Box<dyn trait_obj::Animal>>) -> i32 { let mut acc = 0; for a in vec { acc += a.animal_id(); } acc } pub fn sum_sum_t
pub fn sum_rem_trait_objs(vec: &Vec<Box<dyn trait_obj::Animal>>) -> i32 { let mut acc = 0; for a in vec { acc += a.rem(); } acc } pub fn create_enum_objs(count: usize) -> Vec<Box<enum_obj::Animal>> { let v: Vec<Box<enum_obj::Animal>> = vec![ Box::new(enum_obj::Animal::Cat(1)) as Box<enum_obj::Animal>, Box::new(enum_obj::Animal::Dog(1, 2)) as Box<enum_obj::Animal>, Box::new(enum_obj::Animal::Duck(1, 2, 3)) as Box<enum_obj::Animal>, Box::new(enum_obj::Animal::Crow(1, 2, 3, 4)) as Box<enum_obj::Animal>, Box::new(enum_obj::Animal::Frog(1, 2, 3, 4, 5)) as Box<enum_obj::Animal>, ] .into_iter() .cycle() .take(count) .collect(); v } pub fn sum_id_enum_objs(vec: &Vec<Box<enum_obj::Animal>>) -> i32 { let mut acc = 0; for a in vec { acc += a.animal_id(); } acc } pub fn sum_sum_enum_objs(vec: &Vec<Box<enum_obj::Animal>>) -> i32 { let mut acc = 0; for a in vec { acc += a.sum(); } acc } pub fn sum_rem_enum_objs(vec: &Vec<Box<enum_obj::Animal>>) -> i32 { let mut acc = 0; for a in vec { acc += a.rem(); } acc }
rait_objs(vec: &Vec<Box<dyn trait_obj::Animal>>) -> i32 { let mut acc = 0; for a in vec { acc += a.sum(); } acc }
function_block-function_prefixed
[ { "content": "fn set_size(bench_vec: &mut Vec<BenchStr>, in_file: &str) -> anyhow::Result<()> {\n\n let mut base_time = 0f64;\n\n let mut base_size = 0u64;\n\n let re_1 = regex::Regex::new(r\"^ *(\\d+)\\t.*\\t([^ ]+)$\").unwrap();\n\n let reader = std::io::BufReader::new(\n\n std::fs::File::open(in_file)\n\n .with_context(|| format!(\"could not open file `{}`\", in_file))?,\n\n );\n\n for line in reader.lines() {\n\n let line = line?;\n\n if let Some(caps) = re_1.captures(&line) {\n\n // 934281\t 26312\t 736\t 961329\t eab31\tcmp_structopt-curl\n\n let size_s = &caps[1];\n\n let name_s = &caps[2];\n\n let name = if name_s.ends_with(\"-curl\") {\n\n &name_s[0..(name_s.len() - 5)]\n\n } else {\n\n name_s\n\n };\n\n let i = match bench_vec.iter().position(|x| x.name == name) {\n", "file_path": "xtask/src/shape_benchmark_results.rs", "rank": 8, "score": 105882.19459231685 }, { "content": "fn criterion_benchmark(c: &mut Criterion<CyclesPerByte>) {\n\n match process_one(criterion::black_box(criterion::black_box(10))) {\n\n Ok(((ct, cn), (dt, dn), (ut, un), (rt, rn), (gt, gn))) => {\n\n assert_eq!(ct, \"Meow!\");\n\n assert_eq!(cn, 10);\n\n assert_eq!(dt, \"Woof!\");\n\n assert_eq!(dn, 10 + 10 + 1);\n\n assert_eq!(ut, \"Quack!\");\n\n assert_eq!(un, 10 + 10 + 1 + 10 + 2);\n\n assert_eq!(rt, \"Caw!\");\n\n assert_eq!(rn, 10 + 10 + 1 + 10 + 2 + 10 + 3);\n\n assert_eq!(gt, \"Croak!\");\n\n assert_eq!(gn, 10 + 10 + 1 + 10 + 2 + 10 + 3 + 10 + 4);\n\n }\n\n Err(err) => {\n\n eprintln!(\"{}\", err);\n\n unreachable!();\n\n }\n\n }\n\n //\n", "file_path": "benches/bench-trait-obj.rs", "rank": 9, "score": 85343.53600622079 }, { "content": "fn criterion_benchmark(c: &mut Criterion<CyclesPerByte>) {\n\n match process_one(criterion::black_box(criterion::black_box(10))) {\n\n Ok(((ct, cn), (dt, dn), (ut, un), (rt, rn), (gt, gn))) => {\n\n assert_eq!(ct, \"Meow!\");\n\n assert_eq!(cn, 10);\n\n assert_eq!(dt, \"Woof!\");\n\n assert_eq!(dn, 10 + 10 + 1);\n\n assert_eq!(ut, 
\"Quack!\");\n\n assert_eq!(un, 10 + 10 + 1 + 10 + 2);\n\n assert_eq!(rt, \"Caw!\");\n\n assert_eq!(rn, 10 + 10 + 1 + 10 + 2 + 10 + 3);\n\n assert_eq!(gt, \"Croak!\");\n\n assert_eq!(gn, 10 + 10 + 1 + 10 + 2 + 10 + 3 + 10 + 4);\n\n }\n\n Err(err) => {\n\n eprintln!(\"{}\", err);\n\n unreachable!();\n\n }\n\n }\n\n //\n", "file_path": "benches/bench-enum-obj.rs", "rank": 10, "score": 85343.53600622079 }, { "content": "pub fn run(_program: &str, _args: &[&str]) -> anyhow::Result<()> {\n\n let mut bench_vec = get_bench(\"z.bench.log\")?;\n\n //set_size(&mut bench_vec, \"z.size-release.curl.log\")?;\n\n output(bench_vec)?;\n\n //\n\n //let mut bench_vec = get_bench(\"z.bench-release-s.curl.log\")?;\n\n //set_size(&mut bench_vec, \"z.size-release.curl.log\")?;\n\n //output(bench_vec)?;\n\n //\n\n Ok(())\n\n}\n\n\n", "file_path": "xtask/src/shape_benchmark_results.rs", "rank": 11, "score": 84442.71275346605 }, { "content": "fn get_bench(in_file: &str) -> anyhow::Result<Vec<BenchStr>> {\n\n let mut vec_benchstr: Vec<BenchStr> = Vec::new();\n\n //\n\n let re_1 =\n\n regex::Regex::new(r\"^([^ ]+) +time: +[\\[][^ ]+ [^ ]+ ([^ ]+) ([^ ]+) [^ ]+ [^ ]+[\\]]$\")\n\n .unwrap();\n\n //\n\n let reader = std::io::BufReader::new(\n\n std::fs::File::open(in_file)\n\n .with_context(|| format!(\"could not open file `{}`\", in_file))?,\n\n );\n\n for line in reader.lines() {\n\n let line = line?;\n\n if let Some(caps) = re_1.captures(&line) {\n\n // cmp_structopt::curl:: time: [302.50 us 302.87 us 303.34 us]\n\n // cmp_structopt::curl:: time: [714991.6559 cycles 715483.2743 cycles 716029.3928 cycles]\n\n let nm = normalize_name(&caps[1])?;\n\n let tm = normalize_time(&caps[2], &caps[3])?;\n\n let is_cycle = if &caps[3] == \"cycles\" { true } else { false };\n\n let time_1k = if nm.ends_with(\"^01k\") {\n", "file_path": "xtask/src/shape_benchmark_results.rs", "rank": 14, "score": 79728.10817275239 }, { "content": "fn output(bench_vec: Vec<BenchStr>) -> anyhow::Result<()> {\n\n 
println!(\n\n \"| {:^22} | {:^11} | {:^11} |\",\n\n \"`name`\", \"`bench`\", \"`per 1k`\"\n\n );\n\n println!(\n\n \"|:{:<22}-|-{:>11}:|-{:>11}:|\",\n\n \"-\".repeat(22),\n\n \"-\".repeat(11),\n\n \"-\".repeat(11),\n\n );\n\n for bench in bench_vec {\n\n if bench.is_cycle {\n\n println!(\n\n \"| {:<22} | {:>8.3} kc | {:>8.3} kc |\",\n\n bench.name,\n\n bench.time / 1000.0,\n\n bench.time_1k / 1000.0,\n\n );\n\n } else {\n", "file_path": "xtask/src/shape_benchmark_results.rs", "rank": 15, "score": 69241.38423055966 }, { "content": "fn print_version_and_exit(_program: &str) {\n\n println!(\"{} {}\", env!(\"CARGO_PKG_NAME\"), env!(\"CARGO_PKG_VERSION\"));\n\n std::process::exit(0);\n\n}\n\n\n", "file_path": "xtask/src/main.rs", "rank": 16, "score": 57828.66545381921 }, { "content": "fn print_help_and_exit(program: &str) {\n\n println!(\n\n \"[usage] {} {{ {} }}\",\n\n program,\n\n concat!(\"shape_benchmark_results\",)\n\n );\n\n std::process::exit(0);\n\n}\n", "file_path": "xtask/src/main.rs", "rank": 17, "score": 57828.66545381921 }, { "content": "fn normalize_time(num_s: &str, unit_s: &str) -> anyhow::Result<f64> {\n\n let num: f64 = num_s.parse::<f64>()?;\n\n let unit: f64 = match unit_s {\n\n \"ms\" => 0.001,\n\n \"us\" => 0.000001,\n\n \"ns\" => 0.000000001,\n\n \"ps\" => 0.000000000001,\n\n \"cycles\" => 1.0,\n\n _ => {\n\n let msg = format!(\"can not convert unit: {}\", unit_s);\n\n return Err(anyhow::Error::msg(msg));\n\n }\n\n };\n\n Ok(num * unit)\n\n}\n", "file_path": "xtask/src/shape_benchmark_results.rs", "rank": 18, "score": 57146.21500195896 }, { "content": "fn normalize_name(name_s: &str) -> anyhow::Result<String> {\n\n Ok(name_s.to_string())\n\n}\n\n\n", "file_path": "xtask/src/shape_benchmark_results.rs", "rank": 19, "score": 49130.663139446566 }, { "content": "pub trait Animal {\n\n fn animal_id(&self) -> i32;\n\n fn talk(&self) -> &'static str;\n\n fn sum(&self) -> i32;\n\n fn rem(&self) -> i32;\n\n fn box_clone(&self) -> Box<dyn 
Animal>;\n\n}\n\nimpl Clone for Box<dyn Animal> {\n\n fn clone(&self) -> Box<dyn Animal> {\n\n self.box_clone()\n\n }\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct Cat(i32);\n\n#[derive(Clone)]\n\npub struct Dog(i32, i32);\n\n#[derive(Clone)]\n\npub struct Duck(i32, i32, i32);\n\n#[derive(Clone)]\n", "file_path": "src/trait_obj.rs", "rank": 20, "score": 36841.14832373393 }, { "content": "fn main() {\n\n let vec = cmp_polymorphism::create_enum_objs(1000000);\n\n let r1 = {\n\n let mut r = 0;\n\n for _i in 0..100 {\n\n let rr = cmp_polymorphism::sum_id_enum_objs(&vec);\n\n r += rr;\n\n }\n\n r\n\n };\n\n let r2 = {\n\n let mut r = 0;\n\n for _i in 0..100 {\n\n let rr = cmp_polymorphism::sum_sum_enum_objs(&vec);\n\n r += rr;\n\n }\n\n r\n\n };\n\n let r3 = {\n\n let mut r = 0;\n\n for _i in 0..100 {\n\n let rr = cmp_polymorphism::sum_rem_enum_objs(&vec);\n\n r += rr;\n\n }\n\n r\n\n };\n\n println!(\"{},{},{}\", r1, r2, r3);\n\n}\n", "file_path": "src/bin-enum-obj.rs", "rank": 21, "score": 25774.02168678796 }, { "content": "fn main() {\n\n let vec = cmp_polymorphism::create_trait_objs(1000000);\n\n let r1 = {\n\n let mut r = 0;\n\n for _i in 0..100 {\n\n let rr = cmp_polymorphism::sum_id_trait_objs(&vec);\n\n r += rr;\n\n }\n\n r\n\n };\n\n let r2 = {\n\n let mut r = 0;\n\n for _i in 0..100 {\n\n let rr = cmp_polymorphism::sum_sum_trait_objs(&vec);\n\n r += rr;\n\n }\n\n r\n\n };\n\n let r3 = {\n\n let mut r = 0;\n\n for _i in 0..100 {\n\n let rr = cmp_polymorphism::sum_rem_trait_objs(&vec);\n\n r += rr;\n\n }\n\n r\n\n };\n\n println!(\"{},{},{}\", r1, r2, r3);\n\n}\n", "file_path": "src/bin-trait-obj.rs", "rank": 22, "score": 25774.02168678796 }, { "content": "fn process_one(\n\n count: i32,\n\n) -> anyhow::Result<(\n\n (&'static str, i32),\n\n (&'static str, i32),\n\n (&'static str, i32),\n\n (&'static str, i32),\n\n (&'static str, i32),\n\n)> {\n\n cmp_polymorphism::do_enum_obj(count)\n\n}\n\n\n", "file_path": "benches/bench-enum-obj.rs", "rank": 23, "score": 
25010.906212291877 }, { "content": "fn process_one(\n\n count: i32,\n\n) -> anyhow::Result<(\n\n (&'static str, i32),\n\n (&'static str, i32),\n\n (&'static str, i32),\n\n (&'static str, i32),\n\n (&'static str, i32),\n\n)> {\n\n cmp_polymorphism::do_trait_obj(count)\n\n}\n\n\n", "file_path": "benches/bench-trait-obj.rs", "rank": 24, "score": 25010.906212291877 }, { "content": "fn main() -> anyhow::Result<()> {\n\n let mut env_args: Vec<String> = std::env::args().collect();\n\n let program = env_args.remove(0);\n\n if env_args.is_empty() {\n\n print_help_and_exit(&program);\n\n }\n\n let cmd = env_args[0].as_str();\n\n let program = &program;\n\n let env_args: Vec<&str> = env_args[1..].iter().map(|s| s.as_str()).collect();\n\n #[rustfmt::skip]\n\n match cmd {\n\n \"shape_benchmark_results\" => shape_benchmark_results::run(&format!(\"{} {}\", program, cmd), &env_args)?,\n\n //\n\n \"--help\" | \"-h\" | \"-H\" | \"help\" => print_help_and_exit(program),\n\n \"--version\" | \"-V\" | \"-v\" => print_version_and_exit(program),\n\n _ => {\n\n eprintln!(\"Not fount command: {}\", cmd);\n\n unreachable!()\n\n }\n\n };\n\n //\n\n Ok(())\n\n}\n\n\n", "file_path": "xtask/src/main.rs", "rank": 25, "score": 23974.930842652007 }, { "content": " pub fn new(a: i32, b: i32, c: i32, d: i32) -> Self {\n\n Self(a, b, c, d)\n\n }\n\n}\n\nimpl Frog {\n\n pub fn new(a: i32, b: i32, c: i32, d: i32, e: i32) -> Self {\n\n Self(a, b, c, d, e)\n\n }\n\n}\n\n\n\nimpl Animal for Cat {\n\n fn animal_id(&self) -> i32 {\n\n 1\n\n }\n\n fn talk(&self) -> &'static str {\n\n \"Meow!\"\n\n }\n\n fn sum(&self) -> i32 {\n\n self.0\n\n }\n", "file_path": "src/trait_obj.rs", "rank": 26, "score": 6.636125100442468 }, { "content": "#[derive(Clone)]\n\npub enum Animal {\n\n Cat(i32),\n\n Dog(i32, i32),\n\n Duck(i32, i32, i32),\n\n Crow(i32, i32, i32, i32),\n\n Frog(i32, i32, i32, i32, i32),\n\n}\n\nimpl Animal {\n\n pub fn animal_id(&self) -> i32 {\n\n match self {\n\n Animal::Cat(_a) => 1,\n\n 
Animal::Dog(_a, _b) => 2,\n\n Animal::Duck(_a, _b, _c) => 3,\n\n Animal::Crow(_a, _b, _c, _d) => 4,\n\n Animal::Frog(_a, _b, _c, _d, _e) => 5,\n\n }\n\n }\n\n pub fn talk(&self) -> &'static str {\n\n match self {\n", "file_path": "src/enum_obj.rs", "rank": 27, "score": 6.235879831798293 }, { "content": " tm\n\n } else if nm.ends_with(\"^08k\") {\n\n tm / 8.0\n\n } else if nm.ends_with(\"^90k\") {\n\n tm / 90.0\n\n } else {\n\n 0.0\n\n };\n\n //\n\n vec_benchstr.push(BenchStr {\n\n name: nm,\n\n time: tm,\n\n is_cycle: is_cycle,\n\n time_1k: time_1k,\n\n ..BenchStr::default()\n\n });\n\n }\n\n }\n\n //vec_benchstr.sort_by(|a, b| a.time.partial_cmp(&b.time).unwrap());\n\n vec_benchstr.sort_by(|a, b| a.name.partial_cmp(&b.name).unwrap());\n\n //\n\n Ok(vec_benchstr)\n\n}\n\n\n", "file_path": "xtask/src/shape_benchmark_results.rs", "rank": 28, "score": 5.944899549783823 }, { "content": " println!(\n\n \"| {:<22} | {:>8.3} uc | {:>8.3} uc |\",\n\n bench.name,\n\n bench.time / 0.000001,\n\n bench.time_1k / 0.000001,\n\n );\n\n }\n\n }\n\n //\n\n Ok(())\n\n}\n\n\n\n#[rustfmt::skip]\n\n#[derive(Default)]\n\npub struct BenchStr {\n\n pub name: String, // name\n\n pub time: f64, // seconds\n\n pub is_cycle: bool, // cycles\n\n pub time_1k: f64, // seconds per 1k\n\n pub size: u64, // bytes\n\n pub oh_time: f64, // seconds\n\n pub oh_size: u64, // bytes\n\n}\n\n\n", "file_path": "xtask/src/shape_benchmark_results.rs", "rank": 29, "score": 5.567756273908203 }, { "content": " 5\n\n }\n\n fn talk(&self) -> &'static str {\n\n \"Croak!\"\n\n }\n\n fn sum(&self) -> i32 {\n\n self.0 + self.1 + self.2 + self.3 + self.4\n\n }\n\n fn rem(&self) -> i32 {\n\n self.4 % self.3\n\n }\n\n fn box_clone(&self) -> Box<dyn Animal> {\n\n Box::new(self.clone())\n\n }\n\n}\n", "file_path": "src/trait_obj.rs", "rank": 30, "score": 5.086810967314524 }, { "content": "pub struct Crow(i32, i32, i32, i32);\n\n#[derive(Clone)]\n\npub struct Frog(i32, i32, i32, i32, i32);\n\n\n\nimpl Cat {\n\n pub fn 
new(a: i32) -> Self {\n\n Self(a)\n\n }\n\n}\n\nimpl Dog {\n\n pub fn new(a: i32, b: i32) -> Self {\n\n Self(a, b)\n\n }\n\n}\n\nimpl Duck {\n\n pub fn new(a: i32, b: i32, c: i32) -> Self {\n\n Self(a, b, c)\n\n }\n\n}\n\nimpl Crow {\n", "file_path": "src/trait_obj.rs", "rank": 32, "score": 4.923498767598399 }, { "content": " fn rem(&self) -> i32 {\n\n self.0\n\n }\n\n fn box_clone(&self) -> Box<dyn Animal> {\n\n Box::new(self.clone())\n\n }\n\n}\n\nimpl Animal for Dog {\n\n fn animal_id(&self) -> i32 {\n\n 2\n\n }\n\n fn talk(&self) -> &'static str {\n\n \"Woof!\"\n\n }\n\n fn sum(&self) -> i32 {\n\n self.0 + self.1\n\n }\n\n fn rem(&self) -> i32 {\n\n self.1 % self.0\n\n }\n", "file_path": "src/trait_obj.rs", "rank": 33, "score": 4.860813803158159 }, { "content": "}\n\nimpl Animal for Crow {\n\n fn animal_id(&self) -> i32 {\n\n 4\n\n }\n\n fn talk(&self) -> &'static str {\n\n \"Caw!\"\n\n }\n\n fn sum(&self) -> i32 {\n\n self.0 + self.1 + self.2 + self.3\n\n }\n\n fn rem(&self) -> i32 {\n\n self.3 % self.2\n\n }\n\n fn box_clone(&self) -> Box<dyn Animal> {\n\n Box::new(self.clone())\n\n }\n\n}\n\nimpl Animal for Frog {\n\n fn animal_id(&self) -> i32 {\n", "file_path": "src/trait_obj.rs", "rank": 34, "score": 4.593183347376522 }, { "content": " fn box_clone(&self) -> Box<dyn Animal> {\n\n Box::new(self.clone())\n\n }\n\n}\n\nimpl Animal for Duck {\n\n fn animal_id(&self) -> i32 {\n\n 3\n\n }\n\n fn talk(&self) -> &'static str {\n\n \"Quack!\"\n\n }\n\n fn sum(&self) -> i32 {\n\n self.0 + self.1 + self.2\n\n }\n\n fn rem(&self) -> i32 {\n\n self.2 % self.1\n\n }\n\n fn box_clone(&self) -> Box<dyn Animal> {\n\n Box::new(self.clone())\n\n }\n", "file_path": "src/trait_obj.rs", "rank": 36, "score": 4.269575373970424 }, { "content": " Animal::Cat(_a) => \"Meow!\",\n\n Animal::Dog(_a, _b) => \"Woof!\",\n\n Animal::Duck(_a, _b, _c) => \"Quack!\",\n\n Animal::Crow(_a, _b, _c, _d) => \"Caw!\",\n\n Animal::Frog(_a, _b, _c, _d, _e) => \"Croak!\",\n\n }\n\n }\n\n pub fn 
sum(&self) -> i32 {\n\n match self {\n\n Animal::Cat(a) => *a,\n\n Animal::Dog(a, b) => a + b,\n\n Animal::Duck(a, b, c) => a + b + c,\n\n Animal::Crow(a, b, c, d) => a + b + c + d,\n\n Animal::Frog(a, b, c, d, e) => a + b + c + d + e,\n\n }\n\n }\n\n pub fn rem(&self) -> i32 {\n\n match self {\n\n Animal::Cat(a) => *a,\n\n Animal::Dog(a, b) => b % a,\n\n Animal::Duck(_a, b, c) => c % b,\n\n Animal::Crow(_a, _b, c, d) => d % c,\n\n Animal::Frog(_a, _b, _c, d, e) => e % d,\n\n }\n\n }\n\n}\n", "file_path": "src/enum_obj.rs", "rank": 37, "score": 2.886264532653559 }, { "content": " Some(i) => i,\n\n None => {\n\n let msg = format!(\"can not find size: {}\", name);\n\n return Err(anyhow::Error::msg(msg));\n\n }\n\n };\n\n bench_vec[i].size = size_s.parse::<u64>()?;\n\n if name == \"cmp_null_void\" {\n\n base_time = bench_vec[i].time;\n\n base_size = bench_vec[i].size;\n\n }\n\n }\n\n }\n\n //\n\n for bench in bench_vec {\n\n bench.oh_time = bench.time - base_time;\n\n bench.oh_size = bench.size - base_size;\n\n }\n\n //\n\n Ok(())\n\n}\n\n\n", "file_path": "xtask/src/shape_benchmark_results.rs", "rank": 38, "score": 2.685443077690263 }, { "content": "| trait_obj^vec^sum^90k | 1203.361 kc | 13.371 kc |\n\n\n\nrustc 1.52.0 (88f19c6da 2021-05-03)\n\n| `name` | `bench` | `per 1k` |\n\n|:-----------------------|------------:|------------:|\n\n| enum_obj^ | 0.024 kc | 0.000 kc |\n\n| enum_obj^vec^id^01k | 4.425 kc | 4.425 kc |\n\n| enum_obj^vec^id^08k | 35.634 kc | 4.454 kc |\n\n| enum_obj^vec^id^90k | 583.796 kc | 6.487 kc |\n\n| enum_obj^vec^rem^01k | 11.453 kc | 11.453 kc |\n\n| enum_obj^vec^rem^08k | 90.914 kc | 11.364 kc |\n\n| enum_obj^vec^rem^90k | 1182.439 kc | 13.138 kc |\n\n| enum_obj^vec^sum^01k | 10.670 kc | 10.670 kc |\n\n| enum_obj^vec^sum^08k | 79.980 kc | 9.998 kc |\n\n| enum_obj^vec^sum^90k | 900.310 kc | 10.003 kc |\n\n| trait_obj^ | 0.024 kc | 0.000 kc |\n\n| trait_obj^vec^id^01k | 6.062 kc | 6.062 kc |\n\n| trait_obj^vec^id^08k | 48.345 kc | 6.043 kc 
|\n\n| trait_obj^vec^id^90k | 546.009 kc | 6.067 kc |\n\n| trait_obj^vec^rem^01k | 10.162 kc | 10.162 kc |\n\n| trait_obj^vec^rem^08k | 80.179 kc | 10.022 kc |\n\n| trait_obj^vec^rem^90k | 1280.631 kc | 14.229 kc |\n\n| trait_obj^vec^sum^01k | 8.413 kc | 8.413 kc |\n\n| trait_obj^vec^sum^08k | 66.099 kc | 8.262 kc |\n\n| trait_obj^vec^sum^90k | 1182.313 kc | 13.137 kc |\n", "file_path": "README.md", "rank": 39, "score": 2.2634725716355213 }, { "content": "rustc 1.53.0 (53cb7b09b 2021-06-17)\n\n| `name` | `bench` | `per 1k` |\n\n|:-----------------------|------------:|------------:|\n\n| enum_obj^ | 0.024 kc | 0.000 kc |\n\n| enum_obj^vec^id^01k | 4.358 kc | 4.358 kc |\n\n| enum_obj^vec^id^08k | 35.195 kc | 4.399 kc |\n\n| enum_obj^vec^id^90k | 604.227 kc | 6.714 kc |\n\n| enum_obj^vec^rem^01k | 11.488 kc | 11.488 kc |\n\n| enum_obj^vec^rem^08k | 91.363 kc | 11.420 kc |\n\n| enum_obj^vec^rem^90k | 1198.158 kc | 13.313 kc |\n\n| enum_obj^vec^sum^01k | 10.501 kc | 10.501 kc |\n\n| enum_obj^vec^sum^08k | 60.464 kc | 7.558 kc |\n\n| enum_obj^vec^sum^90k | 930.674 kc | 10.341 kc |\n\n| trait_obj^ | 0.024 kc | 0.000 kc |\n\n| trait_obj^vec^id^01k | 7.092 kc | 7.092 kc |\n\n| trait_obj^vec^id^08k | 56.413 kc | 7.052 kc |\n\n| trait_obj^vec^id^90k | 637.158 kc | 7.080 kc |\n\n| trait_obj^vec^rem^01k | 10.077 kc | 10.077 kc |\n\n| trait_obj^vec^rem^08k | 80.625 kc | 10.078 kc |\n\n| trait_obj^vec^rem^90k | 1364.740 kc | 15.164 kc |\n\n| trait_obj^vec^sum^01k | 8.369 kc | 8.369 kc |\n\n| trait_obj^vec^sum^08k | 66.302 kc | 8.288 kc |\n", "file_path": "README.md", "rank": 40, "score": 2.2634725716355213 }, { "content": " let _r = cmp_polymorphism::sum_rem_enum_objs(&vec);\n\n })\n\n });\n\n }\n\n //\n\n {\n\n let vec = cmp_polymorphism::create_enum_objs(criterion::black_box(8000));\n\n c.bench_function(\"enum_obj^vec^id^08k\", |b| {\n\n b.iter(|| {\n\n let _r = cmp_polymorphism::sum_id_enum_objs(&vec);\n\n })\n\n });\n\n c.bench_function(\"enum_obj^vec^sum^08k\", |b| {\n\n 
b.iter(|| {\n\n let _r = cmp_polymorphism::sum_sum_enum_objs(&vec);\n\n })\n\n });\n\n c.bench_function(\"enum_obj^vec^rem^08k\", |b| {\n\n b.iter(|| {\n\n let _r = cmp_polymorphism::sum_rem_enum_objs(&vec);\n", "file_path": "benches/bench-enum-obj.rs", "rank": 41, "score": 2.2168329489648326 }, { "content": " })\n\n });\n\n }\n\n //\n\n {\n\n let vec = cmp_polymorphism::create_enum_objs(criterion::black_box(90000));\n\n c.bench_function(\"enum_obj^vec^id^90k\", |b| {\n\n b.iter(|| {\n\n let _r = cmp_polymorphism::sum_id_enum_objs(&vec);\n\n })\n\n });\n\n c.bench_function(\"enum_obj^vec^sum^90k\", |b| {\n\n b.iter(|| {\n\n let _r = cmp_polymorphism::sum_sum_enum_objs(&vec);\n\n })\n\n });\n\n c.bench_function(\"enum_obj^vec^rem^90k\", |b| {\n\n b.iter(|| {\n\n let _r = cmp_polymorphism::sum_rem_enum_objs(&vec);\n\n })\n", "file_path": "benches/bench-enum-obj.rs", "rank": 42, "score": 2.209626980966557 }, { "content": " let _r = cmp_polymorphism::sum_rem_trait_objs(&vec);\n\n })\n\n });\n\n }\n\n //\n\n {\n\n let vec = cmp_polymorphism::create_trait_objs(criterion::black_box(8000));\n\n c.bench_function(\"trait_obj^vec^id^08k\", |b| {\n\n b.iter(|| {\n\n let _r = cmp_polymorphism::sum_id_trait_objs(&vec);\n\n })\n\n });\n\n c.bench_function(\"trait_obj^vec^sum^08k\", |b| {\n\n b.iter(|| {\n\n let _r = cmp_polymorphism::sum_sum_trait_objs(&vec);\n\n })\n\n });\n\n c.bench_function(\"trait_obj^vec^rem^08k\", |b| {\n\n b.iter(|| {\n\n let _r = cmp_polymorphism::sum_rem_trait_objs(&vec);\n", "file_path": "benches/bench-trait-obj.rs", "rank": 43, "score": 2.18758474984341 }, { "content": " })\n\n });\n\n }\n\n //\n\n {\n\n let vec = cmp_polymorphism::create_trait_objs(criterion::black_box(90000));\n\n c.bench_function(\"trait_obj^vec^id^90k\", |b| {\n\n b.iter(|| {\n\n let _r = cmp_polymorphism::sum_id_trait_objs(&vec);\n\n })\n\n });\n\n c.bench_function(\"trait_obj^vec^sum^90k\", |b| {\n\n b.iter(|| {\n\n let _r = cmp_polymorphism::sum_sum_trait_objs(&vec);\n\n })\n\n 
});\n\n c.bench_function(\"trait_obj^vec^rem^90k\", |b| {\n\n b.iter(|| {\n\n let _r = cmp_polymorphism::sum_rem_trait_objs(&vec);\n\n })\n", "file_path": "benches/bench-trait-obj.rs", "rank": 44, "score": 2.176478279198126 }, { "content": "use criterion::{criterion_group, criterion_main, Criterion};\n\nuse criterion_cycles_per_byte::CyclesPerByte;\n\n\n", "file_path": "benches/bench-trait-obj.rs", "rank": 45, "score": 2.1231756995416484 }, { "content": "use criterion::{criterion_group, criterion_main, Criterion};\n\nuse criterion_cycles_per_byte::CyclesPerByte;\n\n\n", "file_path": "benches/bench-enum-obj.rs", "rank": 46, "score": 2.1231756995416484 }, { "content": " c.bench_function(\"enum_obj^\", |b| {\n\n b.iter(|| {\n\n let _r = process_one(criterion::black_box(10));\n\n })\n\n });\n\n //\n\n {\n\n let vec = cmp_polymorphism::create_enum_objs(criterion::black_box(1000));\n\n c.bench_function(\"enum_obj^vec^id^01k\", |b| {\n\n b.iter(|| {\n\n let _r = cmp_polymorphism::sum_id_enum_objs(&vec);\n\n })\n\n });\n\n c.bench_function(\"enum_obj^vec^sum^01k\", |b| {\n\n b.iter(|| {\n\n let _r = cmp_polymorphism::sum_sum_enum_objs(&vec);\n\n })\n\n });\n\n c.bench_function(\"enum_obj^vec^rem^01k\", |b| {\n\n b.iter(|| {\n", "file_path": "benches/bench-enum-obj.rs", "rank": 47, "score": 2.0892040095068207 }, { "content": " c.bench_function(\"trait_obj^\", |b| {\n\n b.iter(|| {\n\n let _r = process_one(criterion::black_box(10));\n\n })\n\n });\n\n //\n\n {\n\n let vec = cmp_polymorphism::create_trait_objs(criterion::black_box(1000));\n\n c.bench_function(\"trait_obj^vec^id^01k\", |b| {\n\n b.iter(|| {\n\n let _r = cmp_polymorphism::sum_id_trait_objs(&vec);\n\n })\n\n });\n\n c.bench_function(\"trait_obj^vec^sum^01k\", |b| {\n\n b.iter(|| {\n\n let _r = cmp_polymorphism::sum_sum_trait_objs(&vec);\n\n })\n\n });\n\n c.bench_function(\"trait_obj^vec^rem^01k\", |b| {\n\n b.iter(|| {\n", "file_path": "benches/bench-trait-obj.rs", "rank": 48, "score": 2.043429957810487 }, { 
"content": "//\n\n// ref)\n\n// https://github.com/matklad/cargo-xtask\n\n//\n\nmod shape_benchmark_results;\n\n\n", "file_path": "xtask/src/main.rs", "rank": 49, "score": 1.8147130784616143 }, { "content": " });\n\n }\n\n}\n\n\n\ncriterion_group!(\n\n name = benches;\n\n config = Criterion::default().with_measurement(CyclesPerByte)\n\n .warm_up_time(std::time::Duration::from_millis(300))\n\n .measurement_time(std::time::Duration::from_millis(1500));\n\n targets = criterion_benchmark);\n\ncriterion_main!(benches);\n", "file_path": "benches/bench-enum-obj.rs", "rank": 50, "score": 1.1840814788046088 }, { "content": " });\n\n }\n\n}\n\n\n\ncriterion_group!(\n\n name = benches;\n\n config = Criterion::default().with_measurement(CyclesPerByte)\n\n .warm_up_time(std::time::Duration::from_millis(300))\n\n .measurement_time(std::time::Duration::from_millis(1500));\n\n targets = criterion_benchmark);\n\ncriterion_main!(benches);\n", "file_path": "benches/bench-trait-obj.rs", "rank": 51, "score": 1.1840814788046088 }, { "content": "mod test {\n\n use cmp_polymorphism::enum_obj;\n\n use cmp_polymorphism::trait_obj;\n\n //\n\n #[test]\n\n fn test_size() {\n\n assert_eq!(std::mem::size_of::<enum_obj::Animal>(), 24);\n\n assert_eq!(std::mem::size_of::<trait_obj::Cat>(), 4);\n\n assert_eq!(std::mem::size_of::<trait_obj::Dog>(), 8);\n\n assert_eq!(std::mem::size_of::<trait_obj::Duck>(), 12);\n\n assert_eq!(std::mem::size_of::<trait_obj::Crow>(), 16);\n\n assert_eq!(std::mem::size_of::<trait_obj::Frog>(), 20);\n\n //\n\n assert_eq!(std::mem::size_of::<Box<enum_obj::Animal>>(), 8);\n\n assert_eq!(std::mem::size_of::<Box<dyn trait_obj::Animal>>(), 16);\n\n assert_eq!(std::mem::size_of::<Box<trait_obj::Cat>>(), 8);\n\n assert_eq!(std::mem::size_of::<Box<trait_obj::Dog>>(), 8);\n\n assert_eq!(std::mem::size_of::<Box<trait_obj::Duck>>(), 8);\n\n assert_eq!(std::mem::size_of::<Box<trait_obj::Crow>>(), 8);\n\n assert_eq!(std::mem::size_of::<Box<trait_obj::Frog>>(), 8);\n\n }\n\n}\n", 
"file_path": "tests/test.rs", "rank": 52, "score": 0.45243922997937025 } ]
Rust
src/prefab.rs
FrancisMurillo/amethyst-tiled
fc5713a8a41a9c829fb624c8f782393fde07971c
use amethyst::assets::{Asset, AssetStorage, Handle, Loader, PrefabData, ProgressCounter, Source}; use amethyst::ecs::{Component, Entity, Read, ReadExpect, Write, WriteStorage}; use amethyst::renderer::{SpriteSheet, Texture}; use amethyst::Error; use tiled::{Map, Tileset}; use crate::strategy::{CompressedLoad, LoadStrategy, StrategyDesc}; use crate::{load_tileset_inner, Tilesets}; use std::sync::Arc; #[cfg(feature = "profiler")] use thread_profiler::profile_scope; pub enum TileSetPrefab { Handle(Handle<SpriteSheet>), TileSet(Tileset, Arc<dyn Source>), } impl<'a> PrefabData<'a> for TileSetPrefab { type SystemData = ( Write<'a, Tilesets>, Read<'a, AssetStorage<Texture>>, Write<'a, AssetStorage<SpriteSheet>>, ReadExpect<'a, Loader>, ); type Result = Handle<SpriteSheet>; fn add_to_entity( &self, _entity: Entity, _system_data: &mut Self::SystemData, _entities: &[Entity], _children: &[Entity], ) -> Result<Self::Result, Error> { match self { Self::Handle(handle) => Ok(handle.clone()), _ => unreachable!("load_sub_assets should be called before add_to_entity"), } } fn load_sub_assets( &mut self, progress: &mut ProgressCounter, system_data: &mut Self::SystemData, ) -> Result<bool, Error> { let (tilesets, textures, sheets, loader) = system_data; if let Self::TileSet(set, source) = self { match tilesets.get(&set.name) { Some(handle) => *self = Self::Handle(handle), None => { let sheet = match load_tileset_inner(set, source.clone(), loader, progress, textures) { Ok(v) => v, Err(e) => return Err(Error::from_string(format!("{:}", e))), }; let handle = sheets.insert(sheet); tilesets.push(set.name.to_owned(), handle.clone()); *self = Self::Handle(handle); return Ok(true); } } } Ok(false) } } pub enum TileMapPrefab<S: StrategyDesc = CompressedLoad> { Result(S::Result), Map(Map, Arc<dyn Source>), } impl<'a, T: LoadStrategy<'a>> PrefabData<'a> for TileMapPrefab<T> where T::Result: Clone + Component + Asset, { type SystemData = (T::SystemData, WriteStorage<'a, <T as 
StrategyDesc>::Result>); type Result = (); fn add_to_entity( &self, entity: Entity, system_data: &mut Self::SystemData, _entities: &[Entity], _children: &[Entity], ) -> Result<(), Error> { #[cfg(feature = "profiler")] profile_scope!("add_tilemap_to_entity"); let (_, storage) = system_data; match self { TileMapPrefab::Result(v) => { storage.insert(entity, v.clone())?; Ok(()) } _ => unreachable!("load_sub_assets should be called before add_to_entity"), } } fn load_sub_assets( &mut self, progress: &mut ProgressCounter, system_data: &mut Self::SystemData, ) -> Result<bool, Error> { #[cfg(feature = "profiler")] profile_scope!("load_tilemap_assets"); match self { TileMapPrefab::Map(map, source) => { *self = Self::Result(T::load(map, source.clone(), progress, &mut system_data.0)?); Ok(true) } _ => Ok(false), } } }
use amethyst::assets::{Asset, AssetStorage, Handle, Loader, PrefabData, ProgressCounter, Source}; use amethyst::ecs::{Component, Entity, Read, ReadExpect, Write, WriteStorage}; use amethyst::renderer::{SpriteSheet, Texture}; use amethyst::Error; use tiled::{Map, Tileset}; use crate::strategy::{CompressedLoad, LoadStrategy, StrategyDesc}; use crate::{load_tileset_inner, Tilesets}; use std::sync::Arc; #[cfg(feature = "profiler")] use thread_profiler::profile_scope; pub enum TileSetPrefab { Handle(Handle<SpriteSheet>), TileSet(Tileset, Arc<dyn Source>), } impl<'a> PrefabData<'a> for TileSetPrefab { type SystemData = ( Write<'a, Tilesets>, Read<'a, AssetStorage<Texture>>, Write<'a, AssetStorage<SpriteSheet>>, ReadExpect<'a, Loader>, ); type Result = Handle<SpriteSheet>; fn add_to_entity( &self, _entity: Entity, _system_data: &mut Self::SystemData, _entities: &[Entity], _children: &[Entity], ) -> Result<Self::Result, Error> { match self { Self::Handle(handle) => Ok(handle.clone()), _ => unreachable!("load_sub_assets should be called before add_to_entity"), } } fn load_sub_assets( &mut self, progress: &mut ProgressCounter, system_data: &mut Self::SystemData, ) -> Result<bool, Error> { let (tilesets, textures, sheets, loader) = system_data; if let Self::TileSet(set, source) = self { match tilesets.get(&set.name) { Some(handle) => *self = Self::Handle(handle), None => { let sheet = match load_tileset_inner(set, source.clone(), loader, progress, textures) { Ok(v) => v, Err(e) => return Err(Error::from_string(format!("{:}", e))), }; let handle = sheets.insert(sheet); tilesets.push(set.name.to_owned(), handle.clone()); *self = Self::Handle(handle); return Ok(true); } } } Ok(false) } } pub enum TileMapPrefab<S: StrategyDesc = CompressedLoad> { Result(S::Result), Map(Map, Arc<dyn Source>), } impl<'a, T: LoadStrategy<'a>> PrefabData<'a> for TileMapPrefab<T> where T::Result: Clone + Component + Asset, { type SystemData = (T::SystemData, WriteStorage<'a, <T as 
StrategyDesc>::Result>); type Result = ();
fn load_sub_assets( &mut self, progress: &mut ProgressCounter, system_data: &mut Self::SystemData, ) -> Result<bool, Error> { #[cfg(feature = "profiler")] profile_scope!("load_tilemap_assets"); match self { TileMapPrefab::Map(map, source) => { *self = Self::Result(T::load(map, source.clone(), progress, &mut system_data.0)?); Ok(true) } _ => Ok(false), } } }
fn add_to_entity( &self, entity: Entity, system_data: &mut Self::SystemData, _entities: &[Entity], _children: &[Entity], ) -> Result<(), Error> { #[cfg(feature = "profiler")] profile_scope!("add_tilemap_to_entity"); let (_, storage) = system_data; match self { TileMapPrefab::Result(v) => { storage.insert(entity, v.clone())?; Ok(()) } _ => unreachable!("load_sub_assets should be called before add_to_entity"), } }
function_block-full_function
[ { "content": "pub fn pack_tileset(set: &Tileset, source: Arc<dyn Source>) -> Result<SpriteSheet, Error> {\n\n let mut sprites = Vec::new();\n\n\n\n for image in &set.images {\n\n sprites.extend(pack_image(\n\n image,\n\n source.clone(),\n\n TileSpec {\n\n width: set.tile_width,\n\n height: set.tile_height,\n\n margin: set.margin,\n\n spacing: set.spacing,\n\n },\n\n )?);\n\n }\n\n\n\n // There is guaranteed to be exactly one resulting sprite sheet\n\n Ok(pack::<SimplePacker>(sprites, 4, ()).remove(0))\n\n}\n\n\n\npub struct TileSpec {\n\n pub width: u32,\n\n pub height: u32,\n\n pub margin: u32,\n\n pub spacing: u32,\n\n}\n\n\n", "file_path": "src/packing.rs", "rank": 0, "score": 115146.93061046876 }, { "content": "/// Pack a list of tile sets while paying attention to the first grid id\n\npub fn pack_tileset_vec(sets: &[Tileset], source: Arc<dyn Source>) -> Result<SpriteSheet, Error> {\n\n #[cfg(feature = \"profiler\")]\n\n profile_scope!(\"pack_tileset_vec\");\n\n\n\n let mut sprites = Vec::new();\n\n let tile_size = (sets[0].tile_width, sets[0].tile_height);\n\n\n\n // Add the see through placeholder.\n\n sprites.push(InputSprite {\n\n bytes: vec![0; (tile_size.0 * tile_size.1 * 4) as usize],\n\n dimensions: tile_size,\n\n });\n\n\n\n for set in sets {\n\n for image in &set.images {\n\n sprites.extend(pack_image(\n\n image,\n\n source.clone(),\n\n TileSpec {\n\n width: set.tile_width,\n", "file_path": "src/packing.rs", "rank": 1, "score": 112310.16403276738 }, { "content": "/// Open the image and removes the transparent color\n\npub fn open_image(img: &TileImage, source: Arc<dyn Source>) -> Result<RgbaImage, Error> {\n\n #[cfg(feature = \"profiler\")]\n\n profile_scope!(\"open_image\");\n\n\n\n let bytes = {\n\n #[cfg(feature = \"profiler\")]\n\n profile_scope!(\"load_image_source\");\n\n\n\n match source.load(&img.source) {\n\n Ok(v) => v,\n\n Err(_) => {\n\n return Err(Error::from_string(format!(\n\n \"Unable to open image path: {:?}\",\n\n &img.source\n\n 
)));\n\n }\n\n }\n\n };\n\n\n\n let mut image = {\n", "file_path": "src/packing.rs", "rank": 2, "score": 79333.17489135731 }, { "content": "pub fn pack_sparse_tileset_vec(\n\n sets: &[Tileset],\n\n source: Arc<dyn Source>,\n\n usage: &[u32],\n\n) -> Result<SpriteSheet, Error> {\n\n #[cfg(feature = \"profiler\")]\n\n profile_scope!(\"pack_sparse_tileset_vec\");\n\n\n\n let mut sprites = Vec::new();\n\n let tile_size = (sets[0].tile_width, sets[0].tile_height);\n\n\n\n // Add the see through placeholder.\n\n sprites.push(InputSprite {\n\n bytes: vec![0; (tile_size.0 * tile_size.1 * 4) as usize],\n\n dimensions: tile_size,\n\n });\n\n\n\n // Don't load GID 0\n\n let mut tile_index = 1;\n\n\n", "file_path": "src/packing.rs", "rank": 3, "score": 62529.91212764799 }, { "content": "pub fn load_tileset<P: AsRef<Path>>(\n\n path: P,\n\n loader: &Loader,\n\n progress: &mut ProgressCounter,\n\n storage: &AssetStorage<Texture>,\n\n) -> Result<SpriteSheet, Error> {\n\n let tileset = parse_tileset(File::open(&path)?, 1)?;\n\n\n\n load_tileset_inner(\n\n &tileset,\n\n Arc::new(Directory::new(path.as_ref())),\n\n loader,\n\n progress,\n\n storage,\n\n )\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 4, "score": 55642.4369246975 }, { "content": "pub fn load_cached_tileset<P: AsRef<Path>>(\n\n path: P,\n\n loader: &Loader,\n\n progress: &mut ProgressCounter,\n\n storage: &AssetStorage<Texture>,\n\n sprite_sheets: &mut AssetStorage<SpriteSheet>,\n\n tilesets: &Tilesets,\n\n) -> Result<Handle<SpriteSheet>, Error> {\n\n let tileset = parse_tileset(File::open(&path)?, 1)?;\n\n\n\n match tilesets.get(&tileset.name) {\n\n Some(handle) => Ok(handle),\n\n None => {\n\n let sheet = load_tileset_inner(\n\n &tileset,\n\n Arc::new(Directory::new(path.as_ref())),\n\n loader,\n\n progress,\n\n storage,\n\n )?;\n", "file_path": "src/lib.rs", "rank": 5, "score": 53571.70061175292 }, { "content": "pub fn pack_image(\n\n img: &TileImage,\n\n source: Arc<dyn Source>,\n\n spec: TileSpec,\n\n) -> 
Result<Vec<InputSprite>, Error> {\n\n #[cfg(feature = \"profiler\")]\n\n profile_scope!(\"pack_image\");\n\n\n\n let mut image = open_image(img, source)?;\n\n\n\n let TileSpec {\n\n width,\n\n height,\n\n margin,\n\n spacing,\n\n } = spec;\n\n\n\n let mut sprites = Vec::new();\n\n for y in (margin..image.height() + margin).step_by((height + spacing) as usize) {\n\n for x in (margin..image.width() + margin).step_by((width + spacing) as usize) {\n\n sprites.push(InputSprite {\n\n dimensions: (width, height),\n\n bytes: image.sub_image(x, y, width, height).to_image().into_raw(),\n\n })\n\n }\n\n }\n\n\n\n Ok(sprites)\n\n}\n\n\n", "file_path": "src/packing.rs", "rank": 6, "score": 51711.83483122148 }, { "content": "fn main() -> amethyst::Result<()> {\n\n amethyst::Logger::from_config(Default::default())\n\n .level_for(\"amethyst_tiles\", log::LevelFilter::Warn)\n\n .start();\n\n\n\n let app_root = application_root_dir()?;\n\n let assets_directory = app_root.join(\"examples/assets\");\n\n let display_config_path = app_root.join(\"examples/concept/resources/display_config.ron\");\n\n\n\n let game_data = GameDataBuilder::default()\n\n .with_system_desc(PrefabLoaderSystemDesc::<TileMapPrefab>::default(), \"\", &[])\n\n .with_bundle(TransformBundle::new())?\n\n .with_bundle(\n\n InputBundle::<StringBindings>::new()\n\n .with_bindings_from_file(\"examples/concept/resources/input.ron\")?,\n\n )?\n\n .with(CameraMovementSystem::default(), \"movement\", &[])\n\n .with_bundle(\n\n RenderingBundle::<DefaultBackend>::new()\n\n .with_plugin(\n\n RenderToWindow::from_config_path(display_config_path)?.with_clear([1.0; 4]),\n\n )\n\n .with_plugin(RenderTiles2D::<TileGid, FlatEncoder>::default()),\n\n )?;\n\n\n\n let mut game = Application::build(assets_directory, Example)?.build(game_data)?;\n\n game.run();\n\n Ok(())\n\n}\n", "file_path": "examples/concept/main.rs", "rank": 7, "score": 51246.76231300902 }, { "content": "/// Returns the necessary import sprites, the number of tiles 
within this image, and the number of\n\n/// gids filled by this call.\n\npub fn pack_sparse_image(\n\n img: &TileImage,\n\n source: Arc<dyn Source>,\n\n spec: TileSpec,\n\n first_gid: u32,\n\n usage: &[u32],\n\n) -> Result<(Vec<InputSprite>, u32, usize), Error> {\n\n #[cfg(feature = \"profiler\")]\n\n profile_scope!(\"pack_sparse_image\");\n\n\n\n let mut image = open_image(img, source)?;\n\n\n\n let TileSpec {\n\n width,\n\n height,\n\n margin,\n\n spacing,\n\n } = spec;\n\n\n\n let grid_width = (image.width() - 2 * margin) / (width + spacing);\n", "file_path": "src/packing.rs", "rank": 8, "score": 49785.84592422577 }, { "content": "pub trait StrategyDesc {\n\n /// The type of output this strategy will produce\n\n type Result;\n\n}\n\n\n", "file_path": "src/strategy.rs", "rank": 9, "score": 42353.077051878776 }, { "content": "fn load_sprite_sheet(\n\n packed: PackedSpriteSheet,\n\n loader: &Loader,\n\n progress: &mut ProgressCounter,\n\n storage: &AssetStorage<Texture>,\n\n) -> SpriteSheet {\n\n #[cfg(feature = \"profiler\")]\n\n profile_scope!(\"load_sprite_sheet\");\n\n\n\n let sprites = encode::<AmethystOrderedFormat>(&packed, ());\n\n\n\n let (width, height) = packed.dimensions;\n\n\n\n let texture_builder = TextureBuilder::new()\n\n .with_kind(Kind::D2(width, height, 1, 1))\n\n .with_view_kind(ViewKind::D2)\n\n .with_data_width(width)\n\n .with_data_height(height)\n\n .with_sampler_info(SamplerInfo::new(Filter::Nearest, WrapMode::Clamp))\n\n .with_raw_data(packed.bytes, Rgba8Srgb::FORMAT);\n\n\n\n SpriteSheet {\n\n texture: loader.load_from_data(texture_builder.into(), progress, storage),\n\n sprites,\n\n }\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 10, "score": 41989.73315495935 }, { "content": "fn load_tileset_inner(\n\n _tileset: &Tileset,\n\n _source: Arc<dyn Source>,\n\n _loader: &Loader,\n\n _progress: &mut ProgressCounter,\n\n _storage: &AssetStorage<Texture>,\n\n) -> Result<SpriteSheet, Error> {\n\n unimplemented!()\n\n}\n\n\n", "file_path": 
"src/lib.rs", "rank": 11, "score": 41735.485967703964 }, { "content": "pub trait LoadStrategy<'a>: StrategyDesc {\n\n /// The data to request when loading a map\n\n type SystemData: SystemData<'a>;\n\n\n\n // Preform the load operation using a given map and source location\n\n fn load(\n\n map: &Map,\n\n source: Arc<dyn Source>,\n\n progress: &mut ProgressCounter,\n\n system_data: &mut Self::SystemData,\n\n ) -> Result<<Self as StrategyDesc>::Result, Error>;\n\n}\n\n\n\n/// A load strategy that does not take steps to compress the tile sets. This is the most efficient\n\n/// approach for maps that use all or near all of the tiles in a tileset.\n\n#[derive(Debug, Copy, Clone, Default)]\n\npub struct FlatLoad<E: CoordinateEncoder = FlatEncoder>(PhantomData<E>);\n\n\n\nimpl<E: CoordinateEncoder> StrategyDesc for FlatLoad<E> {\n\n type Result = TileMap<TileGid, E>;\n", "file_path": "src/strategy.rs", "rank": 12, "score": 36170.1901785634 }, { "content": "/// Get an adjusted path based on a reference\n\nfn shift_path(reference: &str, path: &str) -> String {\n\n let mut path_buf = PathBuf::from(reference);\n\n path_buf.set_file_name(path);\n\n path_buf.to_str().unwrap().to_owned()\n\n}\n", "file_path": "src/format.rs", "rank": 13, "score": 18730.167490056254 }, { "content": "fn collect_gid_usage(map: &Map) -> BTreeSet<u32> {\n\n #[cfg(feature = \"profiler\")]\n\n profile_scope!(\"collect_gid_usage\");\n\n\n\n let mut gids = BTreeSet::new();\n\n for layer in &map.layers {\n\n for row in &layer.tiles {\n\n for tile in row {\n\n gids.insert(tile.gid);\n\n }\n\n }\n\n }\n\n gids\n\n}\n\n\n\n/// Loads a tilemap into memory as a single texture. This is by far the best option for performance\n\n/// when a map is never altered. 
Keep in mind that this approach will compress all of the maps\n\n/// layers together.\n\n#[derive(Debug, Copy, Clone, Default)]\n\npub struct StaticLoad;\n", "file_path": "src/strategy.rs", "rank": 14, "score": 18044.399830245562 }, { "content": "}\n\n\n\nimpl<'a, E: CoordinateEncoder> LoadStrategy<'a> for FlatLoad<E> {\n\n type SystemData = (\n\n ReadExpect<'a, Loader>,\n\n Read<'a, AssetStorage<Texture>>,\n\n Write<'a, AssetStorage<SpriteSheet>>,\n\n );\n\n\n\n fn load(\n\n map: &Map,\n\n source: Arc<dyn Source>,\n\n progress: &mut ProgressCounter,\n\n system_data: &mut Self::SystemData,\n\n ) -> Result<Self::Result, Error> {\n\n #[cfg(feature = \"profiler\")]\n\n profile_scope!(\"FlatLoad::load\");\n\n\n\n let (loader, storage, sheets) = system_data;\n\n\n", "file_path": "src/strategy.rs", "rank": 17, "score": 24.15391272252209 }, { "content": " type Result = TileMap<TileGid, E>;\n\n}\n\n\n\nimpl<'a, E: CoordinateEncoder> LoadStrategy<'a> for CompressedLoad<E> {\n\n type SystemData = (\n\n ReadExpect<'a, Loader>,\n\n Read<'a, AssetStorage<Texture>>,\n\n Write<'a, AssetStorage<SpriteSheet>>,\n\n );\n\n\n\n fn load(\n\n map: &Map,\n\n source: Arc<dyn Source>,\n\n progress: &mut ProgressCounter,\n\n system_data: &mut Self::SystemData,\n\n ) -> Result<Self::Result, Error> {\n\n #[cfg(feature = \"profiler\")]\n\n profile_scope!(\"CompressedLoad::load\");\n\n\n\n let (loader, storage, sheets) = system_data;\n", "file_path": "src/strategy.rs", "rank": 19, "score": 23.8702375813181 }, { "content": "use amethyst::assets::{AssetStorage, Handle, Loader, ProgressCounter, Source};\n\nuse amethyst::core::math::{Point3, Vector3};\n\nuse amethyst::ecs::{Read, ReadExpect, SystemData, Write};\n\nuse amethyst::renderer::{SpriteSheet, Texture};\n\nuse amethyst::tiles::{CoordinateEncoder, FlatEncoder, MapStorage, TileMap};\n\nuse amethyst::Error;\n\nuse tiled::{Map, Tileset};\n\n\n\nuse crate::packing::{pack_sparse_tileset_vec, pack_tileset_vec};\n\nuse crate::{load_sprite_sheet, 
TileGid};\n\nuse std::collections::{BTreeSet, HashMap};\n\nuse std::marker::PhantomData;\n\nuse std::sync::Arc;\n\n\n\n#[cfg(feature = \"profiler\")]\n\nuse thread_profiler::profile_scope;\n\n\n", "file_path": "src/strategy.rs", "rank": 21, "score": 18.044139859511606 }, { "content": "use std::collections::HashMap;\n\nuse std::fs::File;\n\nuse std::path::Path;\n\nuse std::sync::{Arc, Mutex};\n\n\n\nuse amethyst::assets::{AssetStorage, Directory, Handle, Loader, ProgressCounter, Source};\n\nuse amethyst::core::math::Point3;\n\nuse amethyst::ecs::World;\n\n\n\nuse amethyst::error::Error;\n\nuse amethyst::renderer::rendy::{\n\n hal::image::{Filter, Kind, SamplerInfo, ViewKind, WrapMode},\n\n texture::{pixel::AsPixel, pixel::Rgba8Srgb, TextureBuilder},\n\n};\n\nuse amethyst::renderer::{SpriteSheet, Texture};\n\nuse amethyst::tiles::Tile;\n\nuse sheep::{encode, SpriteSheet as PackedSpriteSheet};\n\nuse tiled::{parse_tileset, Tileset};\n\n\n\n#[cfg(feature = \"profiler\")]\n", "file_path": "src/lib.rs", "rank": 23, "score": 16.10635376904588 }, { "content": "\n\nimpl StrategyDesc for StaticLoad {\n\n type Result = Handle<Texture>;\n\n}\n\n\n\nimpl<'a> LoadStrategy<'a> for StaticLoad {\n\n type SystemData = ();\n\n\n\n fn load(\n\n _map: &Map,\n\n _source: Arc<dyn Source>,\n\n _progress: &mut ProgressCounter,\n\n _system_data: &mut Self::SystemData,\n\n ) -> Result<Self::Result, Error> {\n\n unimplemented!()\n\n }\n\n}\n\n// ), error: Compat { error: ErrorMessage { msg: \"Format Rgba8Srgb is not supported and no suitable conversion found.\" }\n", "file_path": "src/strategy.rs", "rank": 24, "score": 14.95402401187692 }, { "content": " }\n\n\n\n fn import(\n\n &self,\n\n name: String,\n\n source: Arc<dyn Source>,\n\n create_reload: Option<Box<dyn Format<TileMapPrefab<T>>>>,\n\n ) -> Result<FormatValue<TileMapPrefab<T>>, Error> {\n\n #[cfg(feature = \"profiler\")]\n\n profile_scope!(\"import_tiled_format\");\n\n\n\n let (b, m) = source.load_with_metadata(&name)?;\n\n\n\n let 
mut map = match parse(&b[..]) {\n\n Ok(v) => v,\n\n Err(e) => return Err(Error::new(e)),\n\n };\n\n\n\n for tileset in &mut map.tilesets {\n\n if let TilesetRef::Path(path, gid) = tileset {\n", "file_path": "src/format.rs", "rank": 25, "score": 14.453038938275597 }, { "content": "//! Module to help pack tile sets and convert them into amethyst\n\n\n\nuse amethyst::assets::Source;\n\nuse amethyst::error::Error;\n\nuse amethyst::renderer::sprite::Sprite;\n\nuse image::{load_from_memory, GenericImage, Pixel, Rgba, RgbaImage};\n\nuse sheep::{\n\n pack, Format, InputSprite, Packer, PackerResult, SimplePacker, SpriteAnchor, SpriteData,\n\n SpriteSheet,\n\n};\n\nuse std::sync::Arc;\n\nuse tiled::Image as TileImage;\n\nuse tiled::Tileset;\n\n\n\n#[cfg(feature = \"profiler\")]\n\nuse thread_profiler::profile_scope;\n\n\n\npub struct AmethystOrderedFormat;\n\n\n\nimpl Format for AmethystOrderedFormat {\n", "file_path": "src/packing.rs", "rank": 27, "score": 12.769300628850733 }, { "content": "use std::path::PathBuf;\n\nuse std::sync::Arc;\n\n\n\nuse amethyst::assets::{Format, FormatValue, Prefab, SingleFile, Source};\n\nuse amethyst::Error;\n\nuse image::{load_from_memory, DynamicImage, ImageError, RgbaImage};\n\nuse tiled::{parse, parse_tileset, TilesetRef};\n\n\n\nuse crate::prefab::TileMapPrefab;\n\nuse crate::strategy::StrategyDesc;\n\n\n\n#[cfg(feature = \"profiler\")]\n\nuse thread_profiler::profile_scope;\n\n\n\n/// Format for loading *.tmx and *.tsx files\n\n#[derive(Debug, Copy, Clone)]\n\npub struct TiledFormat;\n\n\n\nimpl<T: 'static> Format<Prefab<T>> for TiledFormat\n\nwhere\n", "file_path": "src/format.rs", "rank": 28, "score": 12.18716707012955 }, { "content": " let handle = sprite_sheets.insert(sheet);\n\n tilesets.push(tileset.name.to_owned(), handle.clone());\n\n\n\n Ok(handle)\n\n }\n\n }\n\n}\n\n\n\n#[derive(Default)]\n\npub struct Tilesets(Mutex<HashMap<String, Handle<SpriteSheet>>>);\n\n\n\nimpl Tilesets {\n\n pub fn push(&self, set_name: String, 
handle: Handle<SpriteSheet>) {\n\n self.0.lock().unwrap().insert(set_name, handle);\n\n }\n\n\n\n pub fn get(&self, set_name: &str) -> Option<Handle<SpriteSheet>> {\n\n self.0.lock().unwrap().get(set_name).cloned()\n\n }\n\n}\n", "file_path": "src/lib.rs", "rank": 29, "score": 11.928827973488751 }, { "content": " let packed = pack_tileset_vec(\n\n &map.tilesets\n\n .iter()\n\n .map(|x| x.unwrap().clone())\n\n .collect::<Vec<Tileset>>()[..],\n\n source,\n\n )?;\n\n\n\n let map_size = Vector3::new(map.width, map.height, map.layers.len() as u32);\n\n let tile_size = Vector3::new(map.tile_width, map.tile_height, 1);\n\n let sheet = load_sprite_sheet(packed, loader, progress, storage);\n\n\n\n let mut tilemap = TileMap::new(map_size, tile_size, Some(sheets.insert(sheet)));\n\n\n\n for layer in &map.layers {\n\n for y in 0..layer.tiles.len() {\n\n for x in 0..layer.tiles[y].len() {\n\n match tilemap.get_mut(&Point3::new(x as u32, y as u32, layer.layer_index)) {\n\n Some(v) => *v = TileGid(layer.tiles[y][x].gid as usize),\n\n None => unreachable!(\"The map file was corrupt\"),\n", "file_path": "src/strategy.rs", "rank": 30, "score": 11.537644544202907 }, { "content": "use amethyst::{\n\n assets::{PrefabLoader, PrefabLoaderSystemDesc},\n\n core::{math::Vector3, Transform, TransformBundle},\n\n ecs::{Entities, Join, Read, ReadStorage, System, WriteStorage},\n\n input::{InputBundle, InputHandler, StringBindings},\n\n prelude::*,\n\n renderer::{\n\n camera::{ActiveCamera, Camera},\n\n types::DefaultBackend,\n\n RenderToWindow, RenderingBundle,\n\n },\n\n tiles::{FlatEncoder, RenderTiles2D},\n\n utils::application_root_dir,\n\n window::ScreenDimensions,\n\n};\n\n\n\nuse tiled_support::{TileGid, TileMapPrefab, TiledFormat};\n\n\n\n#[derive(Default)]\n\npub struct CameraMovementSystem;\n", "file_path": "examples/concept/main.rs", "rank": 31, "score": 9.820234643709911 }, { "content": "use thread_profiler::profile_scope;\n\n\n\nmod format;\n\npub mod packing;\n\nmod 
prefab;\n\npub mod strategy;\n\n\n\nuse packing::AmethystOrderedFormat;\n\n\n\npub use format::TiledFormat;\n\npub use prefab::*;\n\npub use strategy::{CompressedLoad, FlatLoad, StaticLoad};\n\n\n\n/// The grid id of a tile\n\n#[repr(transparent)]\n\n#[derive(Copy, Clone, Hash, Default)]\n\npub struct TileGid(usize);\n\n\n\nimpl From<usize> for TileGid {\n\n fn from(idx: usize) -> Self {\n", "file_path": "src/lib.rs", "rank": 32, "score": 9.628008814336136 }, { "content": " let tile_usage: Vec<u32> = collect_gid_usage(map).into_iter().collect();\n\n\n\n let mut gid_updater = HashMap::new();\n\n\n\n for (new_index, old_index) in tile_usage.iter().enumerate() {\n\n gid_updater.insert(*old_index, new_index);\n\n }\n\n\n\n let packed = pack_sparse_tileset_vec(\n\n &map.tilesets\n\n .iter()\n\n .map(|x| x.unwrap().clone())\n\n .collect::<Vec<Tileset>>()[..],\n\n source,\n\n &tile_usage[..],\n\n )?;\n\n\n\n let map_size = Vector3::new(map.width, map.height, map.layers.len() as u32);\n\n let tile_size = Vector3::new(map.tile_width, map.tile_height, 1);\n\n let sheet = load_sprite_sheet(packed, loader, progress, storage);\n", "file_path": "src/strategy.rs", "rank": 33, "score": 8.843215097018675 }, { "content": "# Tiled file format support for Amethyst\n\nThis crate adds prefab loading functionality for Tiled tilemaps into the amethyst game engine.\n\n\n\n## Usage\n\n### Initialization\n\nWhen creating your game, initialize the the prefab loader and include the tile map rendering pass `amethyst_tiles::RenderTiles2D`.\n\n```rust\n\nuse tiled_support::{TileGid, TileMapPrefab};\n\nuse amethyst::tiles::{RenderTiles2D, FlatEncoder};\n\n\n\nlet game_data = GameDataBuilder::default()\n\n .with_system_desc(PrefabLoaderSystemDesc::<TileMapPrefab>::default(), \"\", &[])\n\n .with_bundle(\n\n RenderingBundle::<DefaultBackend>::new()\n\n .with_plugin(RenderTiles2D::<TileGid, FlatEncoder>::default()),\n\n )?;\n\n```\n\n\n\n> **Note:** `FlatEncoder` is mentioned explicitly due to a bug 
in the default encoder `MortonEncoder2D`. This bug will likely be fixed very soon.\n\n\n\n### Loading a tile map\n\nA tile map can be loaded like any other prefab and added to a texture.\n\n```rust\n\nuse tiled_support::{TiledFormat, TileMapPrefab};\n\n\n\nlet prefab_handle =\n\n world.exec(|loader: PrefabLoader<'_, TileMapPrefab>| {\n\n loader.load(\"prefab/example_map.tmx\", TiledFormat, ())\n\n });\n\n\n\nlet _map_entity = world\n\n .create_entity()\n\n .with(prefab_handle)\n\n .with(Transform::default())\n\n .build();\n\n```\n\n\n\n\n\n\n\n## Features to add:\n\nA list of features I would like to add in the future, but havent had time to do yet.\n\n- [x] Only pack sprites that used in the tile map to save memory and load time spent packing ignored sprites\n\n- [x] Support all image/pixel types (Currently only supports Rgba8) ***Currently all images are converted to Rgba8***\n\n- [x] Use `amethyst::error::Error` everywhere when parsing for consistency\n\n- [ ] Mark flipped tiles so they can be correctly managed by amethyst\n\n- [ ] Support animation sequences via tiles that swap textures periodically\n\n- [ ] Create an easy way to access layer objects stored in tile maps\n\n- [ ] Support image layers\n\n\n\nPlease make an issue if I'm forgetting something important in this list\n\n \n", "file_path": "readme.md", "rank": 34, "score": 8.704693785829875 }, { "content": " #[cfg(feature = \"profiler\")]\n\n profile_scope!(\"pack_tiled_image\");\n\n\n\n let tile_dimensions = match sprites.get(0) {\n\n Some(v) => v.dimensions,\n\n None => {\n\n return vec![PackerResult {\n\n dimensions: (0, 0),\n\n anchors: Vec::new(),\n\n }]\n\n }\n\n };\n\n\n\n let (width, height) = tile_dimensions;\n\n\n\n let mut num = 0;\n\n let mut anchors = Vec::with_capacity(sprites.len());\n\n\n\n for sprite in sprites {\n\n anchors.push(SpriteAnchor {\n", "file_path": "src/packing.rs", "rank": 35, "score": 8.58333461541914 }, { "content": "impl<'s> System<'s> for CameraMovementSystem {\n\n 
type SystemData = (\n\n Read<'s, ActiveCamera>,\n\n Entities<'s>,\n\n ReadStorage<'s, Camera>,\n\n WriteStorage<'s, Transform>,\n\n Read<'s, InputHandler<StringBindings>>,\n\n );\n\n\n\n fn run(&mut self, (active_camera, entities, cameras, mut transforms, input): Self::SystemData) {\n\n let x_move = input.axis_value(\"camera_x\").unwrap();\n\n let y_move = input.axis_value(\"camera_y\").unwrap();\n\n let z_move = input.axis_value(\"camera_z\").unwrap();\n\n let z_move_scale = input.axis_value(\"camera_scale\").unwrap();\n\n\n\n if x_move != 0.0 || y_move != 0.0 || z_move != 0.0 || z_move_scale != 0.0 {\n\n let mut camera_join = (&cameras, &mut transforms).join();\n\n if let Some((_, camera_transform)) = active_camera\n\n .entity\n\n .and_then(|a| camera_join.get(a, &entities))\n", "file_path": "examples/concept/main.rs", "rank": 36, "score": 8.486001234282261 }, { "content": " }\n\n }\n\n}\n\n\n\nimpl Format<RgbaImage> for TiledFormat {\n\n fn name(&self) -> &'static str {\n\n \"Rgba Image\"\n\n }\n\n\n\n fn import_simple(&self, bytes: Vec<u8>) -> Result<RgbaImage, Error> {\n\n match load_from_memory(&bytes[..])? {\n\n DynamicImage::ImageRgba8(v) => Ok(v),\n\n _ => Err(Error::from_string(\n\n \"Unable to read non rgba8 images\".to_owned(),\n\n )),\n\n }\n\n }\n\n}\n\n\n\n/// Get an adjusted path based on a reference\n", "file_path": "src/format.rs", "rank": 37, "score": 7.956162899734954 }, { "content": " }\n\n }\n\n }\n\n }\n\n\n\n Ok(tilemap)\n\n }\n\n}\n\n\n\n/// A version of FlatLoad that tries to save time and memory by skipping unused tiles when\n\n/// packing the sprite sheet and not leaving the unused tiles stored in memory. 
On the other hand,\n\n/// if most or all of the tiles are used in the map it the regular version will be faster and use a\n\n/// similar amount of memory.\n\n///\n\n/// In random experimentation, this method was ~2x (23.5s -> 12.6s) as fast as FlatLoad to load the\n\n/// example.\n\n#[derive(Debug, Copy, Clone, Default)]\n\npub struct CompressedLoad<E: CoordinateEncoder = FlatEncoder>(PhantomData<E>);\n\n\n\nimpl<E: CoordinateEncoder> StrategyDesc for CompressedLoad<E> {\n", "file_path": "src/strategy.rs", "rank": 38, "score": 7.878579639869073 }, { "content": " let file = shift_path(&name, path);\n\n let source = source.load(&file)?;\n\n\n\n let mut set = parse_tileset(&source[..], *gid)?;\n\n\n\n for image in &mut set.images {\n\n image.source = shift_path(&file, &image.source);\n\n }\n\n\n\n *tileset = TilesetRef::TileSet(set);\n\n }\n\n }\n\n\n\n if let Some(boxed_format) = create_reload {\n\n Ok(FormatValue {\n\n data: TileMapPrefab::Map(map, source.clone()),\n\n reload: Some(Box::new(SingleFile::new(boxed_format, m, name, source))),\n\n })\n\n } else {\n\n Ok(FormatValue::data(TileMapPrefab::Map(map, source)))\n", "file_path": "src/format.rs", "rank": 39, "score": 7.5180424119768245 }, { "content": " TiledFormat: Format<T>,\n\n{\n\n fn name(&self) -> &'static str {\n\n <Self as Format<T>>::name(self)\n\n }\n\n\n\n fn import(\n\n &self,\n\n name: String,\n\n source: Arc<dyn Source>,\n\n _: Option<Box<dyn Format<Prefab<T>>>>,\n\n ) -> Result<FormatValue<Prefab<T>>, Error> {\n\n let value = <Self as Format<T>>::import(self, name, source, None)?;\n\n Ok(FormatValue::data(Prefab::new_main(value.data)))\n\n }\n\n}\n\n\n\nimpl<T: 'static + StrategyDesc> Format<TileMapPrefab<T>> for TiledFormat {\n\n fn name(&self) -> &'static str {\n\n \"Tile Map\"\n", "file_path": "src/format.rs", "rank": 40, "score": 7.340491057839477 }, { "content": " sprites.extend(input_sprites);\n\n }\n\n }\n\n\n\n #[cfg(feature = \"profiler\")]\n\n 
profile_scope!(\"sheep_pack_image\");\n\n\n\n // There is guaranteed to be exactly one resulting sprite sheet\n\n Ok(pack::<SimplePacker>(sprites, 4, ()).remove(0))\n\n}\n\n\n", "file_path": "src/packing.rs", "rank": 41, "score": 5.787178158426846 }, { "content": " height: set.tile_height,\n\n margin: set.margin,\n\n spacing: set.spacing,\n\n },\n\n )?);\n\n }\n\n }\n\n\n\n #[cfg(feature = \"profiler\")]\n\n profile_scope!(\"sheep_pack_image\");\n\n\n\n // There is guaranteed to be exactly one resulting sprite sheet\n\n Ok(pack::<SimplePacker>(sprites, 4, ()).remove(0))\n\n}\n", "file_path": "src/packing.rs", "rank": 42, "score": 5.425487068397037 }, { "content": " type Data = Vec<Sprite>;\n\n type Options = ();\n\n\n\n fn encode(\n\n dimensions: (u32, u32),\n\n sprites: &[SpriteAnchor],\n\n _options: Self::Options,\n\n ) -> Self::Data {\n\n #[cfg(feature = \"profiler\")]\n\n profile_scope!(\"encode_amethyst_format\");\n\n\n\n // Fix ordering issues\n\n let mut inputs = sprites.to_vec();\n\n inputs.sort_by_key(|x| x.id);\n\n\n\n let (width, height) = dimensions;\n\n\n\n inputs\n\n .iter()\n\n .map(|anchor| {\n", "file_path": "src/packing.rs", "rank": 43, "score": 5.401496010709112 }, { "content": "\n\n let mut tilemap = TileMap::new(map_size, tile_size, Some(sheets.insert(sheet)));\n\n\n\n for layer in &map.layers {\n\n for y in 0..layer.tiles.len() {\n\n for x in 0..layer.tiles[y].len() {\n\n let tile_ref =\n\n tilemap.get_mut(&Point3::new(x as u32, y as u32, layer.layer_index));\n\n let tile_idx = gid_updater.get(&layer.tiles[y][x].gid);\n\n\n\n match (tile_ref, tile_idx) {\n\n (Some(tile), Some(index)) => *tile = TileGid(*index),\n\n _ => unreachable!(\"The available tiles should not have changed since the start of the function\"),\n\n }\n\n }\n\n }\n\n }\n\n\n\n Ok(tilemap)\n\n }\n\n}\n\n\n", "file_path": "src/strategy.rs", "rank": 44, "score": 4.732508422524042 }, { "content": " for set in sets {\n\n let mut first_gid = set.first_gid;\n\n\n\n for image in 
&set.images {\n\n let (input_sprites, len, consumed) = pack_sparse_image(\n\n image,\n\n source.clone(),\n\n TileSpec {\n\n width: set.tile_width,\n\n height: set.tile_height,\n\n margin: set.margin,\n\n spacing: set.spacing,\n\n },\n\n first_gid,\n\n &usage[tile_index..],\n\n )?;\n\n\n\n first_gid += len;\n\n tile_index += consumed;\n\n\n", "file_path": "src/packing.rs", "rank": 45, "score": 4.005803213808747 }, { "content": " #[cfg(feature = \"profiler\")]\n\n profile_scope!(\"load_from_memory\");\n\n\n\n // TODO: Leave images in their original formats and allow amethyst to deal with conversions\n\n // to save memory when possible\n\n load_from_memory(&bytes[..])?.to_rgba()\n\n };\n\n\n\n if let Some(color) = img.transparent_colour {\n\n #[cfg(feature = \"profiler\")]\n\n profile_scope!(\"apply_transparency\");\n\n\n\n let color = Rgba([color.red, color.green, color.blue, 0]);\n\n\n\n for pixel in image.pixels_mut() {\n\n if pixel.to_rgb() == color.to_rgb() {\n\n *pixel = color;\n\n }\n\n }\n\n }\n\n\n\n Ok(image)\n\n}\n\n\n", "file_path": "src/packing.rs", "rank": 46, "score": 3.5820943436409047 }, { "content": " let (pixel_left, pixel_top) = anchor.position;\n\n let (sprite_w, sprite_h) = anchor.dimensions;\n\n Sprite::from_pixel_values(\n\n width, height, sprite_w, sprite_h, pixel_left, pixel_top, [1.0; 2], false,\n\n false,\n\n )\n\n })\n\n .collect()\n\n }\n\n}\n\n\n\n/// A sprite packer that can save time on the packing by assuming all sprites will be the exact same\n\n/// size. Because of this, it can pack everything in a single pass. 
However, it won't be as easy to\n\n/// view and look at due to all of the sprites being put in a vertical line.\n\npub struct TilePacker;\n\n\n\nimpl Packer for TilePacker {\n\n type Options = ();\n\n\n\n fn pack(sprites: &[SpriteData], _options: Self::Options) -> Vec<PackerResult> {\n", "file_path": "src/packing.rs", "rank": 47, "score": 3.439085509589959 }, { "content": " Self(idx)\n\n }\n\n}\n\n\n\nimpl Tile for TileGid {\n\n fn sprite(&self, _: Point3<u32>, _: &World) -> Option<usize> {\n\n Some(self.0)\n\n }\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 48, "score": 2.1162752642389604 }, { "content": " id: sprite.id,\n\n position: (0, num * height),\n\n dimensions: tile_dimensions,\n\n });\n\n\n\n num += 1;\n\n }\n\n\n\n vec![PackerResult {\n\n dimensions: (width, num * height),\n\n anchors,\n\n }]\n\n }\n\n}\n\n\n", "file_path": "src/packing.rs", "rank": 49, "score": 1.8757348420086595 }, { "content": " let grid_height = (image.height() - 2 * margin) / (height + spacing);\n\n\n\n let mut sprites = Vec::new();\n\n let mut consumed_tiles = 0;\n\n\n\n for idx in usage.iter() {\n\n if *idx >= first_gid + grid_width * grid_height {\n\n break;\n\n }\n\n consumed_tiles += 1;\n\n\n\n let x = margin + ((idx - first_gid) % grid_width) * (width + spacing);\n\n let y = margin + ((idx - first_gid) / grid_width) * (height + spacing);\n\n\n\n sprites.push(InputSprite {\n\n dimensions: (width, height),\n\n bytes: image.sub_image(x, y, width, height).to_image().into_raw(),\n\n })\n\n }\n\n\n\n Ok((sprites, grid_width * grid_height, consumed_tiles))\n\n}\n\n\n", "file_path": "src/packing.rs", "rank": 50, "score": 1.6494102023332862 } ]
Rust
crates/github_scbot_database/src/models/auth/external_account/mod.rs
sharingcloud/github-scbot
953ba1ae7f3bb06c37084756458a1ddb53c8fa65
use github_scbot_crypto::JwtUtils; use github_scbot_utils::TimeUtils; use serde::{Deserialize, Serialize}; use crate::{schema::external_account, Result}; mod adapter; mod builder; pub use adapter::{ DummyExternalAccountDbAdapter, ExternalAccountDbAdapter, IExternalAccountDbAdapter, }; use builder::ExternalAccountModelBuilder; #[derive(Debug, Serialize, Deserialize)] pub struct ExternalJwtClaims { pub iat: u64, pub iss: String, } #[derive( Debug, Deserialize, Insertable, Identifiable, Serialize, Queryable, Clone, Default, AsChangeset, PartialEq, Eq, )] #[primary_key(username)] #[table_name = "external_account"] pub struct ExternalAccountModel { pub username: String, pub public_key: String, pub private_key: String, } impl ExternalAccountModel { pub fn builder(username: &str) -> ExternalAccountModelBuilder { ExternalAccountModelBuilder::default(username) } pub fn builder_from_model(model: &Self) -> ExternalAccountModelBuilder { ExternalAccountModelBuilder::from_model(model) } pub fn generate_access_token(&self) -> Result<String> { let now_ts = TimeUtils::now_timestamp(); let claims = ExternalJwtClaims { iat: now_ts, iss: self.username.clone(), }; JwtUtils::create_jwt(&self.private_key, &claims).map_err(Into::into) } } #[cfg(test)] mod tests { use pretty_assertions::assert_eq; use super::*; use crate::{tests::using_test_db, DatabaseError}; #[actix_rt::test] async fn create_and_update() -> Result<()> { using_test_db("test_db_external_account", |_config, pool| async move { let db_adapter = ExternalAccountDbAdapter::new(pool.clone()); let acc = ExternalAccountModel::builder("ext1") .create_or_update(&db_adapter) .await .unwrap(); assert_eq!( acc, ExternalAccountModel { username: "ext1".into(), public_key: String::new(), private_key: String::new(), } ); let acc = ExternalAccountModel::builder("ext1") .private_key("pri") .public_key("pub") .create_or_update(&db_adapter) .await .unwrap(); assert_eq!( acc, ExternalAccountModel { username: "ext1".into(), private_key: 
"pri".into(), public_key: "pub".into() } ); let acc = ExternalAccountModel::builder("ext1") .public_key("public") .create_or_update(&db_adapter) .await .unwrap(); assert_eq!( acc, ExternalAccountModel { username: "ext1".into(), private_key: "pri".into(), public_key: "public".into() } ); assert_eq!(db_adapter.list().await.unwrap().len(), 1); Ok::<_, DatabaseError>(()) }) .await } }
use github_scbot_crypto::JwtUtils; use github_scbot_utils::TimeUtils; use serde::{Deserialize, Serialize}; use crate::{schema::external_account, Result}; mod adapter; mod builder; pub use adapter::{ DummyExternalAccountDbAdapter, ExternalAccountDbAdapter, IExternalAccountDbAdapter, }; use builder::ExternalAccountModelBuilder; #[derive(Debug, Serialize, Deserialize)] pub struct ExternalJwtClaims { pub iat: u64, pub iss: String, } #[derive( Debug, Deserialize, Insertable, Identifiable, Serialize, Queryable, Clone, Default, AsChangeset, PartialEq, Eq, )] #[primary_key(username)] #[table_name = "external_account"] pub struct ExternalAccountModel { pub username: String, pub public_key: String, pub private_key: String, } impl ExternalAccountModel { pub fn builder(username: &str) -> ExternalAccountModelBuilder { ExternalAccountModelBuilder::default(username) } pub fn builder_from_model(model: &Self) -> ExternalAccountModelBuilder { ExternalAccountModelBuilder::from_model(model) } pub fn generate_access_token(&self) -> Result<String> { let now_ts = TimeUtils::now_timestamp(); let claims = ExternalJwtClaims { iat: now_ts, iss: self.username.clone(), }; JwtUtils::create_jwt(&self.private_key, &claims).map_err(Into::into) } } #[cfg(test)] mod tests { use pretty_assertions::assert_eq; use super::*; use crate::{tests::using_test_db, DatabaseError}; #[actix_rt::test] async fn create_and_update() -> Result<()> { using_test_db("test_db_external_account", |_config, pool| async move { let db_adapter = ExternalAccountDbAdapter::new(pool.clone()); let acc = ExternalAccountModel::builder("ext1") .create_or_update(&db_adapter) .await .unwrap(); assert_eq!( acc, ExternalAccountModel { username: "ext1".into(), public_key: String::new(), private_key: String::new(), } );
assert_eq!( acc, ExternalAccountModel { username: "ext1".into(), private_key: "pri".into(), public_key: "pub".into() } ); let acc = ExternalAccountModel::builder("ext1") .public_key("public") .create_or_update(&db_adapter) .await .unwrap(); assert_eq!( acc, ExternalAccountModel { username: "ext1".into(), private_key: "pri".into(), public_key: "public".into() } ); assert_eq!(db_adapter.list().await.unwrap().len(), 1); Ok::<_, DatabaseError>(()) }) .await } }
let acc = ExternalAccountModel::builder("ext1") .private_key("pri") .public_key("pub") .create_or_update(&db_adapter) .await .unwrap();
assignment_statement
[ { "content": "fn env_to_u64(name: &str, default: u64) -> u64 {\n\n env::var(name)\n\n .map(|e| e.parse().unwrap_or(default))\n\n .unwrap_or(default)\n\n}\n\n\n", "file_path": "crates/github_scbot_conf/src/config.rs", "rank": 0, "score": 232379.7236178441 }, { "content": "fn env_to_str(name: &str, default: &str) -> String {\n\n env::var(name)\n\n .unwrap_or_else(|_e| default.to_string())\n\n .replace(\"\\\\n\", \"\\n\")\n\n}\n", "file_path": "crates/github_scbot_conf/src/config.rs", "rank": 1, "score": 231833.3818986186 }, { "content": "fn create_pool(base_url: &str, db_name: &str) -> Result<DbPool> {\n\n let url = format!(\"{}/{}\", base_url, db_name);\n\n let manager = ConnectionManager::<PgConnection>::new(&url);\n\n Ok(Pool::builder().build(manager)?)\n\n}\n\n\n", "file_path": "crates/github_scbot_database/src/tests.rs", "rank": 2, "score": 228435.42229683208 }, { "content": "/// Run migrations.\n\npub fn run_migrations(pool: &DbPool) -> Result<()> {\n\n embedded_migrations::run(&*pool.get()?)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "crates/github_scbot_database/src/lib.rs", "rank": 3, "score": 222253.9139291045 }, { "content": "fn teardown_test_db(base_url: &str, db_name: &str) -> Result<()> {\n\n let conn = create_postgres_connection(base_url)?;\n\n terminate_connections(&conn, db_name)?;\n\n drop_database(&conn, db_name)\n\n}\n\n\n\n/// Using test database.\n\n#[allow(clippy::missing_panics_doc)]\n\npub async fn using_test_db<F, Fut, E>(db_name: &str, test: F) -> Result<()>\n\nwhere\n\n E: std::fmt::Debug,\n\n F: FnOnce(Config, DbPool) -> Fut,\n\n Fut: Future<Output = core::result::Result<(), E>>,\n\n{\n\n let mut config = Config::from_env();\n\n config.bot_username = \"test-bot\".into();\n\n\n\n let base_url = get_base_url(&config);\n\n teardown_test_db(&base_url, db_name)?;\n\n setup_test_db(&base_url, db_name)?;\n", "file_path": "crates/github_scbot_database/src/tests.rs", "rank": 4, "score": 206169.71108702238 }, { "content": "fn setup_test_db(base_url: 
&str, db_name: &str) -> Result<()> {\n\n {\n\n let conn = create_postgres_connection(base_url)?;\n\n terminate_connections(&conn, db_name)?;\n\n drop_database(&conn, db_name)?;\n\n create_database(&conn, db_name)?;\n\n }\n\n\n\n {\n\n let conn = create_db_connection(base_url, db_name)?;\n\n diesel_migrations::run_pending_migrations(&conn)?;\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "crates/github_scbot_database/src/tests.rs", "rank": 5, "score": 206169.71108702238 }, { "content": "/// Handle `Ping` command.\n\npub fn handle_ping_command(comment_author: &str) -> Result<CommandExecutionResult> {\n\n let comment = format!(\"**{}** pong!\", comment_author);\n\n Ok(CommandExecutionResult::builder()\n\n .with_action(ResultAction::AddReaction(GhReactionType::Eyes))\n\n .with_action(ResultAction::PostComment(comment))\n\n .build())\n\n}\n\n\n\n/// Handle `Gif` command.\n\npub async fn handle_gif_command(\n\n config: &Config,\n\n api_adapter: &dyn IAPIAdapter,\n\n search_terms: &str,\n\n) -> Result<CommandExecutionResult> {\n\n Ok(CommandExecutionResult::builder()\n\n .with_action(ResultAction::AddReaction(GhReactionType::Eyes))\n\n .with_action(ResultAction::PostComment(\n\n GifPoster::generate_random_gif_comment(config, api_adapter, search_terms).await?,\n\n ))\n\n .build())\n", "file_path": "crates/github_scbot_logic/src/commands/handlers.rs", "rank": 6, "score": 201120.2999306415 }, { "content": "/// Establish a connection to a database pool.\n\npub fn establish_pool_connection(config: &Config) -> Result<DbPool> {\n\n ConnectionBuilder::configure(config).build_pool()\n\n}\n\n\n", "file_path": "crates/github_scbot_database/src/lib.rs", "rank": 7, "score": 189517.79125289342 }, { "content": "fn create_db_connection(base_url: &str, db_name: &str) -> Result<PgConnection> {\n\n let url = format!(\"{}/{}\", base_url, db_name);\n\n Ok(PgConnection::establish(&url)?)\n\n}\n\n\n", "file_path": "crates/github_scbot_database/src/tests.rs", "rank": 8, "score": 
185427.48050639173 }, { "content": "fn create_postgres_connection(base_url: &str) -> Result<PgConnection> {\n\n let url = format!(\"{}/postgres\", base_url);\n\n Ok(PgConnection::establish(&url)?)\n\n}\n\n\n", "file_path": "crates/github_scbot_database/src/tests.rs", "rank": 9, "score": 181428.30099007802 }, { "content": "fn drop_database(conn: &PgConnection, db_name: &str) -> Result<()> {\n\n diesel::sql_query(format!(r#\"DROP DATABASE IF EXISTS {};\"#, db_name)).execute(conn)?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "crates/github_scbot_database/src/tests.rs", "rank": 10, "score": 177649.03592562003 }, { "content": "fn terminate_connections(conn: &PgConnection, db_name: &str) -> Result<()> {\n\n diesel::sql_query(format!(\n\n r#\"SELECT pg_terminate_backend(pg_stat_activity.pid)\n\n FROM pg_stat_activity\n\n WHERE datname = '{}'\n\n AND pid <> pg_backend_pid();\"#,\n\n db_name\n\n ))\n\n .execute(conn)?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "crates/github_scbot_database/src/tests.rs", "rank": 11, "score": 177649.03592562003 }, { "content": "fn create_database(conn: &PgConnection, db_name: &str) -> Result<()> {\n\n diesel::sql_query(format!(r#\"CREATE DATABASE {};\"#, db_name)).execute(conn)?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "crates/github_scbot_database/src/tests.rs", "rank": 12, "score": 177649.03592562003 }, { "content": "/// Configure application startup.\n\npub fn configure_startup() -> Result<Config> {\n\n dotenv::dotenv().ok();\n\n github_scbot_sentry::eyre::install().ok();\n\n\n\n let config = Config::from_env();\n\n self::logging::configure_logging(&config);\n\n\n\n self::validation::validate_configuration(&config)?;\n\n Ok(config)\n\n}\n", "file_path": "crates/github_scbot_conf/src/lib.rs", "rank": 13, "score": 166163.59090500313 }, { "content": "/// Get uninitialized client.\n\npub fn get_uninitialized_client() -> Result<Octocrab> {\n\n Octocrab::builder().build().map_err(ApiError::from)\n\n}\n\n\n\nasync fn 
get_authentication_credentials(\n\n config: &Config,\n\n api_adapter: &dyn IAPIAdapter,\n\n) -> Result<String> {\n\n if config.github_api_token.is_empty() {\n\n create_installation_access_token(config, api_adapter).await\n\n } else {\n\n Ok(config.github_api_token.clone())\n\n }\n\n}\n\n\n", "file_path": "crates/github_scbot_ghapi/src/auth.rs", "rank": 14, "score": 163352.95503604616 }, { "content": "/// Initialize command line.\n\npub fn initialize_command_line() -> eyre::Result<()> {\n\n // Prepare startup\n\n let config = configure_startup()?;\n\n\n\n async fn sync(config: Config, cmd: SubCommand, no_input: bool) -> eyre::Result<()> {\n\n let pool = establish_pool_connection(&config)?;\n\n run_migrations(&pool)?;\n\n\n\n let db_adapter = DatabaseAdapter::new(pool);\n\n let api_adapter = GithubAPIAdapter::new(config.clone());\n\n let redis_adapter = RedisAdapter::new(&config.redis_address);\n\n let ctx = CommandContext {\n\n config,\n\n db_adapter: Box::new(db_adapter),\n\n api_adapter: Box::new(api_adapter),\n\n redis_adapter: Box::new(redis_adapter),\n\n no_input,\n\n };\n\n\n\n cmd.execute(ctx).await\n", "file_path": "crates/github_scbot_cli/src/lib.rs", "rank": 15, "score": 163352.95503604616 }, { "content": "/// Validate configuration.\n\npub fn validate_configuration(config: &Config) -> Result<()> {\n\n validate_env_vars(config)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n // RSA key specifically generated for these tests.\n\n const SAMPLE_RSA_KEY: &str = r\"\n\n-----BEGIN RSA PUBLIC 
KEY-----\n\nMIIBigKCAYEAzEWMCHfwGGXxwFDRtHn43opUTW/qMXUoLH7KLpO0meL9jv/TNnI5\n\ntotrx/AbnqpKI50TNpYKfw08C9/WC3SZMuyudBOSShXmDjq1yVOM7p9+gjjw5O78\n\n60WqyiUbxOHOIz4CfgoEr23h9I916SCGzqEVTCHvlDE5qQcdNoHeYdohWUTMGxKs\n\niRMbbHsNvD56zJ8U4AOjOb4J2410ZMx+VQGXeFtZvWYL2EFq1ZiGoo1ZIUZPRImO\n\naxGG0RhzwQdaiktCP7ENjwpr5MBsKlwXFOEb6LdeaCAOqOd05qf4yphzBbLiLK7Y\n\nCZbQ5S3QVQMrn0ycdtFlWt0kAVps9WdB+8izDehuN+pozTm+mjehFsEEj4REGyHu\n\nH3iwEyuGr90vKWEht1Wfvt9C4guBhoLQlSwzgTqNgbHDXiasITmMUwzsgxyASxop\n\n7ih/0aNRO/HfV7rQgFwMrCfPijZJkQHyougprERZJD6U9pPvAIow3G535LpT7mwC\n\n2zEcABBQBwtxAgMBAAE=\n", "file_path": "crates/github_scbot_conf/src/validation.rs", "rank": 16, "score": 159081.71915820977 }, { "content": "fn create_app_token(config: &Config) -> Result<String> {\n\n // GitHub App authentication documentation\n\n // https://docs.github.com/en/developers/apps/authenticating-with-github-apps#authenticating-as-a-github-app\n\n\n\n let now_ts = TimeUtils::now_timestamp();\n\n let claims = JwtClaims {\n\n // Issued at time\n\n iat: now_ts,\n\n // Expiration time, 1 minute\n\n exp: now_ts + 60,\n\n // GitHub App Identifier\n\n iss: config.github_app_id,\n\n };\n\n\n\n JwtUtils::create_jwt(&config.github_app_private_key, &claims)\n\n .map_err(|e| ApiError::JWTError(e.to_string()))\n\n}\n\n\n\nasync fn create_installation_access_token(\n\n config: &Config,\n", "file_path": "crates/github_scbot_ghapi/src/auth.rs", "rank": 17, "score": 157571.5646654433 }, { "content": "fn env_to_bool(name: &str, default: bool) -> bool {\n\n env::var(name).map(|e| !e.is_empty()).unwrap_or(default)\n\n}\n\n\n", "file_path": "crates/github_scbot_conf/src/config.rs", "rank": 18, "score": 153539.64199843473 }, { "content": "fn env_to_u16(name: &str, default: u16) -> u16 {\n\n env::var(name)\n\n .map(|e| e.parse().unwrap_or(default))\n\n .unwrap_or(default)\n\n}\n\n\n", "file_path": "crates/github_scbot_conf/src/config.rs", "rank": 19, "score": 153539.64199843473 }, { "content": "fn env_to_u32(name: &str, default: 
u32) -> u32 {\n\n env::var(name)\n\n .map(|e| e.parse().unwrap_or(default))\n\n .unwrap_or(default)\n\n}\n\n\n", "file_path": "crates/github_scbot_conf/src/config.rs", "rank": 20, "score": 153539.64199843473 }, { "content": "/// Check if a signature is valid.\n\npub fn is_valid_signature<'a>(signature: &str, body: &'a [u8], secret: &str) -> bool {\n\n let digest = Sha256::new();\n\n let mut hmac = Hmac::new(digest, secret.as_bytes());\n\n hmac.input(body);\n\n let expected_signature = hmac.result();\n\n\n\n crypto::util::fixed_time_eq(\n\n hex::encode(expected_signature.code()).as_bytes(),\n\n signature.as_bytes(),\n\n )\n\n}\n\n\n\n/// Convert Actix payload to bytes.\n\npub async fn convert_payload_to_bytes(payload: &mut Payload) -> Result<Bytes, Box<dyn Error>> {\n\n let mut body = BytesMut::new();\n\n\n\n while let Some(chunk) = payload.next().await {\n\n body.extend_from_slice(&chunk?);\n\n }\n\n\n", "file_path": "crates/github_scbot_server/src/utils.rs", "rank": 21, "score": 151565.3466943839 }, { "content": "fn parse_event_type<'de, T>(event_type: EventType, body: &'de str) -> Result<T>\n\nwhere\n\n T: Deserialize<'de>,\n\n{\n\n serde_json::from_str(body).map_err(|e| ServerError::EventParseError(event_type, e))\n\n}\n\n\n", "file_path": "crates/github_scbot_server/src/webhook/mod.rs", "rank": 22, "score": 149735.82166168184 }, { "content": "fn trace_call(method: &str) {\n\n debug!(message = \"GitHub API call\", method = method)\n\n}\n\n\n\n#[async_trait(?Send)]\n\nimpl IAPIAdapter for GithubAPIAdapter {\n\n async fn issue_labels_list(\n\n &self,\n\n owner: &str,\n\n name: &str,\n\n issue_number: u64,\n\n ) -> Result<Vec<String>> {\n\n trace_call(\"issue_labels_list\");\n\n\n\n Ok(self\n\n .get_client()\n\n .await?\n\n .issues(owner, name)\n\n .list_labels_for_issue(issue_number)\n\n .send()\n", "file_path": "crates/github_scbot_ghapi/src/adapter/github.rs", "rank": 23, "score": 146169.30187933941 }, { "content": "fn get_base_url(config: &Config) -> String 
{\n\n config\n\n .test_database_url\n\n .split('/')\n\n .take(3)\n\n .collect::<Vec<_>>()\n\n .join(\"/\")\n\n}\n\n\n", "file_path": "crates/github_scbot_database/src/tests.rs", "rank": 24, "score": 141293.73857094732 }, { "content": "fn env_to_optional_u16(name: &str, default: Option<u16>) -> Option<u16> {\n\n env::var(name)\n\n .map(|e| e.parse::<u16>().map(Some).unwrap_or(default))\n\n .unwrap_or(default)\n\n}\n\n\n", "file_path": "crates/github_scbot_conf/src/config.rs", "rank": 25, "score": 139985.0292505197 }, { "content": "pub fn configure_debug_handlers(cfg: &mut web::ServiceConfig) {\n\n cfg.service(web::resource(\"panic\").route(web::get().to(panic_route)));\n\n cfg.service(web::resource(\"error\").route(web::get().to(error_route)));\n\n cfg.service(web::resource(\"error-nest\").route(web::get().to(error_route_nest)));\n\n}\n\n\n\nasync fn error_route() -> ActixResult<HttpResponse> {\n\n will_error().await.map_err(WrapEyre::to_http_error)?;\n\n\n\n Ok(HttpResponse::Ok().json(serde_json::json!({\"message\": \"ok\"})))\n\n}\n\n\n\nasync fn error_route_nest() -> ActixResult<HttpResponse> {\n\n will_error_nest().await.map_err(WrapEyre::to_http_error)?;\n\n\n\n Ok(HttpResponse::Ok().json(serde_json::json!({\"message\": \"ok\"})))\n\n}\n\n\n\nasync fn panic_route() -> ActixResult<HttpResponse> {\n\n panic!(\"Oh noes, a panic.\")\n", "file_path": "crates/github_scbot_server/src/debug.rs", "rank": 26, "score": 139483.19809348555 }, { "content": "#[test]\n\nfn test_ping_event_parsing() -> ServerResult<()> {\n\n assert_eq!(\n\n parse_ping_event(fixtures::PING_EVENT_DATA)?,\n\n GhPingEvent {\n\n zen: \"Favor focus over features.\".to_string(),\n\n hook_id: 12_345_678,\n\n repository: Some(GhRepository {\n\n name: \"test-repo\".to_string(),\n\n full_name: \"Example/test-repo\".to_string(),\n\n owner: GhUser {\n\n login: \"Example\".to_string()\n\n }\n\n }),\n\n sender: Some(GhUser {\n\n login: \"Example\".to_string()\n\n })\n\n }\n\n );\n\n\n\n Ok(())\n\n}\n\n\n", 
"file_path": "crates/github_scbot_server/src/webhook/tests/webhook.rs", "rank": 27, "score": 127299.37704499977 }, { "content": "#[test]\n\nfn test_review_submitted_event_parsing() -> ServerResult<()> {\n\n assert_eq!(\n\n parse_review_event(fixtures::PULL_REQUEST_REVIEW_SUBMITTED_DATA)?,\n\n GhReviewEvent {\n\n action: GhReviewAction::Submitted,\n\n review: GhReview {\n\n user: GhUser {\n\n login: \"me\".to_string()\n\n },\n\n submitted_at: chrono::DateTime::parse_from_rfc3339(\"2020-11-13T17:25:46Z\")\n\n .expect(\"bad date\")\n\n .with_timezone(&chrono::Utc),\n\n state: GhReviewState::ChangesRequested\n\n },\n\n pull_request: GhPullRequest {\n\n number: 1206,\n\n state: GhPullRequestState::Open,\n\n locked: false,\n\n title: \"This is a PR\".to_string(),\n\n user: GhUser {\n", "file_path": "crates/github_scbot_server/src/webhook/tests/webhook.rs", "rank": 28, "score": 124943.65610613907 }, { "content": "/// Configure webhook handlers.\n\npub fn configure_webhook_handlers(cfg: &mut web::ServiceConfig) {\n\n cfg.service(web::resource(\"\").route(web::post().to(event_handler)));\n\n}\n", "file_path": "crates/github_scbot_server/src/webhook/mod.rs", "rank": 29, "score": 124548.21416063313 }, { "content": "#[test]\n\nfn test_pull_request_opened_event_parsing() -> ServerResult<()> {\n\n assert_eq!(\n\n parse_pull_request_event(fixtures::PULL_REQUEST_OPENED_DATA)?,\n\n GhPullRequestEvent {\n\n action: GhPullRequestAction::Opened,\n\n number: 1214,\n\n pull_request: GhPullRequest {\n\n number: 1214,\n\n state: GhPullRequestState::Open,\n\n locked: false,\n\n title: \"This is a PR\".to_string(),\n\n user: GhUser {\n\n login: \"me\".to_string()\n\n },\n\n body: Some(\"Ceci est\\nle corps de la \\nPR\".to_string()),\n\n created_at: chrono::DateTime::parse_from_rfc3339(\"2020-11-13T17:34:23Z\")\n\n .expect(\"bad date\")\n\n .with_timezone(&chrono::Utc),\n\n updated_at: chrono::DateTime::parse_from_rfc3339(\"2020-11-13T17:34:23Z\")\n\n .expect(\"bad date\")\n", "file_path": 
"crates/github_scbot_server/src/webhook/tests/webhook.rs", "rank": 30, "score": 122696.13703516294 }, { "content": "#[test]\n\nfn test_issue_comment_created_event_parsing() -> ServerResult<()> {\n\n assert_eq!(\n\n parse_issue_comment_event(fixtures::ISSUE_COMMENT_CREATED_DATA)?,\n\n GhIssueCommentEvent {\n\n action: GhIssueCommentAction::Created,\n\n changes: None,\n\n issue: GhIssue {\n\n number: 1,\n\n title: \"Add the webhook module\".to_string(),\n\n user: GhUser {\n\n login: \"me\".to_string()\n\n },\n\n labels: vec![],\n\n state: GhIssueState::Open,\n\n created_at: chrono::DateTime::parse_from_rfc3339(\"2020-11-15T15:49:48Z\")\n\n .expect(\"bad date\")\n\n .with_timezone(&chrono::Utc),\n\n updated_at: chrono::DateTime::parse_from_rfc3339(\"2020-11-15T16:13:15Z\")\n\n .expect(\"bad date\")\n\n .with_timezone(&chrono::Utc),\n", "file_path": "crates/github_scbot_server/src/webhook/tests/webhook.rs", "rank": 31, "score": 122696.13703516294 }, { "content": "#[test]\n\nfn test_pull_request_labeled_event_parsing() -> ServerResult<()> {\n\n assert_eq!(\n\n parse_pull_request_event(fixtures::PULL_REQUEST_LABELED_DATA)?,\n\n GhPullRequestEvent {\n\n action: GhPullRequestAction::Labeled,\n\n number: 1214,\n\n pull_request: GhPullRequest {\n\n number: 1214,\n\n state: GhPullRequestState::Open,\n\n locked: false,\n\n title: \"This is a PR\".to_string(),\n\n user: GhUser {\n\n login: \"me\".to_string()\n\n },\n\n body: Some(\"This is a PR body\".to_string()),\n\n created_at: chrono::DateTime::parse_from_rfc3339(\"2020-11-13T17:34:23Z\")\n\n .expect(\"bad date\")\n\n .with_timezone(&chrono::Utc),\n\n updated_at: chrono::DateTime::parse_from_rfc3339(\"2020-11-13T17:39:42Z\")\n\n .expect(\"bad date\")\n", "file_path": "crates/github_scbot_server/src/webhook/tests/webhook.rs", "rank": 32, "score": 122696.13703516294 }, { "content": "#[test]\n\nfn test_check_suite_completed_event_parsing() -> ServerResult<()> {\n\n assert_eq!(\n\n 
parse_check_suite_event(fixtures::CHECK_SUITE_COMPLETED_DATA)?,\n\n GhCheckSuiteEvent {\n\n action: GhCheckSuiteAction::Completed,\n\n check_suite: GhCheckSuite {\n\n id: 12_345_678,\n\n head_branch: \"head-branch\".to_string(),\n\n head_sha: \"12345678123456781234567812345678\".to_string(),\n\n status: GhCheckStatus::Completed,\n\n conclusion: Some(GhCheckConclusion::Failure),\n\n pull_requests: vec![GhPullRequestShort {\n\n number: 1214,\n\n head: GhBranchShort {\n\n reference: \"head-branch\".to_string(),\n\n sha: \"12345678123456781234567812345678\".to_string(),\n\n },\n\n base: GhBranchShort {\n\n reference: \"stable\".to_string(),\n\n sha: \"12345678123456781234567812345678\".to_string(),\n", "file_path": "crates/github_scbot_server/src/webhook/tests/webhook.rs", "rank": 33, "score": 122696.13703516294 }, { "content": "/// Handle `Help` command.\n\npub fn handle_help_command(\n\n config: &Config,\n\n comment_author: &str,\n\n) -> Result<CommandExecutionResult> {\n\n let comment = format!(\n\n \"Hello **{}** ! I am a GitHub helper bot ! 
:robot:\\n\\\n\n You can ping me with a command in the format: `{} <command> (<arguments>)`\\n\\\n\n \\n\\\n\n Supported commands:\\n\\\n\n - `noqa+`: _Skip QA validation_\\n\\\n\n - `noqa-`: _Enable QA validation_\\n\\\n\n - `qa+`: _Mark QA as passed_\\n\\\n\n - `qa-`: _Mark QA as failed_\\n\\\n\n - `qa?`: _Mark QA as waiting_\\n\\\n\n - `nochecks+`: _Skip checks validation_\\n\\\n\n - `nochecks-`: _Enable checks validation_\\n\\\n\n - `automerge+`: _Enable auto-merge for this PR (once all checks pass)_\\n\\\n\n - `automerge-`: _Disable auto-merge for this PR_\\n\\\n\n - `lock+ <reason?>`: _Lock a pull-request (block merge)_\\n\\\n\n - `lock- <reason?>`: _Unlock a pull-request (unblock merge)_\\n\\\n", "file_path": "crates/github_scbot_logic/src/commands/handlers.rs", "rank": 34, "score": 116851.2932836271 }, { "content": "/// Handle `AdminHelp` command.\n\npub fn handle_admin_help_command(\n\n config: &Config,\n\n comment_author: &str,\n\n) -> Result<CommandExecutionResult> {\n\n let comment = format!(\n\n \"Hello **{}** ! I am a GitHub helper bot ! 
:robot:\\n\\\n\n You can ping me with a command in the format: `{} <command> (<arguments>)`\\n\\\n\n \\n\\\n\n Supported admin commands:\\n\\\n\n - `admin-help`: _Show this comment_\\n\\\n\n - `admin-enable`: _Enable me on a pull request with manual interaction_\\n\\\n\n - `admin-disable`: _Disable me on a pull request with manual interaction_\\n\\\n\n - `admin-set-default-needed-reviewers <count>`: _Set default needed reviewers count for this repository_\\n\\\n\n - `admin-set-default-merge-strategy <merge|squash|rebase>`: _Set default merge strategy for this repository_\\n\\\n\n - `admin-set-default-pr-title-regex <regex?>`: _Set default PR title validation regex for this repository_\\n\\\n\n - `admin-set-default-automerge+`: _Set automerge enabled for this repository_\\n\\\n\n - `admin-set-default-automerge-`: _Set automerge disabled for this repository_\\n\\\n\n - `admin-set-default-qa-status+`: _Enable QA validation by default for this repository_\\n\\\n\n - `admin-set-default-qa-status-`: _Disable QA validation by default for this repository_\\n\\\n\n - `admin-set-default-checks-status+`: _Enable checks validation by default for this repository_\\n\\\n", "file_path": "crates/github_scbot_logic/src/commands/handlers.rs", "rank": 35, "score": 114978.69004902066 }, { "content": "pub fn configure_logging(config: &Config) {\n\n LogTracer::init().expect(\"Unable to setup log tracer.\");\n\n\n\n let log_config = std::env::var(\"RUST_LOG\").unwrap_or_else(|_| DEFAULT_ENV_CONFIG.to_string());\n\n\n\n if config.logging_use_bunyan {\n\n let app_name = concat!(env!(\"CARGO_PKG_NAME\"), \"-\", env!(\"CARGO_PKG_VERSION\")).to_string();\n\n let layer = BunyanFormattingLayer::new(app_name, std::io::stdout);\n\n let subscriber = Registry::default()\n\n .with(EnvFilter::from_str(&log_config).expect(\"Bad log configuration\"))\n\n .with(JsonStorageLayer)\n\n .with(layer);\n\n tracing::subscriber::set_global_default(subscriber).unwrap();\n\n } else {\n\n let subscriber = 
tracing_subscriber::fmt().finish();\n\n tracing::subscriber::set_global_default(subscriber).unwrap();\n\n }\n\n}\n", "file_path": "crates/github_scbot_conf/src/logging.rs", "rank": 36, "score": 109931.22308158173 }, { "content": "fn get_bind_address(config: &Config) -> String {\n\n format!(\"{}:{}\", config.server_bind_ip, config.server_bind_port)\n\n}\n\n\n\nasync fn run_bot_server_internal(ip_with_port: String, context: AppContext) -> Result<()> {\n\n let context = Arc::new(context);\n\n let cloned_context = context.clone();\n\n let prometheus = PrometheusMetrics::new(\"api\", Some(\"/metrics\"), None);\n\n\n\n let mut server = HttpServer::new(move || {\n\n let mut app = App::new()\n\n .data(context.clone())\n\n .wrap(prometheus.clone())\n\n .wrap(Sentry::new())\n\n .wrap(Logger::default())\n\n .wrap(TracingLogger)\n\n .service(\n\n web::scope(\"/external\")\n\n .wrap(HttpAuthentication::bearer(jwt_auth_validator))\n\n .wrap(Cors::permissive())\n", "file_path": "crates/github_scbot_server/src/server.rs", "rank": 37, "score": 109278.96885027364 }, { "content": "fn validate_env_vars(config: &Config) -> Result<()> {\n\n #[inline]\n\n fn _missing(error: &mut String, name: &str) {\n\n error.push('\\n');\n\n error.push_str(&format!(\" - Missing env. 
var.: {}\", name));\n\n }\n\n\n\n #[inline]\n\n fn _invalid_key(error: &mut String, name: &str) {\n\n error.push('\\n');\n\n error.push_str(&format!(\" - Invalid private key: {}\", name));\n\n }\n\n\n\n let mut error = String::new();\n\n\n\n // Check server configuration\n\n if config.server_bind_ip.is_empty() {\n\n _missing(&mut error, \"BOT_SERVER_BIND_IP\");\n\n }\n\n if config.server_bind_port == 0 {\n", "file_path": "crates/github_scbot_conf/src/validation.rs", "rank": 38, "score": 108389.20576335947 }, { "content": "/// Captures an [`eyre::Report`].\n\n///\n\n/// This will capture an eyre report as a sentry event if a\n\n/// [`sentry::Client`](../../struct.Client.html) is initialised, otherwise it will be a\n\n/// no-op. The event is dispatched to the thread-local hub, with semantics as described in\n\n/// [`Hub::current`].\n\n///\n\n/// See [module level documentation](index.html) for more information.\n\n///\n\n/// [`eyre::Report`]: https://docs.rs/eyre/*/eyre/struct.Report.html\n\npub fn capture_eyre(e: &eyre::Report) -> Uuid {\n\n Hub::with_active(|hub| hub.capture_eyre(e))\n\n}\n\n\n", "file_path": "crates/sentry-eyre/src/lib.rs", "rank": 39, "score": 104679.19283843183 }, { "content": "#[derive(Debug, Serialize, Deserialize)]\n\nstruct JwtClaims {\n\n iat: u64,\n\n exp: u64,\n\n iss: u64,\n\n}\n\n\n", "file_path": "crates/github_scbot_ghapi/src/auth.rs", "rank": 40, "score": 102953.53913034755 }, { "content": "struct ConnectionBuilder {\n\n database_url: String,\n\n pool_size: u32,\n\n}\n\n\n\nimpl ConnectionBuilder {\n\n fn configure(config: &Config) -> Self {\n\n Self {\n\n database_url: config.database_url.clone(),\n\n pool_size: config.database_pool_size,\n\n }\n\n }\n\n\n\n fn build_pool(self) -> Result<DbPool> {\n\n let manager = ConnectionManager::<PgConnection>::new(&self.database_url);\n\n Ok(Pool::builder().max_size(self.pool_size).build(manager)?)\n\n }\n\n}\n", "file_path": "crates/github_scbot_database/src/lib.rs", "rank": 41, "score": 
102672.9982619859 }, { "content": "fn get_tag_attr(attr: &Attribute) -> Result<Ident> {\n\n let list: Meta = attr.parse_args()?;\n\n let ident = list.path().get_ident().expect(\"missing ident\");\n\n Ok(Ident::new(&format!(\"{}\", ident), Span::call_site()))\n\n}\n", "file_path": "crates/github_scbot_database_macros/src/lib.rs", "rank": 42, "score": 102127.4991556925 }, { "content": "#[async_trait]\n\npub trait IRepositoryDbAdapter {\n\n /// Creates a new repository.\n\n async fn create(&self, entry: RepositoryCreation) -> Result<RepositoryModel>;\n\n /// Lists available repositories.\n\n async fn list(&self) -> Result<Vec<RepositoryModel>>;\n\n /// Gets repository from ID.\n\n async fn get_from_id(&self, id: i32) -> Result<RepositoryModel>;\n\n /// Gets repository from owner and name.\n\n async fn get_from_owner_and_name(&self, owner: &str, name: &str) -> Result<RepositoryModel>;\n\n /// Updates repository.\n\n async fn update(&self, entry: &mut RepositoryModel, update: RepositoryUpdate) -> Result<()>;\n\n}\n\n\n\n/// Concrete repository DB adapter.\n\npub struct RepositoryDbAdapter {\n\n pool: DbPool,\n\n}\n\n\n\nimpl RepositoryDbAdapter {\n\n /// Creates a new repository DB adapter.\n", "file_path": "crates/github_scbot_database/src/models/repository/adapter.rs", "rank": 43, "score": 101806.51911089246 }, { "content": "#[async_trait]\n\npub trait IReviewDbAdapter {\n\n /// Creates a new review.\n\n async fn create(&self, entry: ReviewCreation) -> Result<ReviewModel>;\n\n /// Lists available reviews.\n\n async fn list(&self) -> Result<Vec<ReviewModel>>;\n\n /// Lists reviews from pull request ID.\n\n async fn list_from_pull_request_id(&self, pull_request_id: i32) -> Result<Vec<ReviewModel>>;\n\n /// Lists reviews from pull request and username.\n\n async fn get_from_pull_request_and_username(\n\n &self,\n\n repository: &RepositoryModel,\n\n pull_request: &PullRequestModel,\n\n username: &str,\n\n ) -> Result<ReviewModel>;\n\n /// Removes an existing review.\n\n 
async fn remove(&self, entry: ReviewModel) -> Result<()>;\n\n /// Removes all existing reviews for a pull request ID.\n\n async fn remove_all_for_pull_request(&self, pull_request_id: i32) -> Result<()>;\n\n /// Update.\n\n async fn update(&self, entry: &mut ReviewModel, update: ReviewUpdate) -> Result<()>;\n", "file_path": "crates/github_scbot_database/src/models/review/adapter.rs", "rank": 44, "score": 101806.51911089246 }, { "content": "fn validate_api_credentials(config: &Config) -> Result<(), ApiConfigError> {\n\n // Check token first\n\n if config.github_api_token.is_empty() {\n\n match validate_github_app_config(config) {\n\n // If private key is missing, you might want to use token instead.\n\n Err(ApiConfigError::MissingPrivateKey) => Err(ApiConfigError::MissingToken),\n\n res => res,\n\n }\n\n } else {\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "crates/github_scbot_conf/src/validation.rs", "rank": 45, "score": 100539.94562682728 }, { "content": "#[proc_macro_derive(SCGetter, attributes(get, get_ref, get_as, get_try_from))]\n\npub fn add_scgetter(input: TokenStream) -> TokenStream {\n\n let ast: DeriveInput = syn::parse(input).unwrap();\n\n\n\n let name = &ast.ident;\n\n let generics = &ast.generics;\n\n let (impl_generics, ty_generics, where_clause) = generics.split_for_impl();\n\n\n\n if let Data::Struct(DataStruct { ref fields, .. 
}) = ast.data {\n\n let generated = fields\n\n .iter()\n\n .filter_map(|field| {\n\n if has_tag(field.attrs.iter(), \"get_ref\") {\n\n let field_name = field.clone().ident.unwrap();\n\n let ty = field.ty.clone();\n\n let fn_name = Ident::new(&format!(\"{}\", field_name), Span::call_site());\n\n let doc = field.attrs.iter().filter(|v| {\n\n v.parse_meta()\n\n .map(|meta| meta.path().is_ident(\"doc\"))\n\n .unwrap_or(false)\n\n });\n", "file_path": "crates/github_scbot_database_macros/src/lib.rs", "rank": 46, "score": 100234.65097193714 }, { "content": "#[async_trait]\n\npub trait IAccountDbAdapter {\n\n /// Creates a new account.\n\n async fn create(&self, entry: AccountModel) -> Result<AccountModel>;\n\n /// Gets account from username.\n\n async fn get_from_username(&self, username: &str) -> Result<AccountModel>;\n\n /// Lists available accounts.\n\n async fn list(&self) -> Result<Vec<AccountModel>>;\n\n /// Lists available admin accounts.\n\n async fn list_admin_accounts(&self) -> Result<Vec<AccountModel>>;\n\n /// Removes a specific account.\n\n async fn remove(&self, entry: AccountModel) -> Result<()>;\n\n /// Saves and updates a specific account.\n\n async fn save(&self, entry: &mut AccountModel) -> Result<()>;\n\n}\n\n\n\n/// Concrete account DB adapter.\n\npub struct AccountDbAdapter {\n\n pool: DbPool,\n\n}\n\n\n", "file_path": "crates/github_scbot_database/src/models/auth/account/adapter.rs", "rank": 47, "score": 100101.04682524262 }, { "content": "#[async_trait]\n\npub trait IHistoryWebhookDbAdapter {\n\n /// Creates a new history webhook entry.\n\n async fn create(&self, entry: HistoryWebhookCreation) -> Result<HistoryWebhookModel>;\n\n /// Lists existing history webhook entries.\n\n async fn list(&self) -> Result<Vec<HistoryWebhookModel>>;\n\n /// Lists existing history webhook entries for repository.\n\n async fn list_from_repository_id(&self, repository_id: i32)\n\n -> Result<Vec<HistoryWebhookModel>>;\n\n /// Removes all history webhook entries.\n\n 
async fn remove_all(&self) -> Result<()>;\n\n}\n\n\n\n/// Concrete history webhook DB adapter.\n\npub struct HistoryWebhookDbAdapter {\n\n pool: DbPool,\n\n}\n\n\n\nimpl HistoryWebhookDbAdapter {\n\n /// Creates a new history webhook DB adapter.\n\n pub fn new(pool: DbPool) -> Self {\n", "file_path": "crates/github_scbot_database/src/models/history/adapter.rs", "rank": 48, "score": 100101.04682524262 }, { "content": "#[async_trait]\n\npub trait IPullRequestDbAdapter {\n\n /// Creates a pull request.\n\n async fn create(&self, entry: PullRequestCreation) -> Result<PullRequestModel>;\n\n /// Fetch status comment ID from a pull request ID.\n\n async fn fetch_status_comment_id(&self, pull_request_id: i32) -> Result<i32>;\n\n /// Lists available pull requests.\n\n async fn list(&self) -> Result<Vec<PullRequestModel>>;\n\n /// Lists available pull requests from a repository path.\n\n async fn list_from_repository_path(&self, path: &str) -> Result<Vec<PullRequestModel>>;\n\n /// Gets an existing pull request from a repository and a pull request number.\n\n async fn get_from_repository_and_number(\n\n &self,\n\n repository: &RepositoryModel,\n\n number: u64,\n\n ) -> Result<PullRequestModel>;\n\n /// Gets an existing pull request from a repository path and a pull request number.\n\n async fn get_from_repository_path_and_number(\n\n &self,\n\n path: &str,\n\n number: u64,\n", "file_path": "crates/github_scbot_database/src/models/pulls/adapter.rs", "rank": 49, "score": 100101.04682524262 }, { "content": "fn validate_github_app_config(config: &Config) -> Result<(), ApiConfigError> {\n\n // Check Private key\n\n if config.github_app_private_key.is_empty() {\n\n Err(ApiConfigError::MissingPrivateKey)\n\n } else {\n\n match JwtUtils::parse_encoding_key(&config.github_app_private_key) {\n\n Err(_) => Err(ApiConfigError::InvalidPrivateKey),\n\n Ok(_) => {\n\n // Check App ID\n\n if config.github_app_id == 0 {\n\n Err(ApiConfigError::MissingAppId)\n\n } else if 
config.github_app_installation_id == 0 {\n\n Err(ApiConfigError::MissingInstallationId)\n\n } else {\n\n Ok(())\n\n }\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "crates/github_scbot_conf/src/validation.rs", "rank": 50, "score": 99033.05770553843 }, { "content": "#[async_trait]\n\npub trait IMergeRuleDbAdapter {\n\n /// Creates a new merge rule entry.\n\n async fn create(&self, entry: MergeRuleCreation) -> Result<MergeRuleModel>;\n\n /// Gets a merge rule from branches.\n\n async fn get_from_branches(\n\n &self,\n\n repository: &RepositoryModel,\n\n base_branch: &RuleBranch,\n\n head_branch: &RuleBranch,\n\n ) -> Result<MergeRuleModel>;\n\n /// Lists merge rules from a repository ID.\n\n async fn list_from_repository_id(&self, repository_id: i32) -> Result<Vec<MergeRuleModel>>;\n\n /// Lists existing merge rules.\n\n async fn list(&self) -> Result<Vec<MergeRuleModel>>;\n\n /// Remove a specific merge rule.\n\n async fn remove(&self, entry: MergeRuleModel) -> Result<()>;\n\n /// Update.\n\n async fn update(&self, entry: &mut MergeRuleModel, update: MergeRuleUpdate) -> Result<()>;\n\n}\n\n\n", "file_path": "crates/github_scbot_database/src/models/merge_rule/adapter.rs", "rank": 51, "score": 98473.47792102644 }, { "content": "#[async_trait(?Send)]\n\npub trait IAPIAdapter: Send + Sync {\n\n /// List labels from a target issue.\n\n async fn issue_labels_list(\n\n &self,\n\n owner: &str,\n\n name: &str,\n\n issue_number: u64,\n\n ) -> Result<Vec<String>>;\n\n /// Replace all labels for a target issue.\n\n async fn issue_labels_replace_all(\n\n &self,\n\n owner: &str,\n\n name: &str,\n\n issue_number: u64,\n\n labels: &[String],\n\n ) -> Result<()>;\n\n /// Get user permissions from a repository.\n\n async fn user_permissions_get(\n\n &self,\n\n owner: &str,\n", "file_path": "crates/github_scbot_ghapi/src/adapter/interface.rs", "rank": 52, "score": 98207.8789008602 }, { "content": "/// Database adapter.\n\npub trait IDatabaseAdapter: Send + Sync {\n\n /// Gets account DB 
adapter.\n\n fn account(&self) -> &dyn IAccountDbAdapter;\n\n /// Gets external account DB adapter.\n\n fn external_account(&self) -> &dyn IExternalAccountDbAdapter;\n\n /// Gets external account right DB adapter.\n\n fn external_account_right(&self) -> &dyn IExternalAccountRightDbAdapter;\n\n /// Gets history webhook DB adapter.\n\n fn history_webhook(&self) -> &dyn IHistoryWebhookDbAdapter;\n\n /// Gets merge rule DB adapter.\n\n fn merge_rule(&self) -> &dyn IMergeRuleDbAdapter;\n\n /// Gets pull request DB adapter.\n\n fn pull_request(&self) -> &dyn IPullRequestDbAdapter;\n\n /// Gets repository DB adapter.\n\n fn repository(&self) -> &dyn IRepositoryDbAdapter;\n\n /// Gets review DB adapter.\n\n fn review(&self) -> &dyn IReviewDbAdapter;\n\n}\n\n\n\n/// Concrete database adapter.\n", "file_path": "crates/github_scbot_database/src/models/adapter.rs", "rank": 53, "score": 98207.85821276292 }, { "content": "#[async_trait]\n\npub trait IExternalAccountDbAdapter {\n\n /// Creates a new external account.\n\n async fn create(&self, entry: ExternalAccountModel) -> Result<ExternalAccountModel>;\n\n /// Gets an external account from username.\n\n async fn get_from_username(&self, username: &str) -> Result<ExternalAccountModel>;\n\n /// Lists available external accounts.\n\n async fn list(&self) -> Result<Vec<ExternalAccountModel>>;\n\n /// Removes a specific external account.\n\n async fn remove(&self, entry: ExternalAccountModel) -> Result<()>;\n\n /// Saves and updates a specific external account.\n\n async fn save(&self, entry: &mut ExternalAccountModel) -> Result<()>;\n\n}\n\n\n\n/// Concrete external account DB adapter.\n\npub struct ExternalAccountDbAdapter {\n\n pool: DbPool,\n\n}\n\n\n\nimpl ExternalAccountDbAdapter {\n\n /// Creates a new external account DB adapter.\n", "file_path": "crates/github_scbot_database/src/models/auth/external_account/adapter.rs", "rank": 54, "score": 96918.4342179481 }, { "content": "#[derive(Debug, Deserialize, Serialize)]\n\nstruct 
ImportExportModel {\n\n repositories: Vec<RepositoryModel>,\n\n pull_requests: Vec<PullRequestModel>,\n\n reviews: Vec<ReviewModel>,\n\n merge_rules: Vec<MergeRuleModel>,\n\n accounts: Vec<AccountModel>,\n\n external_accounts: Vec<ExternalAccountModel>,\n\n external_account_rights: Vec<ExternalAccountRightModel>,\n\n}\n\n\n\n/// Export database models to JSON.\n\npub async fn export_models_to_json<W>(\n\n db_adapter: &dyn IDatabaseAdapter,\n\n writer: &mut W,\n\n) -> Result<()>\n\nwhere\n\n W: Write,\n\n{\n\n let model = ImportExportModel {\n\n repositories: db_adapter.repository().list().await?,\n", "file_path": "crates/github_scbot_database/src/import_export/mod.rs", "rank": 55, "score": 95695.60822671014 }, { "content": "#[async_trait]\n\npub trait IExternalAccountRightDbAdapter {\n\n /// Lists available external account rights.\n\n async fn list(&self) -> Result<Vec<ExternalAccountRightModel>>;\n\n /// Lists available external accounts rights for username.\n\n async fn list_rights(&self, username: &str) -> Result<Vec<ExternalAccountRightModel>>;\n\n /// Gets external account right for username on repository.\n\n async fn get_right(\n\n &self,\n\n username: &str,\n\n repository: &RepositoryModel,\n\n ) -> Result<ExternalAccountRightModel>;\n\n /// Adds right to username on repository.\n\n async fn add_right(\n\n &self,\n\n username: &str,\n\n repository: &RepositoryModel,\n\n ) -> Result<ExternalAccountRightModel>;\n\n /// Removes right from username on repository.\n\n async fn remove_right(&self, username: &str, repository: &RepositoryModel) -> Result<()>;\n\n /// Removes all rights from username.\n", "file_path": "crates/github_scbot_database/src/models/auth/external_account_right/adapter.rs", "rank": 56, "score": 94006.83567840765 }, { "content": "/// Build a Sentry request struct from the HTTP request\n\nfn sentry_request_from_http(request: &ServiceRequest, with_pii: bool) -> (Option<String>, Request) {\n\n let transaction = if let Some(name) = 
request.match_name() {\n\n Some(String::from(name))\n\n } else {\n\n request.match_pattern()\n\n };\n\n\n\n let mut sentry_req = Request {\n\n url: format!(\n\n \"{}://{}{}\",\n\n request.connection_info().scheme(),\n\n request.connection_info().host(),\n\n request.uri()\n\n )\n\n .parse()\n\n .ok(),\n\n method: Some(request.method().to_string()),\n\n headers: request\n\n .headers()\n\n .iter()\n", "file_path": "crates/sentry-actix/src/lib.rs", "rank": 57, "score": 93110.26311237563 }, { "content": "-- Sets up a trigger for the given table to automatically set a column called\n", "file_path": "migrations/00000000000000_diesel_initial_setup/up.sql", "rank": 58, "score": 90368.92285375636 }, { "content": "#[async_trait]\n\npub trait IRedisAdapter: Send + Sync {\n\n /// Tries to lock a resource.\n\n async fn try_lock_resource<'a>(&'a self, name: &str) -> Result<LockStatus<'a>, RedisError>;\n\n /// Checks if resource exists.\n\n async fn has_resource(&self, name: &str) -> Result<bool, RedisError>;\n\n /// Deletes a resource if it exists.\n\n async fn del_resource(&self, name: &str) -> Result<(), RedisError>;\n\n /// Wait for a resource lock, until timeout.\n\n async fn wait_lock_resource<'a>(\n\n &'a self,\n\n name: &str,\n\n timeout_ms: u64,\n\n ) -> Result<LockStatus<'a>, RedisError> {\n\n // Try each 100ms\n\n let mut elapsed_time = 0;\n\n let millis = 100;\n\n let duration = Duration::from_millis(millis);\n\n\n\n loop {\n\n match self.try_lock_resource(name).await? 
{\n", "file_path": "crates/github_scbot_redis/src/interface.rs", "rank": 59, "score": 88494.15882450892 }, { "content": "fn has_tag<'a, T: Iterator<Item = &'a Attribute>>(mut attribs: T, tag_name: &str) -> bool {\n\n attribs\n\n .find_map(|v| {\n\n let meta = v.parse_meta().expect(\"failed to parse attr meta data\");\n\n if meta.path().is_ident(tag_name) {\n\n Some(meta)\n\n } else {\n\n None\n\n }\n\n })\n\n .is_some()\n\n}\n\n\n", "file_path": "crates/github_scbot_database_macros/src/lib.rs", "rank": 60, "score": 81043.25264681074 }, { "content": "fn get_tag<'a, T: Iterator<Item = &'a Attribute>>(mut attribs: T, tag_name: &str) -> &'a Attribute {\n\n attribs\n\n .find_map(|v| {\n\n let meta = v.parse_meta().expect(\"failed to parse attr meta data\");\n\n if meta.path().is_ident(tag_name) {\n\n Some(v)\n\n } else {\n\n None\n\n }\n\n })\n\n .unwrap()\n\n}\n\n\n", "file_path": "crates/github_scbot_database_macros/src/lib.rs", "rank": 61, "score": 78695.51387240217 }, { "content": "#[derive(FromArgs)]\n\n#[argh(subcommand)]\n\nenum DebugSubCommand {\n\n TestSentry(DebugTestSentryCommand),\n\n}\n\n\n\n#[async_trait(?Send)]\n\nimpl Command for DebugSubCommand {\n\n async fn execute(self, ctx: CommandContext) -> Result<()> {\n\n match self {\n\n Self::TestSentry(sub) => sub.execute(ctx).await,\n\n }\n\n }\n\n}\n", "file_path": "crates/github_scbot_cli/src/commands/debug/mod.rs", "rank": 62, "score": 77402.49589151441 }, { "content": "fn extract_event_from_request(req: &HttpRequest) -> Option<EventType> {\n\n req.headers()\n\n .get(GITHUB_EVENT_HEADER)\n\n .and_then(|x| x.to_str().ok())\n\n .and_then(|x| EventType::try_from(x).ok())\n\n}\n\n\n\npub(crate) async fn event_handler(\n\n req: HttpRequest,\n\n mut payload: web::Payload,\n\n ctx: web::Data<Arc<AppContext>>,\n\n) -> ActixResult<HttpResponse> {\n\n // Route event depending on header\n\n if let Some(event_type) = extract_event_from_request(&req) {\n\n if let Ok(body) = convert_payload_to_string(&mut 
payload).await {\n\n sentry::configure_scope(|scope| {\n\n scope.set_extra(\"Event type\", event_type.to_str().into());\n\n scope.set_extra(\"Payload\", body.clone().into());\n\n });\n\n\n", "file_path": "crates/github_scbot_server/src/webhook/mod.rs", "rank": 63, "score": 76773.86686144218 }, { "content": "use github_scbot_conf::Config;\n\nuse github_scbot_database::models::DummyDatabaseAdapter;\n\nuse github_scbot_ghapi::adapter::DummyAPIAdapter;\n\nuse github_scbot_redis::DummyRedisAdapter;\n\n\n\nuse crate::commands::CommandContext;\n\n\n\npub(crate) fn create_test_context() -> CommandContext {\n\n let config = Config::from_env();\n\n let api_adapter = DummyAPIAdapter::new();\n\n let db_adapter = DummyDatabaseAdapter::new();\n\n let redis_adapter = DummyRedisAdapter::new();\n\n\n\n CommandContext {\n\n config,\n\n api_adapter: Box::new(api_adapter),\n\n db_adapter: Box::new(db_adapter),\n\n redis_adapter: Box::new(redis_adapter),\n\n no_input: false,\n\n }\n\n}\n", "file_path": "crates/github_scbot_cli/src/tests/mod.rs", "rank": 64, "score": 73969.3858994478 }, { "content": "mod external;\n\nmod pr_creation;\n\nmod reviews;\n", "file_path": "crates/github_scbot_logic/src/tests/mod.rs", "rank": 65, "score": 73961.77779206322 }, { "content": "//! Adapter\n\n\n\nmod dummy;\n\nmod github;\n\nmod interface;\n\n\n\npub use dummy::DummyAPIAdapter;\n\npub use github::GithubAPIAdapter;\n\npub use interface::{\n\n GhReviewApi, GhReviewStateApi, GifFormat, GifObject, GifResponse, IAPIAdapter, MediaObject,\n\n};\n", "file_path": "crates/github_scbot_ghapi/src/adapter/mod.rs", "rank": 66, "score": 73568.30476304465 }, { "content": "//! 
Debug commands.\n\n\n\nuse argh::FromArgs;\n\nuse async_trait::async_trait;\n\nuse github_scbot_sentry::eyre::Result;\n\n\n\nuse super::{Command, CommandContext};\n\n\n\nmod test_sentry;\n\n\n\nuse test_sentry::DebugTestSentryCommand;\n\n\n\n/// debug related commands.\n\n#[derive(FromArgs)]\n\n#[argh(subcommand, name = \"debug\")]\n\npub(crate) struct DebugCommand {\n\n #[argh(subcommand)]\n\n inner: DebugSubCommand,\n\n}\n\n\n\n#[async_trait(?Send)]\n\nimpl Command for DebugCommand {\n\n async fn execute(self, ctx: CommandContext) -> Result<()> {\n\n self.inner.execute(ctx).await\n\n }\n\n}\n\n\n\n#[derive(FromArgs)]\n\n#[argh(subcommand)]\n", "file_path": "crates/github_scbot_cli/src/commands/debug/mod.rs", "rank": 67, "score": 71972.97455765157 }, { "content": "//! Webhook tests\n\n\n\nmod fixtures;\n\nmod webhook;\n", "file_path": "crates/github_scbot_server/src/webhook/tests/mod.rs", "rank": 68, "score": 71607.8735431392 }, { "content": "use argh::FromArgs;\n\nuse async_trait::async_trait;\n\nuse github_scbot_sentry::{\n\n eyre::{eyre::eyre, Result},\n\n send_test_event,\n\n};\n\n\n\nuse super::{Command, CommandContext};\n\n\n\n/// send a test message to Sentry.\n\n#[derive(FromArgs)]\n\n#[argh(subcommand, name = \"test-sentry\")]\n\npub(crate) struct DebugTestSentryCommand {\n\n /// custom message, defaults to \"This is a test\".\n\n #[argh(option, short = 'm')]\n\n message: Option<String>,\n\n}\n\n\n\n#[async_trait(?Send)]\n\nimpl Command for DebugTestSentryCommand {\n", "file_path": "crates/github_scbot_cli/src/commands/debug/test_sentry.rs", "rank": 69, "score": 70326.26588200373 }, { "content": " async fn execute(self, ctx: CommandContext) -> Result<()> {\n\n if ctx.config.sentry_url.is_empty() {\n\n Err(eyre!(\"Sentry URL is not configured.\"))\n\n } else {\n\n send_test_event(&ctx.config.sentry_url, self.message).await;\n\n Ok(())\n\n }\n\n }\n\n}\n", "file_path": "crates/github_scbot_cli/src/commands/debug/test_sentry.rs", "rank": 70, "score": 
70308.73744409766 }, { "content": "//! Fixtures\n\n\n\npub const PING_EVENT_DATA: &str = include_str!(\"ping_event.json\");\n\npub const CHECK_SUITE_COMPLETED_DATA: &str = include_str!(\"check_suite_completed.json\");\n\npub const ISSUE_COMMENT_CREATED_DATA: &str = include_str!(\"issue_comment_created.json\");\n\npub const PULL_REQUEST_OPENED_DATA: &str = include_str!(\"pull_request_opened.json\");\n\npub const PULL_REQUEST_LABELED_DATA: &str = include_str!(\"pull_request_labeled.json\");\n\npub const PULL_REQUEST_REVIEW_SUBMITTED_DATA: &str =\n\n include_str!(\"pull_request_review_submitted.json\");\n", "file_path": "crates/github_scbot_server/src/webhook/tests/fixtures/mod.rs", "rank": 71, "score": 69395.51787071723 }, { "content": "#[derive(FromArgs)]\n\n#[argh(description = \"SharingCloud PR Bot\")]\n\nstruct Args {\n\n #[argh(subcommand)]\n\n cmd: Option<SubCommand>,\n\n\n\n /// do not ask for input.\n\n #[argh(switch)]\n\n no_input: bool,\n\n\n\n /// show version.\n\n #[argh(switch)]\n\n version: bool,\n\n}\n\n\n", "file_path": "crates/github_scbot_cli/src/lib.rs", "rank": 72, "score": 67663.91181275327 }, { "content": "ALTER TABLE repository ADD COLUMN default_automerge bool NOT NULL DEFAULT false;\n", "file_path": "migrations/2021-09-20-094351_default-automerge/up.sql", "rank": 73, "score": 66199.82759969358 }, { "content": "#[derive(Debug, Serialize, Deserialize)]\n\nstruct InstallationTokenResponse {\n\n token: String,\n\n expires_at: String,\n\n}\n\n\n\n/// Get an authenticated GitHub client builder.\n\npub async fn get_client_builder(\n\n config: &Config,\n\n api_adapter: &dyn IAPIAdapter,\n\n) -> Result<OctocrabBuilder> {\n\n let token = get_authentication_credentials(config, api_adapter).await?;\n\n Ok(Octocrab::builder().personal_token(token))\n\n}\n\n\n", "file_path": "crates/github_scbot_ghapi/src/auth.rs", "rank": 74, "score": 65351.634147892386 }, { "content": "fn main() {\n\n if let Err(err) = github_scbot_cli::initialize_command_line() {\n\n 
eprintln!(\"{}\", format!(\"ERROR: {:?}\", err).red());\n\n std::process::exit(1);\n\n }\n\n}\n", "file_path": "crates/github_scbot_cli/src/main.rs", "rank": 75, "score": 63793.901027192696 }, { "content": "/// Hub extension methods for working with [`eyre`].\n\n///\n\n/// [`eyre`]: https://docs.rs/eyre\n\npub trait EyreHubExt {\n\n /// Captures an [`eyre::Report`] on a specific hub.\n\n ///\n\n /// [`eyre::Report`]: https://docs.rs/eyre/*/eyre/struct.Report.html\n\n fn capture_eyre(&self, e: &eyre::Report) -> Uuid;\n\n}\n\n\n\nimpl EyreHubExt for Hub {\n\n fn capture_eyre(&self, e: &eyre::Report) -> Uuid {\n\n let err: &dyn std::error::Error = e.as_ref();\n\n let mut evt = sentry_core::event_from_error(err);\n\n\n\n // Add traceback\n\n if let Some(bt) = e.backtrace() {\n\n if let Some(mut st) = backtrace_to_stacktrace(bt) {\n\n if let Some(client) = self.client() {\n\n process_event_stacktrace(&mut st, client.options());\n\n }\n\n\n\n if let Some(mut exc) = evt.exception.last_mut() {\n\n exc.stacktrace = Some(st);\n\n }\n\n }\n\n }\n\n\n\n self.capture_event(evt)\n\n }\n\n}\n", "file_path": "crates/sentry-eyre/src/lib.rs", "rank": 76, "score": 61550.31018308263 }, { "content": "ALTER TABLE repository ADD COLUMN default_enable_qa BOOL NOT NULL default true;\n", "file_path": "migrations/2021-09-21-081512_skip_checks/up.sql", "rank": 77, "score": 59168.960141274685 }, { "content": "ALTER TABLE repository ADD COLUMN default_enable_checks BOOL NOT NULL default true;\n", "file_path": "migrations/2021-09-21-081512_skip_checks/up.sql", "rank": 78, "score": 59168.960141274685 }, { "content": "ALTER TABLE repository ADD COLUMN manual_interaction bool NOT NULL DEFAULT false;\n", "file_path": "migrations/2021-05-04-142456_manual_interaction/up.sql", "rank": 79, "score": 48118.60140220617 }, { "content": "fn process_error(hub: Arc<Hub>, e: &actix_web::Error) -> Uuid {\n\n process_eyre_report(hub.clone(), e).unwrap_or_else(|| hub.capture_error(e))\n\n}\n\n\n", "file_path": 
"crates/sentry-actix/src/lib.rs", "rank": 80, "score": 46871.79679590308 }, { "content": "ALTER TABLE repository DROP COLUMN default_automerge;\n", "file_path": "migrations/2021-09-20-094351_default-automerge/down.sql", "rank": 81, "score": 46803.072267222364 }, { "content": "/// Add request data to a Sentry event\n\nfn process_event(mut event: Event<'static>, request: &Request) -> Event<'static> {\n\n // Request\n\n if event.request.is_none() {\n\n event.request = Some(request.clone());\n\n }\n\n\n\n // SDK\n\n if let Some(sdk) = event.sdk.take() {\n\n let mut sdk = sdk.into_owned();\n\n sdk.packages.push(ClientSdkPackage {\n\n name: \"sentry-actix\".into(),\n\n version: env!(\"CARGO_PKG_VERSION\").into(),\n\n });\n\n event.sdk = Some(Cow::Owned(sdk));\n\n }\n\n event\n\n}\n", "file_path": "crates/sentry-actix/src/lib.rs", "rank": 82, "score": 45240.23420798876 }, { "content": "#[cfg(feature = \"eyre\")]\n\nfn process_eyre_report(hub: Arc<Hub>, e: &actix_web::Error) -> Option<Uuid> {\n\n use sentry_eyre::EyreHubExt;\n\n\n\n e.as_error::<WrapEyre>()\n\n .map(|report| hub.capture_eyre(report))\n\n}\n\n\n", "file_path": "crates/sentry-actix/src/lib.rs", "rank": 83, "score": 44488.18320055083 }, { "content": "#[cfg(not(feature = \"eyre\"))]\n\nfn process_eyre_report(_hub: Arc<Hub>, _e: &actix_web::Error) -> Option<Uuid> {\n\n None\n\n}\n\n\n", "file_path": "crates/sentry-actix/src/lib.rs", "rank": 84, "score": 44488.18320055083 }, { "content": "use sentry::{protocol::Event, Hub, Level};\n\n\n\nuse crate::with_sentry_configuration;\n\n\n\npub async fn send_test_event(sentry_url: &str, message: Option<String>) {\n\n with_sentry_configuration(sentry_url, || async {\n\n // Create event\n\n let event = Event {\n\n message: Some(message.unwrap_or_else(|| \"This is a test\".into())),\n\n level: Level::Info,\n\n ..Default::default()\n\n };\n\n\n\n Hub::with_active(|hub| {\n\n let uuid = hub.capture_event(event);\n\n println!(\"Event UUID: {}\", uuid);\n\n uuid\n\n });\n\n\n\n 
Ok::<(), ()>(())\n\n })\n\n .await\n\n .unwrap()\n\n}\n", "file_path": "crates/github_scbot_sentry/src/debug.rs", "rank": 85, "score": 38946.922778496686 }, { "content": "}\n\n\n\nasync fn will_error() -> Result<(), ServerError> {\n\n Err(ServerError::ThreadpoolError)\n\n}\n\n\n\nasync fn _will_error_nest_api() -> Result<(), ApiError> {\n\n Err(ApiError::MergeError(\"Nope.\".into()))\n\n}\n\n\n\nasync fn _will_error_nest_logic() -> Result<(), LogicError> {\n\n _will_error_nest_api().await.map_err(Into::into)\n\n}\n\n\n\nasync fn will_error_nest() -> Result<(), ServerError> {\n\n _will_error_nest_logic().await.map_err(Into::into)\n\n}\n", "file_path": "crates/github_scbot_server/src/debug.rs", "rank": 86, "score": 38933.564903008 }, { "content": "use actix_web::{web, HttpResponse, Result as ActixResult};\n\nuse github_scbot_ghapi::ApiError;\n\nuse github_scbot_logic::LogicError;\n\nuse github_scbot_sentry::WrapEyre;\n\n\n\nuse crate::ServerError;\n\n\n", "file_path": "crates/github_scbot_server/src/debug.rs", "rank": 87, "score": 38930.673149889415 }, { "content": " assert_eq!(mock.call_count(), 1);\n\n\n\n mock.set_callback(Box::new(|x| x * 4));\n\n assert_eq!(mock.call(2), 8);\n\n assert_eq!(mock.call_count(), 2);\n\n }\n\n\n\n #[test]\n\n fn test_set_callback() {\n\n #[derive(Clone, Debug, PartialEq)]\n\n struct MyStruct {\n\n pub a: String,\n\n pub b: String,\n\n }\n\n\n\n let mut mock: Mock<MyStruct, MyStruct> = Mock::new(Box::new(|mut x| {\n\n x.a = \"Pouet\".to_string();\n\n x\n\n }));\n\n\n", "file_path": "crates/github_scbot_utils/src/tests.rs", "rank": 88, "score": 38586.29197324741 }, { "content": " let struct_test_input = MyStruct {\n\n a: \"1\".to_string(),\n\n b: \"2\".to_string(),\n\n };\n\n let struct_test = MyStruct {\n\n a: \"A\".to_string(),\n\n b: \"B\".to_string(),\n\n };\n\n let struct_test_clone = struct_test.clone();\n\n\n\n mock.set_callback(Box::new(move |_| struct_test_clone.clone()));\n\n assert_eq!(mock.call(struct_test_input), 
struct_test);\n\n }\n\n}\n", "file_path": "crates/github_scbot_utils/src/tests.rs", "rank": 89, "score": 38584.09738481251 }, { "content": "//! Test utils.\n\n\n\nuse std::sync::RwLock;\n\n\n\n#[derive(Default)]\n\npub(crate) struct MockInternal {\n\n call_count: u64,\n\n}\n\n\n\n/// Simple mock.\n\npub struct Mock<Args, Output> {\n\n internal: RwLock<MockInternal>,\n\n cb: Box<dyn Fn(Args) -> Output + Send + Sync>,\n\n}\n\n\n\nimpl<Args, Output> Mock<Args, Output> {\n\n /// Creates a new mock.\n\n pub fn new(cb: Box<dyn Fn(Args) -> Output + Send + Sync>) -> Self {\n\n Self {\n\n internal: RwLock::new(MockInternal::default()),\n", "file_path": "crates/github_scbot_utils/src/tests.rs", "rank": 90, "score": 38580.99953802752 }, { "content": " self.increment_call_count();\n\n (self.cb)(args)\n\n }\n\n\n\n /// Sets mock response.\n\n pub fn set_callback(&mut self, f: Box<dyn Fn(Args) -> Output + Send + Sync>) {\n\n self.cb = f;\n\n }\n\n}\n\n\n\n#[allow(clippy::module_inception)]\n\n#[cfg(test)]\n\nmod tests {\n\n use super::Mock;\n\n\n\n #[test]\n\n fn test_mock() {\n\n let mut mock: Mock<u16, u16> = Mock::new(Box::new(|x| x * 2));\n\n assert_eq!(mock.call(2), 4);\n\n assert!(mock.called());\n", "file_path": "crates/github_scbot_utils/src/tests.rs", "rank": 91, "score": 38577.07846906965 }, { "content": "\n\n let pool = create_pool(&base_url, db_name)?;\n\n let result = test(config, pool).await;\n\n teardown_test_db(&base_url, db_name)?;\n\n\n\n if let Err(e) = result {\n\n panic!(\"Error while executing test: {:?}\", e);\n\n }\n\n\n\n Ok(())\n\n}\n", "file_path": "crates/github_scbot_database/src/tests.rs", "rank": 92, "score": 38574.51351151076 }, { "content": "//! 
Test utils\n\n\n\nuse std::future::Future;\n\n\n\nuse diesel::{r2d2::ConnectionManager, Connection, PgConnection, RunQueryDsl};\n\nuse github_scbot_conf::Config;\n\nuse r2d2::Pool;\n\n\n\nuse crate::{DbPool, Result};\n\n\n", "file_path": "crates/github_scbot_database/src/tests.rs", "rank": 93, "score": 38572.92719295726 }, { "content": " cb,\n\n }\n\n }\n\n\n\n fn increment_call_count(&self) {\n\n self.internal.write().unwrap().call_count += 1;\n\n }\n\n\n\n /// Checks if mock has been called.\n\n pub fn called(&self) -> bool {\n\n self.internal.read().unwrap().call_count > 0\n\n }\n\n\n\n /// Checks mock call count.\n\n pub fn call_count(&self) -> u64 {\n\n self.internal.read().unwrap().call_count\n\n }\n\n\n\n /// Gets mock response.\n\n pub fn call(&self, args: Args) -> Output {\n", "file_path": "crates/github_scbot_utils/src/tests.rs", "rank": 94, "score": 38568.77426124657 }, { "content": "//! External tests\n\n\n\nuse github_scbot_database::{\n\n models::{DatabaseAdapter, ExternalAccountModel, IDatabaseAdapter, RepositoryModel},\n\n tests::using_test_db,\n\n Result,\n\n};\n\n\n\nuse crate::LogicError;\n\n\n\n#[actix_rt::test]\n\nasync fn test_repository_right_validation() -> Result<()> {\n\n using_test_db(\"test_logic_external\", |config, pool| async move {\n\n let db_adapter = DatabaseAdapter::new(pool);\n\n let account = ExternalAccountModel::builder(\"test-ext\")\n\n .generate_keys()\n\n .create_or_update(db_adapter.external_account())\n\n .await?;\n\n let repo = RepositoryModel::builder(&config, \"test\", \"Test\")\n\n .create_or_update(db_adapter.repository())\n", "file_path": "crates/github_scbot_logic/src/tests/external.rs", "rank": 95, "score": 37316.931005838116 }, { "content": " LogicError, Result as LogicResult,\n\n};\n\n\n\nasync fn arrange(\n\n conf: &Config,\n\n db_adapter: &dyn IDatabaseAdapter,\n\n) -> (RepositoryModel, PullRequestModel) {\n\n // Create a repository and a pull request\n\n let repo = RepositoryModel::builder(conf, \"me\", 
\"TestRepo\")\n\n .create_or_update(db_adapter.repository())\n\n .await\n\n .unwrap();\n\n\n\n let pr = PullRequestModel::builder(&repo, 1, \"me\")\n\n .name(\"PR 1\")\n\n .create_or_update(db_adapter.pull_request())\n\n .await\n\n .unwrap();\n\n\n\n (repo, pr)\n", "file_path": "crates/github_scbot_logic/src/tests/reviews.rs", "rank": 96, "score": 37310.018228457964 }, { "content": " repo,\n\n pr,\n\n 0,\n\n \"me\",\n\n commands,\n\n )\n\n .await?;\n\n\n\n Ok(())\n\n }\n\n\n\n using_test_db(\"test_logic_reviews\", |config, pool| async move {\n\n let db_adapter = DatabaseAdapter::new(pool);\n\n let mut api_adapter = DummyAPIAdapter::new();\n\n let mut redis_adapter = DummyRedisAdapter::new();\n\n api_adapter\n\n .user_permissions_get_response\n\n .set_callback(Box::new(|_| Ok(GhUserPermission::Write)));\n\n\n\n let (mut repo, mut pr) = arrange(&config, &db_adapter).await;\n", "file_path": "crates/github_scbot_logic/src/tests/reviews.rs", "rank": 97, "score": 37309.30867174635 }, { "content": " &repo,\n\n &pr,\n\n &review2,\n\n )\n\n .await?;\n\n\n\n // List reviews\n\n let reviews = pr.reviews(db_adapter.review()).await.unwrap();\n\n assert_eq!(reviews[0].username(), \"me\");\n\n assert_eq!(reviews[1].username(), \"him\");\n\n assert!(!reviews[1].required());\n\n\n\n // Parse comment\n\n parse_and_execute_command(\n\n &config,\n\n &api_adapter,\n\n &db_adapter,\n\n &redis_adapter,\n\n &mut repo,\n\n &mut pr,\n", "file_path": "crates/github_scbot_logic/src/tests/reviews.rs", "rank": 98, "score": 37306.230681556255 }, { "content": "}\n\n\n\n#[actix_rt::test]\n\nasync fn test_review_creation() -> Result<()> {\n\n async fn parse_and_execute_command(\n\n config: &Config,\n\n api_adapter: &dyn IAPIAdapter,\n\n db_adapter: &dyn IDatabaseAdapter,\n\n redis_adapter: &dyn IRedisAdapter,\n\n repo: &mut RepositoryModel,\n\n pr: &mut PullRequestModel,\n\n command_str: &str,\n\n ) -> LogicResult<()> {\n\n // Parse comment\n\n let commands = CommandParser::parse_commands(config, 
command_str);\n\n CommandExecutor::execute_commands(\n\n config,\n\n api_adapter,\n\n db_adapter,\n\n redis_adapter,\n", "file_path": "crates/github_scbot_logic/src/tests/reviews.rs", "rank": 99, "score": 37305.880096439454 } ]
Rust
src/git_config/git_config_entry.rs
rashil2000/delta
55287a827e8f2527df938a1b85e23290f8692607
use std::result::Result; use std::str::FromStr; use lazy_static::lazy_static; use regex::Regex; use crate::errors::*; #[derive(Clone, Debug)] pub enum GitConfigEntry { Style(String), GitRemote(GitRemoteRepo), } #[derive(Clone, Debug, PartialEq)] pub enum GitRemoteRepo { GitHubRepo { repo_slug: String }, GitLabRepo { repo_slug: String }, } impl GitRemoteRepo { pub fn format_commit_url(&self, commit: &str) -> String { match self { Self::GitHubRepo { repo_slug } => { format!("https://github.com/{}/commit/{}", repo_slug, commit) } Self::GitLabRepo { repo_slug } => { format!("https://gitlab.com/{}/-/commit/{}", repo_slug, commit) } } } } lazy_static! { static ref GITHUB_REMOTE_URL: Regex = Regex::new( r"(?x) ^ (?:https://|git@)? # Support both HTTPS and SSH URLs, SSH URLs optionally omitting the git@ github\.com [:/] # This separator differs between SSH and HTTPS URLs ([^/]+) # Capture the user/org name / (.+?) # Capture the repo name (lazy to avoid consuming '.git' if present) (?:\.git)? # Non-capturing group to consume '.git' if present $ " ) .unwrap(); static ref GITLAB_REMOTE_URL: Regex = Regex::new( r"(?x) ^ (?:https://|git@)? # Support both HTTPS and SSH URLs, SSH URLs optionally omitting the git@ gitlab\.com [:/] # This separator differs between SSH and HTTPS URLs ([^/]+) # Capture the user/org name (/.*)? # Capture group(s), if any / (.+?) # Capture the repo name (lazy to avoid consuming '.git' if present) (?:\.git)? 
# Non-capturing group to consume '.git' if present $ " ) .unwrap(); } impl FromStr for GitRemoteRepo { type Err = Error; fn from_str(s: &str) -> Result<Self, Self::Err> { if let Some(caps) = GITHUB_REMOTE_URL.captures(s) { Ok(Self::GitHubRepo { repo_slug: format!( "{user}/{repo}", user = caps.get(1).unwrap().as_str(), repo = caps.get(2).unwrap().as_str() ), }) } else if let Some(caps) = GITLAB_REMOTE_URL.captures(s) { Ok(Self::GitLabRepo { repo_slug: format!( "{user}{groups}/{repo}", user = caps.get(1).unwrap().as_str(), groups = caps.get(2).map(|x| x.as_str()).unwrap_or_default(), repo = caps.get(3).unwrap().as_str() ), }) } else { Err("Not a GitHub or GitLab repo.".into()) } } } #[cfg(test)] mod tests { use super::*; #[test] fn test_parse_github_urls() { let urls = &[ "https://github.com/dandavison/delta.git", "https://github.com/dandavison/delta", "[email protected]:dandavison/delta.git", "[email protected]:dandavison/delta", "github.com:dandavison/delta.git", "github.com:dandavison/delta", ]; for url in urls { let parsed = GitRemoteRepo::from_str(url); assert!(parsed.is_ok()); assert_eq!( parsed.unwrap(), GitRemoteRepo::GitHubRepo { repo_slug: "dandavison/delta".to_string() } ); } } #[test] fn test_format_github_commit_link() { let repo = GitRemoteRepo::GitHubRepo { repo_slug: "dandavison/delta".to_string(), }; let commit_hash = "d3b07384d113edec49eaa6238ad5ff00"; assert_eq!( repo.format_commit_url(commit_hash), format!("https://github.com/dandavison/delta/commit/{}", commit_hash) ) } #[test] fn test_parse_gitlab_urls() { let urls = &[ ( "https://gitlab.com/proj/grp/subgrp/repo.git", "proj/grp/subgrp/repo", ), ("https://gitlab.com/proj/grp/repo.git", "proj/grp/repo"), ("https://gitlab.com/proj/repo.git", "proj/repo"), ("https://gitlab.com/proj/repo", "proj/repo"), ( "[email protected]:proj/grp/subgrp/repo.git", "proj/grp/subgrp/repo", ), ("[email protected]:proj/repo.git", "proj/repo"), ("[email protected]:proj/repo", "proj/repo"), 
("gitlab.com:proj/grp/repo.git", "proj/grp/repo"), ("gitlab.com:proj/repo.git", "proj/repo"), ("gitlab.com:proj/repo", "proj/repo"), ]; for (url, expected) in urls { let parsed = GitRemoteRepo::from_str(url); assert!(parsed.is_ok()); assert_eq!( parsed.unwrap(), GitRemoteRepo::GitLabRepo { repo_slug: expected.to_string() } ); } } #[test] fn test_format_gitlab_commit_link() { let repo = GitRemoteRepo::GitLabRepo { repo_slug: "proj/grp/repo".to_string(), }; let commit_hash = "d3b07384d113edec49eaa6238ad5ff00"; assert_eq!( repo.format_commit_url(commit_hash), format!("https://gitlab.com/proj/grp/repo/-/commit/{}", commit_hash) ) } }
use std::result::Result; use std::str::FromStr; use lazy_static::lazy_static; use regex::Regex; use crate::errors::*; #[derive(Clone, Debug)] pub enum GitConfigEntry { Style(String), GitRemote(GitRemoteRepo), } #[derive(Clone, Debug, PartialEq)] pub enum GitRemoteRepo { GitHubRepo { repo_slug: String }, GitLabRepo { repo_slug: String }, } impl GitRemoteRepo { pub fn format_commit_url(&self, commit: &str) -> String { match self { Self::GitHubRepo { repo_slug } => { format!("https://github.com/{}/commit/{}", repo_slug, commit) } Self::GitLabRepo { repo_slug } => { format!("https://gitlab.com/{}/-/commit/{}", repo_slug, commit) } } } } lazy_static! { static ref GITHUB_REMOTE_URL: Regex = Regex::new( r"(?x) ^ (?:https://|git@)? # Support both HTTPS and SSH URLs, SSH URLs optionally omitting the git@ github\.com [:/] # This separator differs between SSH and HTTPS URLs ([^/]+) # Capture the user/org name / (.+?) # Capture the repo name (lazy to avoid consuming '.git' if present) (?:\.git)? # Non-capturing group to consume '.git' if present $ " ) .unwrap(); static ref GITLAB_REMOTE_URL: Regex = Regex::new( r"(?x) ^ (?:https://|git@)? # Support both HTTPS and SSH URLs, SSH URLs optionally omitting the git@ gitlab\.com [:/] # This separator differs between SSH and HTTPS URLs ([^/]+) # Capture the user/org name (/.*)? # Capture group(s), if any / (.+?) # Capture the repo name (lazy to avoid consuming '.git' if present) (?:\.git)? 
# Non-capturing group to consume '.git' if present $ " ) .unwrap(); } impl FromStr for GitRemoteRepo { type Err = Error; fn from_str(s: &str) -> Result<Self, Self::Err> { if let Some(caps) = GITHUB_REMOTE_URL.captures(s) { Ok(Self::GitHubRepo { repo_slug: format!( "{user}/{repo}", user = caps.get(1).unwrap().as_str(), repo = caps.get(2).unwrap().as_str() ), }) } else if let Some(caps) = GITLAB_REMOTE_URL.captures(s) { Ok(Self::GitLabRepo { repo_slug: format!( "{user}{groups}/{repo}", user = caps.get(1).unwrap().as_str(), groups = caps.get(2).map(|x| x.as_str()).unwrap_or_default(), repo = caps.get(3).unwrap().as_str() ), }) } else { Err("Not a GitHub or GitLab repo.".into()) } } } #[cfg(test)] mod tests { use super::*; #[test] fn test_parse_github_urls() { let urls = &[ "https://github.com/dandavison/delta.git", "https://github.com/dandavison/delta", "[email protected]:dandavison/delta.git", "[email protected]:dandavison/delta", "github.com:dandavison/delta.git", "github.com:dandavison/delta", ]; for url in urls { let parsed = GitRemoteRepo::from_str(url); assert!(parsed.is_ok()); assert_eq!( parsed.unwrap(), GitRemoteRepo::GitHubRepo { repo_slug: "dandavison/delta".to_string() } ); } } #[test] fn test_format_github_commit_lin
#[test] fn test_parse_gitlab_urls() { let urls = &[ ( "https://gitlab.com/proj/grp/subgrp/repo.git", "proj/grp/subgrp/repo", ), ("https://gitlab.com/proj/grp/repo.git", "proj/grp/repo"), ("https://gitlab.com/proj/repo.git", "proj/repo"), ("https://gitlab.com/proj/repo", "proj/repo"), ( "[email protected]:proj/grp/subgrp/repo.git", "proj/grp/subgrp/repo", ), ("[email protected]:proj/repo.git", "proj/repo"), ("[email protected]:proj/repo", "proj/repo"), ("gitlab.com:proj/grp/repo.git", "proj/grp/repo"), ("gitlab.com:proj/repo.git", "proj/repo"), ("gitlab.com:proj/repo", "proj/repo"), ]; for (url, expected) in urls { let parsed = GitRemoteRepo::from_str(url); assert!(parsed.is_ok()); assert_eq!( parsed.unwrap(), GitRemoteRepo::GitLabRepo { repo_slug: expected.to_string() } ); } } #[test] fn test_format_gitlab_commit_link() { let repo = GitRemoteRepo::GitLabRepo { repo_slug: "proj/grp/repo".to_string(), }; let commit_hash = "d3b07384d113edec49eaa6238ad5ff00"; assert_eq!( repo.format_commit_url(commit_hash), format!("https://gitlab.com/proj/grp/repo/-/commit/{}", commit_hash) ) } }
k() { let repo = GitRemoteRepo::GitHubRepo { repo_slug: "dandavison/delta".to_string(), }; let commit_hash = "d3b07384d113edec49eaa6238ad5ff00"; assert_eq!( repo.format_commit_url(commit_hash), format!("https://github.com/dandavison/delta/commit/{}", commit_hash) ) }
function_block-function_prefixed
[ { "content": "/// If `name` is set and, after trimming whitespace, is not empty string, then return that trimmed\n\n/// string. Else None.\n\npub fn get_env_var(_name: &str) -> Option<String> {\n\n #[cfg(not(test))]\n\n match env::var(_name).unwrap_or_else(|_| \"\".to_string()).trim() {\n\n \"\" => None,\n\n non_empty_string => Some(non_empty_string.to_string()),\n\n }\n\n #[cfg(test)]\n\n None\n\n}\n\n\n", "file_path": "src/env.rs", "rank": 0, "score": 336522.9769634101 }, { "content": "pub fn syntect_color_from_name(name: &str) -> Option<Color> {\n\n palette::named::from_str(name).map(|color| Color {\n\n r: color.red,\n\n g: color.green,\n\n b: color.blue,\n\n a: 0xFF,\n\n })\n\n}\n\n\n", "file_path": "src/utils/syntect.rs", "rank": 1, "score": 309259.6203538228 }, { "content": "pub fn ansi_16_color_name_to_number(name: &str) -> Option<u8> {\n\n ANSI_16_COLORS.get(name).copied()\n\n}\n\n\n", "file_path": "src/color.rs", "rank": 2, "score": 309259.6203538228 }, { "content": "pub fn syntect_color_from_ansi_name(name: &str) -> Option<Color> {\n\n color::ansi_16_color_name_to_number(name).and_then(syntect_color_from_ansi_number)\n\n}\n\n\n", "file_path": "src/utils/syntect.rs", "rank": 3, "score": 304425.98626664176 }, { "content": "/// Did the user supply `option` on the command line?\n\npub fn user_supplied_option(option: &str, arg_matches: &clap::ArgMatches) -> bool {\n\n arg_matches.occurrences_of(option) > 0\n\n}\n\n\n", "file_path": "src/config.rs", "rank": 4, "score": 300980.6383268801 }, { "content": "pub fn delineated_string(txt: &str) -> String {\n\n let top = \"▼\".repeat(100);\n\n let btm = \"▲\".repeat(100);\n\n let nl = \"\\n\";\n\n top + &nl + txt + &nl + &btm\n\n}\n\n\n\npub struct DeltaTest<'a> {\n\n config: Cow<'a, config::Config>,\n\n calling_process: Option<String>,\n\n explain_ansi_: bool,\n\n}\n\n\n\nimpl<'a> DeltaTest<'a> {\n\n pub fn with_args(args: &[&str]) -> Self {\n\n Self {\n\n config: Cow::Owned(make_config_from_args(args)),\n\n 
calling_process: None,\n\n explain_ansi_: false,\n\n }\n", "file_path": "src/tests/integration_test_utils.rs", "rank": 5, "score": 299661.2193085872 }, { "content": "pub fn make_placeholder_regex(labels: &[&str]) -> Regex {\n\n Regex::new(&format!(\n\n r\"(?x)\n\n \\{{\n\n ({}) # 1: Placeholder labels\n\n (?: # Start optional format spec (non-capturing)\n\n : # Literal colon\n\n (?: # Start optional fill/alignment spec (non-capturing)\n\n ([^<^>])? # 2: Optional fill character (ignored)\n\n ([<^>]) # 3: Alignment spec\n\n )? #\n\n (\\d+)? # 4: Width (optional)\n\n (?: # Start optional precision (non-capturing)\n\n \\.(\\d+) # 5: Precision\n\n )? #\n\n (?: # Start optional format type (non-capturing)\n\n _?([A-Za-z][0-9A-Za-z_-]*) # 6: Format type, optional leading _\n\n )? #\n\n )? #\n\n \\}}\n\n \",\n\n labels.join(\"|\")\n\n ))\n\n .unwrap()\n\n}\n\n\n", "file_path": "src/format.rs", "rank": 6, "score": 299266.79164239723 }, { "content": "fn format_osc8_hyperlink(url: &str, text: &str) -> String {\n\n format!(\n\n \"{osc}8;;{url}{st}{text}{osc}8;;{st}\",\n\n url = url,\n\n text = text,\n\n osc = \"\\x1b]\",\n\n st = \"\\x1b\\\\\"\n\n )\n\n}\n\n\n\nlazy_static! 
{\n\n static ref COMMIT_LINE_REGEX: Regex = Regex::new(\"(.* )?([0-9a-f]{8,40})(.*)\").unwrap();\n\n}\n\n\n", "file_path": "src/features/hyperlinks.rs", "rank": 7, "score": 294008.98626474646 }, { "content": "pub fn git_blame_filename_extension() -> Option<String> {\n\n calling_process_cmdline(ProcInfo::new(), guess_git_blame_filename_extension)\n\n}\n\n\n", "file_path": "src/utils/process.rs", "rank": 8, "score": 288409.7454263896 }, { "content": "pub fn get_submodule_short_commit(line: &str) -> Option<&str> {\n\n match SUBMODULE_SHORT_LINE_REGEX.captures(line) {\n\n Some(caps) => Some(caps.get(1).unwrap().as_str()),\n\n None => None,\n\n }\n\n}\n", "file_path": "src/handlers/submodule.rs", "rank": 9, "score": 286623.8837188247 }, { "content": "pub fn strip_ansi_codes(s: &str) -> String {\n\n strip_ansi_codes_from_strings_iterator(ansi_strings_iterator(s))\n\n}\n\n\n", "file_path": "src/ansi/mod.rs", "rank": 10, "score": 286452.90807255515 }, { "content": "fn ansi_16_color_number_to_name(n: u8) -> Option<&'static str> {\n\n for (k, _n) in &*ANSI_16_COLORS {\n\n if *_n == n {\n\n return Some(*k);\n\n }\n\n }\n\n None\n\n}\n\n\n", "file_path": "src/color.rs", "rank": 11, "score": 277105.9989431143 }, { "content": "pub fn _parse_grep_line<'b>(regex: &Regex, line: &'b str) -> Option<GrepLine<'b>> {\n\n let caps = regex.captures(line)?;\n\n let file = caps.get(1).unwrap().as_str().into();\n\n let (line_type, line_number) = &[\n\n (2, LineType::Match),\n\n (4, LineType::Context),\n\n (6, LineType::ContextHeader),\n\n ]\n\n .iter()\n\n .find_map(|(i, line_type)| {\n\n if caps.get(*i).is_some() {\n\n let line_number: Option<usize> = caps.get(i + 1).and_then(|m| m.as_str().parse().ok());\n\n Some((*line_type, line_number))\n\n } else {\n\n None\n\n }\n\n })\n\n .unwrap(); // The regex matches so one of the three alternatives must have matched\n\n let code = caps.get(8).unwrap().as_str().into();\n\n\n", "file_path": "src/handlers/grep.rs", "rank": 12, "score": 
276810.9696486093 }, { "content": "pub fn parse_git_blame_line<'a>(line: &'a str, timestamp_format: &str) -> Option<BlameLine<'a>> {\n\n let caps = BLAME_LINE_REGEX.captures(line)?;\n\n\n\n let commit = caps.get(1).unwrap().as_str();\n\n let author = caps.get(2).unwrap().as_str();\n\n let timestamp = caps.get(3).unwrap().as_str();\n\n\n\n let time = DateTime::parse_from_str(timestamp, timestamp_format).ok()?;\n\n\n\n let line_number = caps.get(4).unwrap().as_str().parse::<usize>().ok()?;\n\n\n\n let code = caps.get(5).unwrap().as_str();\n\n\n\n Some(BlameLine {\n\n commit,\n\n author,\n\n time,\n\n line_number,\n\n code,\n\n })\n\n}\n\n\n\nlazy_static! {\n\n // line numbers (`{n}`) change with every line and are set separately via `blame-separator-format`\n\n pub static ref BLAME_PLACEHOLDER_REGEX: Regex =\n\n format::make_placeholder_regex(&[\"timestamp\", \"author\", \"commit\"]);\n\n}\n\n\n", "file_path": "src/handlers/blame.rs", "rank": 13, "score": 276239.0056004444 }, { "content": "pub fn get_themes(git_config: Option<git_config::GitConfig>) -> Vec<String> {\n\n let mut themes: Vec<String> = Vec::new();\n\n for e in &git_config.unwrap().config.entries(None).unwrap() {\n\n let entry = e.unwrap();\n\n let entry_name = entry.name().unwrap();\n\n let caps = GIT_CONFIG_THEME_REGEX.captures(entry_name);\n\n if let Some(caps) = caps {\n\n let name = caps.get(1).map_or(\"\", |m| m.as_str()).to_string();\n\n if !themes.contains(&name) {\n\n themes.push(name)\n\n }\n\n }\n\n }\n\n themes.sort_by_key(|a| a.to_lowercase());\n\n themes\n\n}\n\n\n", "file_path": "src/options/get.rs", "rank": 14, "score": 272867.20505607175 }, { "content": "pub fn assert_lines_match(expected: &str, have: &str) {\n\n assert_lines_match_after_skip(0, expected, have)\n\n}\n\n\n", "file_path": "src/tests/integration_test_utils.rs", "rank": 15, "score": 271582.5729656011 }, { "content": "/// Return string formed from a byte slice starting at byte position `start`, where the index\n\n/// counts 
bytes in non-ANSI-escape-sequence content only. All ANSI escape sequences in the\n\n/// original string are preserved.\n\npub fn ansi_preserving_slice(s: &str, start: usize) -> String {\n\n AnsiElementIterator::new(s)\n\n .scan(0, |index, element| {\n\n // `index` is the index in non-ANSI-escape-sequence content.\n\n Some(match element {\n\n Element::Sgr(_, a, b) => &s[a..b],\n\n Element::Csi(a, b) => &s[a..b],\n\n Element::Esc(a, b) => &s[a..b],\n\n Element::Osc(a, b) => &s[a..b],\n\n Element::Text(a, b) => {\n\n let i = *index;\n\n *index += b - a;\n\n if *index <= start {\n\n // This text segment ends before start, so contributes no bytes.\n\n \"\"\n\n } else if i > start {\n\n // This section starts after `start`, so contributes all its bytes.\n\n &s[a..b]\n\n } else {\n\n // This section contributes those bytes that are >= start\n\n &s[(a + start - i)..b]\n\n }\n\n }\n\n })\n\n })\n\n .join(\"\")\n\n}\n\n\n", "file_path": "src/ansi/mod.rs", "rank": 16, "score": 264034.2696490079 }, { "content": "pub fn explain_ansi(line: &str, colorful: bool) -> String {\n\n use crate::style::Style;\n\n\n\n parse_style_sections(line)\n\n .into_iter()\n\n .map(|(ansi_term_style, s)| {\n\n let style = Style {\n\n ansi_term_style,\n\n ..Style::default()\n\n };\n\n if colorful {\n\n format!(\"({}){}\", style.to_painted_string(), style.paint(s))\n\n } else {\n\n format!(\"({}){}\", style, s)\n\n }\n\n })\n\n .collect()\n\n}\n\n\n\n#[cfg(test)]\n", "file_path": "src/ansi/mod.rs", "rank": 17, "score": 264029.08012286865 }, { "content": "/// Attempt to parse input as a file path and return extension as a &str.\n\npub fn get_extension(s: &str) -> Option<&str> {\n\n let path = Path::new(s);\n\n path.extension()\n\n .and_then(|e| e.to_str())\n\n // E.g. 
'Makefile' is the file name and also the extension\n\n .or_else(|| path.file_name().and_then(|s| s.to_str()))\n\n}\n\n\n", "file_path": "src/handlers/diff_header.rs", "rank": 18, "score": 261626.55264333048 }, { "content": "pub fn parse_color(s: &str, true_color: bool, git_config: Option<&GitConfig>) -> Option<Color> {\n\n if s == \"normal\" {\n\n return None;\n\n }\n\n let die = || {\n\n fatal(format!(\"Invalid color or style attribute: {}\", s));\n\n };\n\n let syntect_color = if s.starts_with('#') {\n\n SyntectColor::from_str(s).unwrap_or_else(|_| die())\n\n } else {\n\n let syntect_color = s\n\n .parse::<u8>()\n\n .ok()\n\n .and_then(utils::syntect::syntect_color_from_ansi_number)\n\n .or_else(|| utils::syntect::syntect_color_from_ansi_name(s))\n\n .or_else(|| utils::syntect::syntect_color_from_name(s));\n\n if syntect_color.is_none() {\n\n if let Some(git_config) = git_config {\n\n if let Some(val) = git_config.get::<String>(&format!(\"delta.{}\", s)) {\n\n return parse_color(&val, true_color, None);\n\n }\n\n }\n\n die();\n\n }\n\n syntect_color.unwrap()\n\n };\n\n utils::bat::terminal::to_ansi_color(syntect_color, true_color)\n\n}\n\n\n", "file_path": "src/color.rs", "rank": 19, "score": 259377.45769101736 }, { "content": "// Return the first CSI element, if any, as an `ansi_term::Style`.\n\npub fn parse_first_style(s: &str) -> Option<ansi_term::Style> {\n\n AnsiElementIterator::new(s).find_map(|el| match el {\n\n Element::Sgr(style, _, _) => Some(style),\n\n _ => None,\n\n })\n\n}\n\n\n", "file_path": "src/ansi/mod.rs", "rank": 20, "score": 258892.94367415376 }, { "content": "/// Return the byte index in `s` of the i-th text byte in `s`. I.e. 
`i` counts\n\n/// bytes in non-ANSI-escape-sequence content only.\n\npub fn ansi_preserving_index(s: &str, i: usize) -> Option<usize> {\n\n let mut index = 0;\n\n for element in AnsiElementIterator::new(s) {\n\n if let Element::Text(a, b) = element {\n\n index += b - a;\n\n if index > i {\n\n return Some(b - (index - i));\n\n }\n\n }\n\n }\n\n None\n\n}\n\n\n", "file_path": "src/ansi/mod.rs", "rank": 21, "score": 258838.00822679768 }, { "content": "fn split_feature_string(features: &str) -> impl Iterator<Item = &str> {\n\n features.split_whitespace().rev()\n\n}\n\n\n\nimpl FromStr for cli::InspectRawLines {\n\n type Err = Error;\n\n fn from_str(s: &str) -> Result<Self, Self::Err> {\n\n match s.to_lowercase().as_str() {\n\n \"true\" => Ok(Self::True),\n\n \"false\" => Ok(Self::False),\n\n _ => {\n\n fatal(format!(\n\n r#\"Invalid value for inspect-raw-lines option: {}. Valid values are \"true\", and \"false\".\"#,\n\n s\n\n ));\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/options/set.rs", "rank": 22, "score": 257714.5585323561 }, { "content": "pub fn run_delta(input: &str, config: &config::Config) -> String {\n\n let mut writer: Vec<u8> = Vec::new();\n\n\n\n delta(\n\n ByteLines::new(BufReader::new(input.as_bytes())),\n\n &mut writer,\n\n &config,\n\n )\n\n .unwrap();\n\n String::from_utf8(writer).unwrap()\n\n}\n\n\n\npub mod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_lines_match_ok() {\n\n let expected = r#\"\n\n one\n\n two\n", "file_path": "src/tests/integration_test_utils.rs", "rank": 23, "score": 257079.30790520838 }, { "content": "fn ansi_strings_iterator(s: &str) -> impl Iterator<Item = (&str, bool)> {\n\n AnsiElementIterator::new(s).map(move |el| match el {\n\n Element::Sgr(_, i, j) => (&s[i..j], true),\n\n Element::Csi(i, j) => (&s[i..j], true),\n\n Element::Esc(i, j) => (&s[i..j], true),\n\n Element::Osc(i, j) => (&s[i..j], true),\n\n Element::Text(i, j) => (&s[i..j], false),\n\n })\n\n}\n\n\n", "file_path": "src/ansi/mod.rs", "rank": 24, 
"score": 254209.4973353463 }, { "content": "fn parse_config_from_env_var_value(s: &str) -> HashMap<String, String> {\n\n GIT_CONFIG_PARAMETERS_REGEX\n\n .captures_iter(s)\n\n .map(|captures| {\n\n let (i, j) = match (\n\n captures.get(1),\n\n captures.get(2),\n\n captures.get(3),\n\n captures.get(4),\n\n ) {\n\n (Some(_), Some(_), None, None) => (1, 2),\n\n (None, None, Some(_), Some(_)) => (3, 4),\n\n _ => (0, 0),\n\n };\n\n if (i, j) == (0, 0) {\n\n (\"\".to_string(), \"\".to_string())\n\n } else {\n\n (captures[i].to_string(), captures[j].to_string())\n\n }\n\n })\n\n .collect()\n\n}\n\n\n", "file_path": "src/git_config/mod.rs", "rank": 25, "score": 252684.33751854533 }, { "content": "// Given an `expected` block as a raw string like: `r#\"\n\n// #indent_mark [optional]\n\n// line1\"#;` // line 2 etc.\n\n// ignore the first newline and compare the following `lines()` to those produced\n\n// by `have`, `skip`-ping the first few. The leading spaces of the first line\n\n// to indicate the last line in the list). The leading spaces of the first line\n\n// are stripped from every following line (and verified), unless the first line\n\n// marks the indentation level with `#indent_mark`.\n\npub fn assert_lines_match_after_skip(skip: usize, expected: &str, have: &str) {\n\n let mut exp = expected.lines().peekable();\n\n let mut line1 = exp.next().unwrap();\n\n let allow_partial = line1 == \"#partial\";\n\n assert!(\n\n allow_partial || line1.is_empty(),\n\n \"first line must be empty or \\\"#partial\\\"\"\n\n );\n\n line1 = exp.peek().unwrap();\n\n let indentation = line1.find(|c| c != ' ').unwrap_or(0);\n\n let ignore_indent = &line1[indentation..] 
== \"#indent_mark\";\n\n if ignore_indent {\n\n let _indent_mark = exp.next();\n\n }\n\n\n\n let mut it = have.lines().skip(skip);\n\n\n\n for (i, expected) in exp.enumerate() {\n\n if !ignore_indent {\n\n let next_indentation = expected.find(|c| c != ' ').unwrap_or(0);\n", "file_path": "src/tests/integration_test_utils.rs", "rank": 26, "score": 251052.29077778306 }, { "content": "pub fn string_starts_with_ansi_style_sequence(s: &str) -> bool {\n\n AnsiElementIterator::new(s)\n\n .next()\n\n .map(|el| matches!(el, Element::Sgr(_, _, _)))\n\n .unwrap_or(false)\n\n}\n\n\n", "file_path": "src/ansi/mod.rs", "rank": 27, "score": 247056.51276298962 }, { "content": "/// Return true iff `s` contains exactly one occurrence of substring `t`.\n\npub fn contains_once(s: &str, t: &str) -> bool {\n\n match (s.find(t), s.rfind(t)) {\n\n (Some(i), Some(j)) => i == j,\n\n _ => false,\n\n }\n\n}\n\n\n", "file_path": "src/tests/test_utils.rs", "rank": 28, "score": 246620.50499632017 }, { "content": "pub fn make_options_from_args(args: &[&str]) -> cli::Opt {\n\n make_options_from_args_and_git_config(args, None, None)\n\n}\n\n\n", "file_path": "src/tests/integration_test_utils.rs", "rank": 29, "score": 244586.3093210083 }, { "content": "pub fn make_options_from_args_and_git_config(\n\n args: &[&str],\n\n git_config_contents: Option<&[u8]>,\n\n git_config_path: Option<&str>,\n\n) -> cli::Opt {\n\n _make_options_from_args_and_git_config(args, git_config_contents, git_config_path, false)\n\n}\n\n\n", "file_path": "src/tests/integration_test_utils.rs", "rank": 30, "score": 241922.69438693428 }, { "content": "pub fn color_groups() -> HashMap<String, Vec<(String, String)>> {\n\n [\n\n (\n\n \"Blue\",\n\n vec![\n\n (\"cadetblue\", \"#5f9ea0\"),\n\n (\"steelblue\", \"#4682b4\"),\n\n (\"lightsteelblue\", \"#b0c4de\"),\n\n (\"lightblue\", \"#add8e6\"),\n\n (\"powderblue\", \"#b0e0e6\"),\n\n (\"lightskyblue\", \"#87cefa\"),\n\n (\"skyblue\", \"#87ceeb\"),\n\n (\"cornflowerblue\", 
\"#6495ed\"),\n\n (\"deepskyblue\", \"#00bfff\"),\n\n (\"dodgerblue\", \"#1e90ff\"),\n\n (\"royalblue\", \"#4169e1\"),\n\n (\"blue\", \"#0000ff\"),\n\n (\"mediumblue\", \"#0000cd\"),\n\n (\"darkblue\", \"#00008b\"),\n\n (\"navy\", \"#000080\"),\n", "file_path": "src/colors.rs", "rank": 31, "score": 239448.08538224164 }, { "content": "#[allow(dead_code)]\n\npub fn print_with_line_numbers(s: &str) {\n\n for (i, t) in s.lines().enumerate() {\n\n println!(\"{:>2}│ {}\", i + 1, t);\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::tests::test_utils::*;\n\n\n\n #[test]\n\n fn test_contains_once_1() {\n\n assert!(contains_once(\"\", \"\"));\n\n }\n\n\n\n #[test]\n\n fn test_contains_once_2() {\n\n assert!(contains_once(\"a\", \"a\"));\n\n }\n\n\n", "file_path": "src/tests/test_utils.rs", "rank": 32, "score": 238509.62703733955 }, { "content": "fn _parse_file_path(s: &str, git_diff_name: bool) -> String {\n\n // It appears that, if the file name contains a space, git appends a tab\n\n // character in the diff metadata lines, e.g.\n\n // $ git diff --no-index \"a b\" \"c d\" | cat -A\n\n // diff·--git·a/a·b·b/c·d␊\n\n // index·d00491f..0cfbf08·100644␊\n\n // ---·a/a·b├──┤␊\n\n // +++·b/c·d├──┤␊\n\n match s.strip_suffix('\\t').unwrap_or(s) {\n\n path if path == \"/dev/null\" => \"/dev/null\",\n\n path if git_diff_name && DIFF_PREFIXES.iter().any(|s| path.starts_with(s)) => &path[2..],\n\n path if git_diff_name => path,\n\n path => path.split('\\t').next().unwrap_or(\"\"),\n\n }\n\n .to_string()\n\n}\n\n\n", "file_path": "src/handlers/diff_header.rs", "rank": 33, "score": 234935.83980560268 }, { "content": "/// Given input like \"diff --git a/src/my file.rs b/src/my file.rs\"\n\n/// return Some(\"src/my file.rs\")\n\nfn get_repeated_file_path_from_diff_line(line: &str) -> Option<String> {\n\n if let Some(line) = line.strip_prefix(\"diff --git \") {\n\n let line: Vec<&str> = line.graphemes(true).collect();\n\n let midpoint = line.len() / 2;\n\n if line[midpoint] == 
\" \" {\n\n let first_path = _parse_file_path(&line[..midpoint].join(\"\"), true);\n\n let second_path = _parse_file_path(&line[midpoint + 1..].join(\"\"), true);\n\n if first_path == second_path {\n\n return Some(first_path);\n\n }\n\n }\n\n }\n\n None\n\n}\n\n\n", "file_path": "src/handlers/diff_header.rs", "rank": 34, "score": 232532.37764968682 }, { "content": "pub fn parse_grep_line(line: &str) -> Option<GrepLine> {\n\n if line.starts_with('{') {\n\n ripgrep_json::parse_line(line)\n\n } else {\n\n match &*process::calling_process() {\n\n process::CallingProcess::GitGrep(_) | process::CallingProcess::OtherGrep => [\n\n &*GREP_LINE_REGEX_ASSUMING_FILE_EXTENSION_AND_LINE_NUMBER,\n\n &*GREP_LINE_REGEX_ASSUMING_FILE_EXTENSION_NO_SPACES,\n\n &*GREP_LINE_REGEX_ASSUMING_FILE_EXTENSION,\n\n &*GREP_LINE_REGEX_ASSUMING_NO_INTERNAL_SEPARATOR_CHARS,\n\n ]\n\n .iter()\n\n .find_map(|regex| _parse_grep_line(*regex, line)),\n\n _ => None,\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/handlers/grep.rs", "rank": 35, "score": 231700.58909095518 }, { "content": "pub fn make_options_from_args_and_git_config_honoring_env_var(\n\n args: &[&str],\n\n git_config_contents: Option<&[u8]>,\n\n git_config_path: Option<&str>,\n\n) -> cli::Opt {\n\n _make_options_from_args_and_git_config(args, git_config_contents, git_config_path, true)\n\n}\n\n\n", "file_path": "src/tests/integration_test_utils.rs", "rank": 36, "score": 228210.9637316198 }, { "content": "type OptionValueFunction = Box<dyn Fn(&cli::Opt, &Option<GitConfig>) -> ProvenancedOptionValue>;\n\n\n", "file_path": "src/features/mod.rs", "rank": 37, "score": 227394.96251711686 }, { "content": "pub fn make_git_config(contents: &[u8], path: &str, honor_env_var: bool) -> GitConfig {\n\n let path = Path::new(path);\n\n let mut file = File::create(path).unwrap();\n\n file.write_all(contents).unwrap();\n\n GitConfig::from_path(&path, honor_env_var)\n\n}\n\n\n", "file_path": "src/tests/integration_test_utils.rs", "rank": 38, "score": 
223200.68799926538 }, { "content": "pub fn parse_line(line: &str) -> Option<grep::GrepLine> {\n\n let ripgrep_line: Option<RipGrepLine> = serde_json::from_str(line).ok();\n\n match ripgrep_line {\n\n Some(ripgrep_line) => {\n\n // A real line of rg --json output, i.e. either of type \"match\" or\n\n // \"context\".\n\n let mut code = ripgrep_line.data.lines.text;\n\n if code.ends_with('\\n') {\n\n code.truncate(code.len() - 1);\n\n if code.ends_with('\\r') {\n\n code.truncate(code.len() - 1);\n\n }\n\n }\n\n Some(grep::GrepLine {\n\n line_type: ripgrep_line._type,\n\n line_number: ripgrep_line.data.line_number,\n\n path: Cow::from(ripgrep_line.data.path.text),\n\n code: Cow::from(code),\n\n submatches: Some(\n\n ripgrep_line\n", "file_path": "src/handlers/ripgrep_json.rs", "rank": 39, "score": 222506.3634418273 }, { "content": "pub fn measure_text_width(s: &str) -> usize {\n\n // TODO: how should e.g. '\\n' be handled?\n\n strip_ansi_codes(s).width()\n\n}\n\n\n", "file_path": "src/ansi/mod.rs", "rank": 40, "score": 221882.08323386565 }, { "content": "pub fn guess_git_blame_filename_extension(args: &[String]) -> ProcessArgs<String> {\n\n let all_args = args.iter().map(|s| s.as_str());\n\n\n\n // See git(1) and git-blame(1). 
Some arguments separate their parameter with space or '=', e.g.\n\n // --date 2015 or --date=2015.\n\n let git_blame_options_with_parameter =\n\n \"-C -c -L --since --ignore-rev --ignore-revs-file --contents --reverse --date\";\n\n\n\n let selected_args =\n\n skip_uninteresting_args(all_args, git_blame_options_with_parameter.split(' '));\n\n\n\n match selected_args.as_slice() {\n\n [git, \"blame\", .., last_arg] if is_git_binary(git) => match last_arg.split('.').last() {\n\n Some(arg) => ProcessArgs::Args(arg.to_string()),\n\n None => ProcessArgs::ArgError,\n\n },\n\n [git, \"blame\"] if is_git_binary(git) => ProcessArgs::ArgError,\n\n _ => ProcessArgs::OtherProcess,\n\n }\n\n}\n\n\n", "file_path": "src/utils/process.rs", "rank": 41, "score": 220671.18987546762 }, { "content": "/// If `name` is set to any value at all (including \"\") then return true; else false.\n\npub fn get_boolean_env_var(_name: &str) -> bool {\n\n #[cfg(not(test))]\n\n {\n\n env::var(_name).ok().is_some()\n\n }\n\n #[cfg(test)]\n\n false\n\n}\n", "file_path": "src/env.rs", "rank": 42, "score": 217671.41188714985 }, { "content": "pub fn is_light_syntax_theme(theme: &str) -> bool {\n\n LIGHT_SYNTAX_THEMES.contains(&theme) || theme.to_lowercase().contains(\"light\")\n\n}\n\n\n\nconst LIGHT_SYNTAX_THEMES: [&str; 6] = [\n\n \"GitHub\",\n\n \"gruvbox-light\",\n\n \"gruvbox-white\",\n\n \"Monokai Extended Light\",\n\n \"OneHalfLight\",\n\n \"Solarized (light)\",\n\n];\n\n\n\nconst DEFAULT_LIGHT_SYNTAX_THEME: &str = \"GitHub\";\n\nconst DEFAULT_DARK_SYNTAX_THEME: &str = \"Monokai Extended\";\n\n\n", "file_path": "src/options/theme.rs", "rank": 43, "score": 217246.68983471324 }, { "content": "pub fn parse_style_sections(s: &str) -> Vec<(ansi_term::Style, &str)> {\n\n let mut sections = Vec::new();\n\n let mut curr_style = Style::default();\n\n for element in AnsiElementIterator::new(s) {\n\n match element {\n\n Element::Text(start, end) => sections.push((curr_style, &s[start..end])),\n\n 
Element::Sgr(style, _, _) => curr_style = style,\n\n _ => {}\n\n }\n\n }\n\n sections\n\n}\n\n\n", "file_path": "src/ansi/mod.rs", "rank": 44, "score": 215627.53020549373 }, { "content": "fn parse_diff_header_line(line: &str, git_diff_name: bool) -> (String, FileEvent) {\n\n match line {\n\n line if line.starts_with(\"--- \") || line.starts_with(\"+++ \") => {\n\n let offset = 4;\n\n let file = _parse_file_path(&line[offset..], git_diff_name);\n\n (file, FileEvent::Change)\n\n }\n\n line if line.starts_with(\"rename from \") => {\n\n (line[12..].to_string(), FileEvent::Rename) // \"rename from \".len()\n\n }\n\n line if line.starts_with(\"rename to \") => {\n\n (line[10..].to_string(), FileEvent::Rename) // \"rename to \".len()\n\n }\n\n line if line.starts_with(\"copy from \") => {\n\n (line[10..].to_string(), FileEvent::Copy) // \"copy from \".len()\n\n }\n\n line if line.starts_with(\"copy to \") => {\n\n (line[8..].to_string(), FileEvent::Copy) // \"copy to \".len()\n\n }\n\n _ => (\"\".to_string(), FileEvent::NoEvent),\n\n }\n\n}\n\n\n", "file_path": "src/handlers/diff_header.rs", "rank": 45, "score": 214634.8007965354 }, { "content": "/// Relativize path if delta config demands that and paths are not already relativized by git.\n\npub fn relativize_path_maybe(path: &str, config: &Config) -> Option<PathBuf> {\n\n if config.relative_paths && !calling_process().paths_in_input_are_relative_to_cwd() {\n\n if let Some(base) = config.cwd_relative_to_repo_root.as_deref() {\n\n pathdiff::diff_paths(&path, base)\n\n } else {\n\n None\n\n }\n\n } else {\n\n None\n\n }\n\n}\n\n\n", "file_path": "src/utils/path.rs", "rank": 46, "score": 214085.20899277463 }, { "content": "// Infer absolute path to `relative_path`.\n\npub fn absolute_path(relative_path: &str, config: &Config) -> Option<PathBuf> {\n\n match (\n\n &config.cwd_of_delta_process,\n\n &config.cwd_of_user_shell_process,\n\n calling_process().paths_in_input_are_relative_to_cwd() || config.relative_paths,\n\n ) 
{\n\n // Note that if we were invoked by git then cwd_of_delta_process == repo_root\n\n (Some(cwd_of_delta_process), _, false) => Some(cwd_of_delta_process.join(relative_path)),\n\n (_, Some(cwd_of_user_shell_process), true) => {\n\n Some(cwd_of_user_shell_process.join(relative_path))\n\n }\n\n (Some(cwd_of_delta_process), None, true) => {\n\n // This might occur when piping from git to delta?\n\n Some(cwd_of_delta_process.join(relative_path))\n\n }\n\n _ => None,\n\n }\n\n .map(normalize_path)\n\n}\n\n\n", "file_path": "src/utils/path.rs", "rank": 47, "score": 214080.4829737608 }, { "content": "pub fn make_feature() -> Vec<(String, OptionValueFunction)> {\n\n builtin_feature!([\n\n (\n\n \"navigate\",\n\n bool,\n\n None,\n\n _opt => true\n\n ),\n\n (\n\n \"file-modified-label\",\n\n String,\n\n None,\n\n _opt => \"Δ\"\n\n ),\n\n (\n\n \"hunk-label\",\n\n String,\n\n None,\n\n _opt => \"•\"\n\n )\n\n ])\n\n}\n\n\n", "file_path": "src/features/navigate.rs", "rank": 48, "score": 213746.8276693549 }, { "content": "/// color-only is like raw but does not override these styles.\n\npub fn make_feature() -> Vec<(String, OptionValueFunction)> {\n\n let styles: HashSet<_> = [\n\n \"minus-style\",\n\n \"minus-emph-style\",\n\n \"zero-style\",\n\n \"plus-style\",\n\n \"plus-emph-style\",\n\n ]\n\n .iter()\n\n .collect();\n\n raw::make_feature()\n\n .into_iter()\n\n .filter(|(k, _)| !styles.contains(&k.as_str()))\n\n .collect()\n\n}\n", "file_path": "src/features/color_only.rs", "rank": 49, "score": 213746.8276693549 }, { "content": "pub fn make_feature() -> Vec<(String, OptionValueFunction)> {\n\n builtin_feature!([\n\n (\n\n \"commit-decoration-style\",\n\n String,\n\n None,\n\n _opt => \"none\"\n\n ),\n\n (\n\n \"commit-style\",\n\n String,\n\n None,\n\n _opt => \"raw\"\n\n ),\n\n (\n\n \"file-decoration-style\",\n\n String,\n\n None,\n\n _opt => \"none\"\n\n ),\n", "file_path": "src/features/raw.rs", "rank": 50, "score": 213746.8276693549 }, { "content": "pub fn 
make_feature() -> Vec<(String, OptionValueFunction)> {\n\n builtin_feature!([\n\n (\n\n \"hyperlinks\",\n\n bool,\n\n None,\n\n _opt => true\n\n )\n\n ])\n\n}\n\n\n", "file_path": "src/features/hyperlinks.rs", "rank": 51, "score": 213746.8276693549 }, { "content": "/// Split line into tokens for alignment. The alignment algorithm aligns sequences of substrings;\n\n/// not individual characters.\n\nfn tokenize<'a>(line: &'a str, regex: &Regex) -> Vec<&'a str> {\n\n // Starting with \"\", see comment in Alignment::new(). Historical note: Replacing the '+/-'\n\n // prefix with a space implicitly generated this.\n\n let mut tokens = vec![\"\"];\n\n let mut offset = 0;\n\n for m in regex.find_iter(line) {\n\n if offset == 0 && m.start() > 0 {\n\n tokens.push(\"\");\n\n }\n\n // Align separating text as multiple single-character tokens.\n\n for t in line[offset..m.start()].graphemes(true) {\n\n tokens.push(t);\n\n }\n\n tokens.push(&line[m.start()..m.end()]);\n\n offset = m.end();\n\n }\n\n if offset < line.len() {\n\n if offset == 0 {\n\n tokens.push(\"\");\n\n }\n", "file_path": "src/edits.rs", "rank": 52, "score": 213574.6691817913 }, { "content": "pub fn make_config_from_args(args: &[&str]) -> config::Config {\n\n config::Config::from(make_options_from_args(args))\n\n}\n\n\n", "file_path": "src/tests/integration_test_utils.rs", "rank": 53, "score": 213008.58046852643 }, { "content": "/// Truncate string such that `tail` is present as a suffix, preceded by as much of `s` as can be\n\n/// displayed in the requested width.\n\n// Return string constructed as follows:\n\n// 1. `display_width` characters are available. If the string fits, return it.\n\n//\n\n// 2. Contribute graphemes and ANSI escape sequences from `tail` until either (1) `tail` is\n\n// exhausted, or (2) the display width of the result would exceed `display_width`.\n\n//\n\n// 3. 
If tail was exhausted, then contribute graphemes and ANSI escape sequences from `s` until the\n\n// display_width of the result would exceed `display_width`.\n\npub fn truncate_str<'a, 'b>(s: &'a str, display_width: usize, tail: &'b str) -> Cow<'a, str> {\n\n let items = ansi_strings_iterator(s).collect::<Vec<(&str, bool)>>();\n\n let width = strip_ansi_codes_from_strings_iterator(items.iter().copied()).width();\n\n if width <= display_width {\n\n return Cow::from(s);\n\n }\n\n let result_tail = if !tail.is_empty() {\n\n truncate_str(tail, display_width, \"\").to_string()\n\n } else {\n\n String::new()\n\n };\n\n let mut used = measure_text_width(&result_tail);\n\n let mut result = String::new();\n\n for (t, is_ansi) in items {\n\n if !is_ansi {\n\n for g in t.graphemes(true) {\n\n let w = g.width();\n\n if used + w > display_width {\n\n break;\n\n }\n", "file_path": "src/ansi/mod.rs", "rank": 54, "score": 210452.4683020047 }, { "content": "/// If output is going to a tty, emit hyperlinks if requested.\n\n// Although raw output should basically be emitted unaltered, we do this.\n\npub fn format_raw_line<'a>(line: &'a str, config: &Config) -> Cow<'a, str> {\n\n if config.hyperlinks && atty::is(atty::Stream::Stdout) {\n\n features::hyperlinks::format_commit_line_with_osc8_commit_hyperlink(line, config)\n\n } else {\n\n Cow::from(line)\n\n }\n\n}\n\n\n", "file_path": "src/delta.rs", "rank": 55, "score": 210285.85826617756 }, { "content": "// Construct a 2-level hash map: (feature name) -> (option name) -> (value function). 
A value\n\n// function is a function that takes an Opt struct, and a git Config struct, and returns the value\n\n// for the option.\n\npub fn make_builtin_features() -> HashMap<String, BuiltinFeature> {\n\n vec![\n\n (\n\n \"color-only\".to_string(),\n\n color_only::make_feature().into_iter().collect(),\n\n ),\n\n (\n\n \"diff-highlight\".to_string(),\n\n diff_highlight::make_feature().into_iter().collect(),\n\n ),\n\n (\n\n \"diff-so-fancy\".to_string(),\n\n diff_so_fancy::make_feature().into_iter().collect(),\n\n ),\n\n (\n\n \"hyperlinks\".to_string(),\n\n hyperlinks::make_feature().into_iter().collect(),\n\n ),\n\n (\n\n \"line-numbers\".to_string(),\n", "file_path": "src/features/mod.rs", "rank": 56, "score": 210054.92450670287 }, { "content": "pub fn make_feature() -> Vec<(String, OptionValueFunction)> {\n\n builtin_feature!([\n\n (\n\n \"line-numbers\",\n\n bool,\n\n None,\n\n _opt => true\n\n ),\n\n (\n\n \"line-numbers-left-style\",\n\n String,\n\n None,\n\n _opt => \"blue\"\n\n ),\n\n (\n\n \"line-numbers-right-style\",\n\n String,\n\n None,\n\n _opt => \"blue\"\n\n ),\n", "file_path": "src/features/line_numbers.rs", "rank": 57, "score": 209838.21068606572 }, { "content": "pub fn make_feature() -> Vec<(String, OptionValueFunction)> {\n\n builtin_feature!([\n\n (\n\n \"side-by-side\",\n\n bool,\n\n None,\n\n _opt => true\n\n ),\n\n (\"features\", bool, None, _opt => \"line-numbers\"),\n\n (\"line-numbers-left-format\", String, None, _opt => \"│{nm:^4}│\".to_string()),\n\n (\"line-numbers-right-format\", String, None, _opt => \"│{np:^4}│\".to_string())\n\n ])\n\n}\n\n\n\n// Aliases for Minus/Plus because Left/Right and PanelSide makes\n\n// more sense in a side-by-side context.\n\npub use crate::minusplus::MinusPlusIndex as PanelSide;\n\npub use MinusPlusIndex::Minus as Left;\n\npub use MinusPlusIndex::Plus as Right;\n\n\n", "file_path": "src/features/side_by_side.rs", "rank": 58, "score": 209838.21068606572 }, { "content": "pub fn make_feature() -> 
Vec<(String, OptionValueFunction)> {\n\n let mut feature = diff_highlight::_make_feature(true);\n\n feature.extend(builtin_feature!([\n\n (\n\n \"minus-emph-style\",\n\n String,\n\n Some(\"color.diff-highlight.oldHighlight\"),\n\n _opt => \"bold red 52\"\n\n ),\n\n (\n\n \"plus-emph-style\",\n\n String,\n\n Some(\"color.diff-highlight.newHighlight\"),\n\n _opt => \"bold green 22\"\n\n ),\n\n (\n\n \"file-style\",\n\n String,\n\n Some(\"color.diff.meta\"),\n\n _opt => \"11\"\n", "file_path": "src/features/diff_so_fancy.rs", "rank": 59, "score": 209838.21068606572 }, { "content": "pub fn make_feature() -> Vec<(String, OptionValueFunction)> {\n\n _make_feature(false)\n\n}\n\n\n", "file_path": "src/features/diff_highlight.rs", "rank": 60, "score": 209838.21068606572 }, { "content": "/// Remove initial -/+ character, expand tabs as spaces, and terminate with newline.\n\n// Terminating with newline character is necessary for many of the sublime syntax definitions to\n\n// highlight correctly.\n\n// See https://docs.rs/syntect/3.2.0/syntect/parsing/struct.SyntaxSetBuilder.html#method.add_from_folder\n\npub fn prepare(line: &str, prefix_length: usize, config: &config::Config) -> String {\n\n if !line.is_empty() {\n\n // The prefix contains -/+/space characters, added by git. We removes them now so they\n\n // are not present during syntax highlighting or wrapping. If --keep-plus-minus-markers\n\n // is in effect the prefix is re-inserted in Painter::paint_line.\n\n let line = line.graphemes(true).skip(prefix_length);\n\n format!(\"{}\\n\", expand_tabs(line, config.tab_width))\n\n } else {\n\n \"\\n\".to_string()\n\n }\n\n}\n\n\n", "file_path": "src/paint.rs", "rank": 61, "score": 209283.09626243822 }, { "content": "fn is_git_binary(git: &str) -> bool {\n\n // Ignore case, for e.g. 
NTFS or APFS file systems\n\n Path::new(git)\n\n .file_stem()\n\n .and_then(|os_str| os_str.to_str())\n\n .map(|s| s.eq_ignore_ascii_case(\"git\"))\n\n .unwrap_or(false)\n\n}\n\n\n", "file_path": "src/utils/process.rs", "rank": 62, "score": 209113.67783405317 }, { "content": "pub fn format_commit_line_with_osc8_commit_hyperlink<'a>(\n\n line: &'a str,\n\n config: &Config,\n\n) -> Cow<'a, str> {\n\n if let Some(commit_link_format) = &config.hyperlinks_commit_link_format {\n\n COMMIT_LINE_REGEX.replace(line, |captures: &Captures| {\n\n let commit = captures.get(2).unwrap().as_str();\n\n format_osc8_hyperlink(&commit_link_format.replace(\"{commit}\", commit), commit)\n\n })\n\n } else if let Some(GitConfigEntry::GitRemote(repo)) =\n\n config.git_config.as_ref().and_then(get_remote_url)\n\n {\n\n COMMIT_LINE_REGEX.replace(line, |captures: &Captures| {\n\n format_commit_line_captures_with_osc8_commit_hyperlink(captures, &repo)\n\n })\n\n } else {\n\n Cow::from(line)\n\n }\n\n}\n\n\n", "file_path": "src/features/hyperlinks.rs", "rank": 63, "score": 208999.4956637729 }, { "content": "fn parse_config_from_env_var() -> HashMap<String, String> {\n\n if let Ok(s) = env::var(\"GIT_CONFIG_PARAMETERS\") {\n\n parse_config_from_env_var_value(&s)\n\n } else {\n\n HashMap::new()\n\n }\n\n}\n\n\n\nlazy_static! 
{\n\n static ref GIT_CONFIG_PARAMETERS_REGEX: Regex = Regex::new(\n\n r\"(?x)\n\n (?: # Non-capturing group containing union\n\n '(delta\\.[a-z-]+)=([^']+)' # Git <2.31.0 format\n\n |\n\n '(delta\\.[a-z-]+)'='([^']+)' # Git ≥2.31.0 format\n\n )\n\n \"\n\n )\n\n .unwrap();\n\n}\n\n\n", "file_path": "src/git_config/mod.rs", "rank": 64, "score": 208531.53130266393 }, { "content": "#[allow(dead_code)]\n\npub fn make_config_from_args_and_git_config(\n\n args: &[&str],\n\n git_config_contents: Option<&[u8]>,\n\n git_config_path: Option<&str>,\n\n) -> config::Config {\n\n config::Config::from(make_options_from_args_and_git_config(\n\n args,\n\n git_config_contents,\n\n git_config_path,\n\n ))\n\n}\n\n\n", "file_path": "src/tests/integration_test_utils.rs", "rank": 65, "score": 208443.14002274742 }, { "content": "fn ensure_display_width_1(what: &str, arg: String) -> String {\n\n match arg.grapheme_indices(true).count() {\n\n INLINE_SYMBOL_WIDTH_1 => arg,\n\n width => fatal(format!(\n\n \"Invalid value for {}, display width of \\\"{}\\\" must be {} but is {}\",\n\n what, arg, INLINE_SYMBOL_WIDTH_1, width\n\n )),\n\n }\n\n}\n\n\n", "file_path": "src/config.rs", "rank": 66, "score": 205247.7239315515 }, { "content": "pub fn color_to_string(color: Color) -> String {\n\n match color {\n\n Color::Fixed(n) if n < 16 => ansi_16_color_number_to_name(n).unwrap().to_string(),\n\n Color::Fixed(n) => format!(\"{}\", n),\n\n Color::RGB(r, g, b) => format!(\"\\\"#{:02x?}{:02x?}{:02x?}\\\"\", r, g, b),\n\n Color::Black => \"black\".to_string(),\n\n Color::Red => \"red\".to_string(),\n\n Color::Green => \"green\".to_string(),\n\n Color::Yellow => \"yellow\".to_string(),\n\n Color::Blue => \"blue\".to_string(),\n\n Color::Purple => \"purple\".to_string(),\n\n Color::Cyan => \"cyan\".to_string(),\n\n Color::White => \"white\".to_string(),\n\n }\n\n}\n\n\n\n// See\n\n// https://en.wikipedia.org/wiki/ANSI_escape_code#8-bit\n\nlazy_static! 
{\n\n static ref ANSI_16_COLORS: HashMap<&'static str, u8> = {\n", "file_path": "src/color.rs", "rank": 67, "score": 202111.86352972814 }, { "content": "// Remove initial -/+ characters, expand tabs as spaces, retaining ANSI sequences. Terminate with\n\n// newline character.\n\npub fn prepare_raw_line(raw_line: &str, prefix_length: usize, config: &config::Config) -> String {\n\n format!(\n\n \"{}\\n\",\n\n ansi::ansi_preserving_slice(\n\n &expand_tabs(raw_line.graphemes(true), config.tab_width),\n\n prefix_length\n\n ),\n\n )\n\n}\n\n\n", "file_path": "src/paint.rs", "rank": 68, "score": 199368.40013984905 }, { "content": "pub fn delta_unreachable(message: &str) -> ! {\n\n fatal(format!(\n\n \"{} This should not be possible. \\\n\n Please report the bug at https://github.com/dandavison/delta/issues.\",\n\n message\n\n ));\n\n}\n\n\n\n#[cfg(test)]\n\n// Usual length of the header returned by `run_delta()`, often `skip()`-ed.\n\npub const HEADER_LEN: usize = 7;\n\n\n\n#[cfg(test)]\n\npub mod tests {\n\n use crate::cli;\n\n use crate::tests::integration_test_utils;\n\n use crate::utils::bat::output::PagingMode;\n\n use std::fs::remove_file;\n\n\n\n #[test]\n", "file_path": "src/config.rs", "rank": 69, "score": 198456.30299943362 }, { "content": "fn is_no_syntax_highlighting_syntax_theme_name(theme_name: &str) -> bool {\n\n theme_name.to_lowercase() == \"none\"\n\n}\n\n\n\n/// Return a (theme_name, is_light_mode) tuple.\n\n/// theme_name == None in return value means syntax highlighting is disabled.\n\n///\n\n/// There are two types of color choices that have to be made:\n\n\n", "file_path": "src/options/theme.rs", "rank": 70, "score": 198214.03019727592 }, { "content": "/// Given input like\n\n/// \"--- one.rs\t2019-11-20 06:16:08.000000000 +0100\"\n\n/// Return \"rs\"\n\nfn get_file_extension_from_marker_line(line: &str) -> Option<&str> {\n\n line.split('\\t')\n\n .next()\n\n .and_then(|column| column.split(' ').nth(1))\n\n .and_then(|file| 
file.split('.').last())\n\n}\n\n\n", "file_path": "src/handlers/diff_header.rs", "rank": 71, "score": 197451.34263685695 }, { "content": "fn get_remote_url(git_config: &GitConfig) -> Option<GitConfigEntry> {\n\n git_config\n\n .repo\n\n .as_ref()?\n\n .find_remote(\"origin\")\n\n .ok()?\n\n .url()\n\n .and_then(|url| {\n\n GitRemoteRepo::from_str(url)\n\n .ok()\n\n .map(GitConfigEntry::GitRemote)\n\n })\n\n}\n\n\n", "file_path": "src/features/hyperlinks.rs", "rank": 72, "score": 195040.3119938921 }, { "content": "pub fn _make_feature(bold: bool) -> Vec<(String, OptionValueFunction)> {\n\n let mut feature = raw::make_feature();\n\n feature = feature\n\n .into_iter()\n\n .filter(|(s, _)| s != \"keep-plus-minus-markers\" && s != \"tabs\")\n\n .collect();\n\n feature.extend(builtin_feature!([\n\n (\n\n \"commit-style\",\n\n String,\n\n Some(\"color.diff.commit\"),\n\n _opt => \"raw\"\n\n ),\n\n (\n\n \"minus-style\",\n\n String,\n\n Some(\"color.diff.old\"),\n\n _opt => if bold { \"bold red\" } else { \"red\" }\n\n ),\n\n (\n", "file_path": "src/features/diff_highlight.rs", "rank": 73, "score": 194020.42637381534 }, { "content": "pub fn set_options(\n\n opt: &mut cli::Opt,\n\n git_config: &mut Option<GitConfig>,\n\n arg_matches: &clap::ArgMatches,\n\n assets: HighlightingAssets,\n\n) {\n\n if let Some(git_config) = git_config {\n\n if opt.no_gitconfig {\n\n git_config.enabled = false;\n\n }\n\n set_git_config_entries(opt, git_config);\n\n }\n\n opt.navigate = opt.navigate || env::get_boolean_env_var(\"DELTA_NAVIGATE\");\n\n if opt.syntax_theme.is_none() {\n\n opt.syntax_theme = env::get_env_var(\"BAT_THEME\")\n\n }\n\n\n\n let option_names = cli::Opt::get_argument_and_option_names();\n\n\n\n // Set features\n", "file_path": "src/options/set.rs", "rank": 74, "score": 191874.04190121597 }, { "content": "// Heuristics determining whether to quote string option values when printing values intended for\n\n// git config.\n\nfn format_option_value<S>(s: S) -> 
String\n\nwhere\n\n S: AsRef<str>,\n\n{\n\n let s = s.as_ref();\n\n if s.ends_with(' ')\n\n || s.starts_with(' ')\n\n || s.contains(&['\\\\', '{', '}', ':'][..])\n\n || s.is_empty()\n\n {\n\n format!(\"'{}'\", s)\n\n } else {\n\n s.to_string()\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::tests::integration_test_utils;\n\n\n", "file_path": "src/subcommands/show_config.rs", "rank": 75, "score": 191148.08675381192 }, { "content": "fn parse_merge_marker<'a>(line: &'a str, marker: &str) -> Option<&'a str> {\n\n match line.strip_prefix(marker) {\n\n Some(suffix) => {\n\n let suffix = suffix.trim();\n\n if !suffix.is_empty() {\n\n Some(suffix)\n\n } else {\n\n None\n\n }\n\n }\n\n None => None,\n\n }\n\n}\n\n\n\npub use MergeConflictCommit::*;\n\n\n\nimpl<T> Index<MergeConflictCommit> for MergeConflictCommits<T> {\n\n type Output = T;\n\n fn index(&self, commit: MergeConflictCommit) -> &Self::Output {\n\n match commit {\n", "file_path": "src/handlers/merge_conflict.rs", "rank": 76, "score": 190884.83539330334 }, { "content": "fn get_file_extension_from_diff_header_line_file_path(path: &str) -> Option<&str> {\n\n if path.is_empty() || path == \"/dev/null\" {\n\n None\n\n } else {\n\n get_extension(path).map(|ex| ex.trim())\n\n }\n\n}\n\n\n", "file_path": "src/handlers/diff_header.rs", "rank": 77, "score": 190068.57412814273 }, { "content": "pub fn retrieve_less_version() -> Option<usize> {\n\n if let Ok(less_path) = grep_cli::resolve_binary(\"less\") {\n\n let cmd = Command::new(less_path).arg(\"--version\").output().ok()?;\n\n parse_less_version(&cmd.stdout)\n\n } else {\n\n None\n\n }\n\n}\n\n\n", "file_path": "src/utils/bat/less.rs", "rank": 78, "score": 188290.4506210715 }, { "content": "/// Extract set of 'special decoration attributes' and return it along with modified style string.\n\nfn extract_special_decoration_attributes(style_string: &str) -> (DecorationAttributes, String) {\n\n _extract_special_decoration_attributes(style_string, 
true)\n\n}\n\n\n", "file_path": "src/parse_style.rs", "rank": 79, "score": 188090.54989996826 }, { "content": "fn parse_width_specifier(width_arg: &str, terminal_width: usize) -> Result<usize, String> {\n\n let width_arg = width_arg.trim();\n\n\n\n let parse = |width: &str, must_be_negative, subexpression| -> Result<isize, String> {\n\n let remove_spaces = |s: &str| s.chars().filter(|c| c != &' ').collect::<String>();\n\n match remove_spaces(width).parse() {\n\n Ok(val) if must_be_negative && val > 0 => Err(()),\n\n Err(_) => Err(()),\n\n Ok(ok) => Ok(ok),\n\n }\n\n .map_err(|_| {\n\n let pos = if must_be_negative { \" negative\" } else { \"n\" };\n\n let subexpr = if subexpression {\n\n format!(\" (from {:?})\", width_arg)\n\n } else {\n\n \"\".into()\n\n };\n\n format!(\n\n \"{:?}{subexpr} is not a{pos} integer\",\n\n width,\n", "file_path": "src/options/set.rs", "rank": 80, "score": 188081.58313713563 }, { "content": "fn parse_paging_mode(paging_mode_string: &str) -> PagingMode {\n\n match paging_mode_string.to_lowercase().as_str() {\n\n \"always\" => PagingMode::Always,\n\n \"never\" => PagingMode::Never,\n\n \"auto\" => PagingMode::QuitIfOneScreen,\n\n _ => {\n\n fatal(format!(\n\n \"Invalid value for --paging option: {} (valid values are \\\"always\\\", \\\"never\\\", and \\\"auto\\\")\",\n\n paging_mode_string\n\n ));\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/options/set.rs", "rank": 81, "score": 185741.37793288307 }, { "content": "fn paint_text(text_style: Style, text: &str, addendum: &str) -> String {\n\n if addendum.is_empty() {\n\n text_style.paint(text).to_string()\n\n } else {\n\n text_style\n\n .paint(text.to_string() + \" (\" + addendum + \")\")\n\n .to_string()\n\n }\n\n}\n\n\n\npub type DrawFunction = dyn FnMut(\n\n &mut dyn Write,\n\n &str,\n\n &str,\n\n &str,\n\n &Width,\n\n Style,\n\n ansi_term::Style,\n\n) -> std::io::Result<()>;\n\n\n", "file_path": "src/handlers/draw.rs", "rank": 82, "score": 185438.08389114722 }, { "content": "// The 
resulting vector is never empty\n\npub fn parse_line_number_format<'a>(\n\n format_string: &'a str,\n\n placeholder_regex: &Regex,\n\n mut prefix_with_space: bool,\n\n) -> FormatStringData<'a> {\n\n let mut format_data = Vec::new();\n\n let mut offset = 0;\n\n\n\n let mut expand_first_prefix = |prefix: SmolStr| {\n\n // Only prefix the first placeholder with a space, also see `UseFullPanelWidth`\n\n if prefix_with_space {\n\n let prefix = SmolStr::new(format!(\"{}{}\", ODD_PAD_CHAR, prefix));\n\n prefix_with_space = false;\n\n prefix\n\n } else {\n\n prefix\n\n }\n\n };\n\n\n\n for captures in placeholder_regex.captures_iter(format_string) {\n", "file_path": "src/format.rs", "rank": 83, "score": 184199.6525942419 }, { "content": "fn make_blame_palette(blame_palette: Option<String>, is_light_mode: bool) -> Vec<String> {\n\n match (blame_palette, is_light_mode) {\n\n (Some(string), _) => string\n\n .split_whitespace()\n\n .map(|s| s.to_owned())\n\n .collect::<Vec<String>>(),\n\n (None, true) => color::LIGHT_THEME_BLAME_PALETTE\n\n .iter()\n\n .map(|s| s.to_string())\n\n .collect::<Vec<String>>(),\n\n (None, false) => color::DARK_THEME_BLAME_PALETTE\n\n .iter()\n\n .map(|s| s.to_string())\n\n .collect::<Vec<String>>(),\n\n }\n\n}\n\n\n", "file_path": "src/config.rs", "rank": 84, "score": 183407.7228928146 }, { "content": "fn format_commit_line_captures_with_osc8_commit_hyperlink(\n\n captures: &Captures,\n\n repo: &GitRemoteRepo,\n\n) -> String {\n\n let commit = captures.get(2).unwrap().as_str();\n\n format!(\n\n \"{prefix}{osc}8;;{url}{st}{commit}{osc}8;;{st}{suffix}\",\n\n url = repo.format_commit_url(commit),\n\n commit = commit,\n\n prefix = captures.get(1).map(|m| m.as_str()).unwrap_or(\"\"),\n\n suffix = captures.get(3).unwrap().as_str(),\n\n osc = \"\\x1b]\",\n\n st = \"\\x1b\\\\\"\n\n )\n\n}\n\n\n\n#[cfg(not(target_os = \"windows\"))]\n\n#[cfg(test)]\n\npub mod tests {\n\n use std::iter::FromIterator;\n", "file_path": "src/features/hyperlinks.rs", "rank": 85, 
"score": 183042.56452975137 }, { "content": "fn _make_options_from_args_and_git_config(\n\n args: &[&str],\n\n git_config_contents: Option<&[u8]>,\n\n git_config_path: Option<&str>,\n\n honor_env_var: bool,\n\n) -> cli::Opt {\n\n let mut args: Vec<&str> = itertools::chain(&[\"/dev/null\", \"/dev/null\"], args)\n\n .map(|s| *s)\n\n .collect();\n\n let git_config = match (git_config_contents, git_config_path) {\n\n (Some(contents), Some(path)) => Some(make_git_config(contents, path, honor_env_var)),\n\n _ => {\n\n args.push(\"--no-gitconfig\");\n\n None\n\n }\n\n };\n\n cli::Opt::from_iter_and_git_config(args, git_config)\n\n}\n\n\n", "file_path": "src/tests/integration_test_utils.rs", "rank": 86, "score": 181766.71084649762 }, { "content": "#[cfg(not(test))]\n\npub fn calling_process() -> MutexGuard<'static, CallingProcess> {\n\n let (caller_mutex, determine_done) = &**CALLER;\n\n\n\n determine_done\n\n .wait_while(caller_mutex.lock().unwrap(), |caller| {\n\n *caller == CallingProcess::Pending\n\n })\n\n .unwrap()\n\n}\n\n\n\n// The return value is duck-typed to work in place of a MutexGuard when testing.\n", "file_path": "src/utils/process.rs", "rank": 87, "score": 180949.36647176702 }, { "content": "// Look up a value of type `T` associated with `option name`. The search rules are:\n\n//\n\n// 1. If there is a value associated with `option_name` in the main [delta] git config\n\n// section, then stop searching and return that value.\n\n//\n\n// 2. 
For each feature in the ordered list of enabled features:\n\n//\n\n// 2.1 Look-up the value, treating `feature` as a custom feature.\n\n// I.e., if there is a value associated with `option_name` in a git config section\n\n// named [delta \"`feature`\"] then stop searching and return that value.\n\n//\n\n// 2.2 Look-up the value, treating `feature` as a builtin feature.\n\n// I.e., if there is a value (not a default value) associated with `option_name` in a\n\n// builtin feature named `feature`, then stop searching and return that value.\n\n// Otherwise, record the default value and continue searching.\n\n//\n\n// 3. Return the last default value that was encountered.\n\npub fn get_option_value<T>(\n\n option_name: &str,\n\n builtin_features: &HashMap<String, features::BuiltinFeature>,\n\n opt: &cli::Opt,\n\n git_config: &mut Option<git_config::GitConfig>,\n\n) -> Option<T>\n\nwhere\n\n T: GitConfigGet,\n\n T: GetOptionValue,\n\n T: From<OptionValue>,\n\n T: Into<OptionValue>,\n\n{\n\n T::get_option_value(option_name, builtin_features, opt, git_config)\n\n}\n\n\n\nlazy_static! 
{\n\n static ref GIT_CONFIG_THEME_REGEX: Regex = Regex::new(r\"^delta\\.(.+)\\.(light|dark)$\").unwrap();\n\n}\n\n\n", "file_path": "src/options/get.rs", "rank": 88, "score": 180161.31202688842 }, { "content": "pub fn get_line_of_code_from_delta(\n\n input: &str,\n\n line_number: usize,\n\n expected_text: &str,\n\n config: &config::Config,\n\n) -> String {\n\n let output = run_delta(&input, config);\n\n let line_of_code = output.lines().nth(line_number).unwrap();\n\n assert!(ansi::strip_ansi_codes(line_of_code) == expected_text);\n\n line_of_code.to_string()\n\n}\n\n\n", "file_path": "src/tests/integration_test_utils.rs", "rank": 89, "score": 177690.518026879 }, { "content": "// Construct the regexp used by less for paging, if --show-themes or --navigate is enabled.\n\npub fn make_navigate_regex(\n\n show_themes: bool,\n\n file_modified_label: &str,\n\n file_added_label: &str,\n\n file_removed_label: &str,\n\n file_renamed_label: &str,\n\n hunk_label: &str,\n\n) -> String {\n\n if show_themes {\n\n \"^Theme:\".to_string()\n\n } else {\n\n let optional_regexp = |find: &str| {\n\n if !find.is_empty() {\n\n format!(\"|{}\", regex::escape(find))\n\n } else {\n\n \"\".to_string()\n\n }\n\n };\n\n format!(\n\n \"^(commit{}{}{}{}{})\",\n\n optional_regexp(file_added_label),\n\n optional_regexp(file_removed_label),\n\n optional_regexp(file_renamed_label),\n\n optional_regexp(file_modified_label),\n\n optional_regexp(hunk_label),\n\n )\n\n }\n\n}\n\n\n", "file_path": "src/features/navigate.rs", "rank": 90, "score": 176897.55368702722 }, { "content": "pub fn format_blame_metadata(\n\n format_data: &[format::FormatStringPlaceholderData],\n\n blame: &BlameLine,\n\n config: &config::Config,\n\n) -> String {\n\n let mut s = String::new();\n\n let mut suffix = \"\";\n\n for placeholder in format_data {\n\n s.push_str(placeholder.prefix.as_str());\n\n\n\n let alignment_spec = placeholder.alignment_spec.unwrap_or(format::Align::Left);\n\n let width = 
placeholder.width.unwrap_or(15);\n\n\n\n let field = match placeholder.placeholder {\n\n Some(Placeholder::Str(\"timestamp\")) => Some(Cow::from(\n\n chrono_humanize::HumanTime::from(blame.time).to_string(),\n\n )),\n\n Some(Placeholder::Str(\"author\")) => Some(Cow::from(blame.author)),\n\n Some(Placeholder::Str(\"commit\")) => Some(delta::format_raw_line(blame.commit, config)),\n\n None => None,\n", "file_path": "src/handlers/blame.rs", "rank": 91, "score": 176796.1279482115 }, { "content": "/// Interpret `color_string` as a color specifier and return it painted accordingly.\n\npub fn paint_color_string<'a>(\n\n color_string: &'a str,\n\n true_color: bool,\n\n git_config: Option<&GitConfig>,\n\n) -> ansi_term::ANSIGenericString<'a, str> {\n\n if let Some(color) = color::parse_color(color_string, true_color, git_config) {\n\n let style = ansi_term::Style {\n\n background: Some(color),\n\n ..ansi_term::Style::default()\n\n };\n\n style.paint(color_string)\n\n } else {\n\n ansi_term::ANSIGenericString::from(color_string)\n\n }\n\n}\n\n\n\nimpl fmt::Display for Style {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n if self.is_raw {\n\n return write!(f, \"raw\");\n", "file_path": "src/style.rs", "rank": 92, "score": 176462.28898345894 }, { "content": "/// Given input like\n\n/// \"@@ -74,15 +74,14 @@ pub fn delta(\"\n\n/// Return \" pub fn delta(\" and a vector of (line_number, hunk_length) tuples.\n\nfn parse_hunk_header(line: &str) -> Option<ParsedHunkHeader> {\n\n if let Some(caps) = HUNK_HEADER_REGEX.captures(line) {\n\n let file_coordinates = &caps[1];\n\n let line_numbers_and_hunk_lengths = HUNK_HEADER_FILE_COORDINATE_REGEX\n\n .captures_iter(file_coordinates)\n\n .map(|caps| {\n\n (\n\n caps[1].parse::<usize>().unwrap(),\n\n caps.get(2)\n\n .map(|m| m.as_str())\n\n // Per the specs linked above, if the hunk length is absent then it is 1.\n\n .unwrap_or(\"1\")\n\n .parse::<usize>()\n\n .unwrap(),\n\n )\n\n })\n\n .collect();\n\n let code_fragment 
= caps[2].to_string();\n\n Some(ParsedHunkHeader {\n\n code_fragment,\n\n line_numbers_and_hunk_lengths,\n\n })\n\n } else {\n\n None\n\n }\n\n}\n\n\n", "file_path": "src/handlers/hunk_header.rs", "rank": 93, "score": 175536.47832015925 }, { "content": "/// Convert 8-bit ANSI code to #RGBA string with ANSI code in red channel and 0 in alpha channel.\n\n// See https://github.com/sharkdp/bat/pull/543\n\npub fn syntect_color_from_ansi_number(n: u8) -> Option<Color> {\n\n Color::from_str(&format!(\"#{:02x}000000\", n)).ok()\n\n}\n\n\n", "file_path": "src/utils/syntect.rs", "rank": 94, "score": 175460.90013426152 }, { "content": "pub fn line_has_style_other_than(line: &str, styles: &[Style]) -> bool {\n\n if !ansi::string_starts_with_ansi_style_sequence(line) {\n\n return false;\n\n }\n\n for style in styles {\n\n if style.is_applied_to(line) {\n\n return false;\n\n }\n\n }\n\n true\n\n}\n\n\n\n#[cfg(test)]\n\npub mod tests {\n\n\n\n use super::*;\n\n\n\n // To add to these tests:\n\n // 1. Stage a file with a single line containing the string \"text\"\n\n // 2. 
git -c 'color.diff.new = $STYLE_STRING' diff --cached --color=always | cat -A\n", "file_path": "src/style.rs", "rank": 95, "score": 174138.88501957452 }, { "content": "pub fn parse_blame_line_numbers(arg: &str) -> BlameLineNumbers {\n\n if arg == \"none\" {\n\n return BlameLineNumbers::On(crate::format::FormatStringSimple::only_string(\"│\"));\n\n }\n\n\n\n let regex = make_placeholder_regex(&[\"n\"]);\n\n let f = match parse_line_number_format(arg, &regex, false) {\n\n v if v.len() > 1 => {\n\n fatal(\"Too many format arguments numbers for blame-line-numbers\".to_string())\n\n }\n\n mut v => v.pop().unwrap(),\n\n };\n\n\n\n let set_defaults = |mut format: crate::format::FormatStringSimple| {\n\n format.width = format.width.or(Some(4));\n\n format.alignment_spec = format.alignment_spec.or(Some(crate::format::Align::Center));\n\n\n\n format\n\n };\n\n\n", "file_path": "src/handlers/blame.rs", "rank": 96, "score": 173931.02974196343 }, { "content": "/// Return current working directory of the user's shell process. I.e. the directory which they are\n\n/// in when delta exits. 
This is the directory relative to which the file paths in delta output are\n\n/// constructed if they are using either (a) delta's relative-paths option or (b) git's --relative\n\n/// flag.\n\npub fn cwd_of_user_shell_process(\n\n cwd_of_delta_process: Option<&PathBuf>,\n\n cwd_relative_to_repo_root: Option<&str>,\n\n) -> Option<PathBuf> {\n\n match (cwd_of_delta_process, cwd_relative_to_repo_root) {\n\n (Some(cwd), None) => {\n\n // We are not a child process of git\n\n Some(PathBuf::from(cwd))\n\n }\n\n (Some(repo_root), Some(cwd_relative_to_repo_root)) => {\n\n // We are a child process of git; git spawned us from repo_root and preserved the user's\n\n // original cwd in the GIT_PREFIX env var (available as config.cwd_relative_to_repo_root)\n\n Some(PathBuf::from(repo_root).join(cwd_relative_to_repo_root))\n\n }\n\n (None, _) => {\n\n // Unexpected\n\n None\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/utils/path.rs", "rank": 97, "score": 173173.30667015899 }, { "content": "pub fn format_blame_line_number(\n\n format: &BlameLineNumbers,\n\n line_number: usize,\n\n is_repeat: bool,\n\n) -> (&str, String, &str) {\n\n let (format, empty) = match &format {\n\n BlameLineNumbers::PerBlock(format) => (format, is_repeat),\n\n BlameLineNumbers::Every(n, format) => (format, is_repeat && line_number % n != 0),\n\n BlameLineNumbers::On(format) => (format, false),\n\n };\n\n let mut result = String::new();\n\n\n\n // depends on defaults being set when parsing arguments\n\n let line_number = if format.width.is_some() {\n\n format::pad(\n\n line_number,\n\n format.width.unwrap(),\n\n format.alignment_spec.unwrap(),\n\n None,\n\n )\n", "file_path": "src/handlers/blame.rs", "rank": 98, "score": 173022.18838386514 }, { "content": "pub fn describe_calling_process(args: &[String]) -> ProcessArgs<CallingProcess> {\n\n let mut args = args.iter().map(|s| s.as_str());\n\n\n\n fn is_any_of<'a, I>(cmd: Option<&str>, others: I) -> bool\n\n where\n\n I: IntoIterator<Item = &'a str>,\n\n {\n\n 
cmd.map(|cmd| others.into_iter().any(|o| o.eq_ignore_ascii_case(cmd)))\n\n .unwrap_or(false)\n\n }\n\n\n\n match args.next() {\n\n Some(command) => match Path::new(command).file_stem() {\n\n Some(s) if s.to_str().map(is_git_binary).unwrap_or(false) => {\n\n let mut args = args.skip_while(|s| {\n\n *s != \"diff\" && *s != \"show\" && *s != \"log\" && *s != \"reflog\" && *s != \"grep\"\n\n });\n\n match args.next() {\n\n Some(\"diff\") => {\n\n ProcessArgs::Args(CallingProcess::GitDiff(parse_command_line(args)))\n", "file_path": "src/utils/process.rs", "rank": 99, "score": 170002.82734147168 } ]
Rust
src/frame.rs
Inner-Heaven/libwhisper-rs
26e7331fd5b4ab2c1410ab46f738208b48a6aa7b
use bytes::{BufMut, Bytes, BytesMut}; use errors::{WhisperError, WhisperResult}; use nom::{IResult, rest}; use sodiumoxide::crypto::box_::{Nonce, PublicKey}; pub static HEADER_SIZE: usize = 57; #[derive(Debug, Clone, PartialEq, Copy, Eq, Hash)] pub enum FrameKind { Hello = 1, Welcome, Initiate, Ready, Request, Response, Notification, Termination, } impl FrameKind { pub fn from(kind: u8) -> Option<FrameKind> { match kind { 1 => Some(FrameKind::Hello), 2 => Some(FrameKind::Welcome), 3 => Some(FrameKind::Initiate), 4 => Some(FrameKind::Ready), 5 => Some(FrameKind::Request), 6 => Some(FrameKind::Response), 7 => Some(FrameKind::Notification), 255 => Some(FrameKind::Termination), _ => None, } } pub fn from_slice(kind: &[u8]) -> Option<FrameKind> { if kind.len() != 1 { return None; } FrameKind::from(kind[0]) } } #[derive(Debug, Clone, PartialEq, Hash, Eq)] pub struct Frame { pub id: PublicKey, pub nonce: Nonce, pub kind: FrameKind, pub payload: Bytes, } impl Frame { pub fn length(&self) -> usize { HEADER_SIZE + self.payload.len() } pub fn pack_to_buf(&self, buf: &mut BytesMut) { buf.reserve(self.length()); buf.extend_from_slice(&self.id.0); buf.extend_from_slice(&self.nonce.0); buf.put_u8(self.kind as u8); buf.extend_from_slice(&self.payload); } pub fn pack(&self) -> Bytes { let mut frame = BytesMut::with_capacity(self.length()); self.pack_to_buf(&mut frame); frame.freeze() } pub fn from_slice(i: &[u8]) -> WhisperResult<Frame> { match parse_frame(i) { IResult::Done(_, frame) => Ok(frame), IResult::Incomplete(_) => Err(WhisperError::IncompleteFrame), IResult::Error(_) => Err(WhisperError::BadFrame), } } } named!(parse_frame < &[u8], Frame >, do_parse!( pk: map_opt!(take!(32), PublicKey::from_slice) >> nonce: map_opt!(take!(24), Nonce::from_slice) >> kind: map_opt!(take!(1), FrameKind::from_slice) >> payload: rest >> ({ let mut vec = Vec::with_capacity(payload.len()); vec.extend(payload.iter().cloned()); Frame { id: pk, nonce: nonce, kind: kind, payload: vec.into() } }) ) 
); #[cfg(test)] mod test { use super::*; use errors::WhisperError; use sodiumoxide::crypto::box_::{gen_keypair, gen_nonce}; #[test] fn pack_and_unpack() { let frame = make_frame(); let packed_frame = frame.pack(); assert_eq!(packed_frame.len(), 60); let parsed_frame = Frame::from_slice(&packed_frame); assert_eq!(frame, parsed_frame.unwrap()); } #[test] fn frame_kind_from_slice() { let hello = FrameKind::from_slice(&[1]).unwrap(); let welcome = FrameKind::from_slice(&[2]).unwrap(); let initiate = FrameKind::from_slice(&[3]).unwrap(); let ready = FrameKind::from_slice(&[4]).unwrap(); let request = FrameKind::from_slice(&[5]).unwrap(); let response = FrameKind::from_slice(&[6]).unwrap(); let notification = FrameKind::from_slice(&[7]).unwrap(); let termination = FrameKind::from_slice(&[255]).unwrap(); let bad = FrameKind::from_slice(&[100]); let none = FrameKind::from_slice(&[]); assert_eq!(hello, FrameKind::Hello); assert_eq!(welcome, FrameKind::Welcome); assert_eq!(initiate, FrameKind::Initiate); assert_eq!(ready, FrameKind::Ready); assert_eq!(request, FrameKind::Request); assert_eq!(response, FrameKind::Response); assert_eq!(notification, FrameKind::Notification); assert_eq!(termination, FrameKind::Termination); assert!(bad.is_none()); assert!(none.is_none()); } #[test] fn malformed_frame() { let packed_frame = vec![1 as u8, 2, 3]; let parsed_frame = Frame::from_slice(&packed_frame); assert_eq!(parsed_frame.is_err(), true); let err = parsed_frame.err().unwrap(); let mut is_incomplete = false; if let WhisperError::IncompleteFrame = err { is_incomplete = true; } assert!(is_incomplete); } #[test] fn bad_frame() { let bad_frame = b"\x85\x0f\xc2?\xce\x80f\x16\xec8\x04\xc7{5\x98\xa7u<\xa5y\xda\x12\xfe\xad\xdc^%[\x8ap\xfa7q.-)\xe4V\xec\x94\xb2\x7f\r\x9a\x91\xc7\xcd\x08\xa4\xee\xbfbpH\x07%\r\0\0\0"; let result = Frame::from_slice(&bad_frame[0..59]); assert!(result.is_err()); let err = result.err().unwrap(); let mut is_bad = false; if let WhisperError::BadFrame = err { 
is_bad = true; } assert!(is_bad); } fn make_frame() -> Frame { let (pk, _) = gen_keypair(); let payload = vec![0, 0, 0]; let nonce = gen_nonce(); Frame { id: pk, nonce: nonce, kind: FrameKind::Hello, payload: payload.into(), } } }
use bytes::{BufMut, Bytes, BytesMut}; use errors::{WhisperError, WhisperResult}; use nom::{IResult, rest}; use sodiumoxide::crypto::box_::{Nonce, PublicKey}; pub static HEADER_SIZE: usize = 57; #[derive(Debug, Clone, PartialEq, Copy, Eq, Hash)] pub enum FrameKind { Hello = 1, Welcome, Initiate, Ready, Request, Response, Notification, Termination, } impl FrameKind { pub fn from(kind: u8) -> Option<FrameKind> { match kind { 1 => Some(FrameKind::Hello), 2 => Some(FrameKind::Welcome), 3 => Some(FrameKind::Initiate), 4 => Some(FrameKind::Ready), 5 => Some(FrameKind::Request), 6 => Some(FrameKind::Response), 7 => Some(FrameKind::Notification), 255 => Some(FrameKind::Termination), _ => None, } } pub fn from_slice(kind: &[u8]) -> Option<FrameKind> { if kind.len() != 1 { return None; } FrameKind::from(kind[0]) } } #[derive(Debug, Clone, PartialEq, Hash, Eq)] pub struct Frame { pub id: PublicKey, pub nonce: Nonce, pub kind: FrameKind, pub payload: Bytes, } impl Frame { pub fn length(&self) -> usize { HEADER_SIZE + self.payload.len() } pub fn pack_to_buf(&self, buf: &mut BytesMut) { buf.reserve(self.length()); buf.extend_from_slice(&self.id.0); buf.extend_from_slice(&self.nonce.0); buf.put_u8(self.kind as u8); buf.extend_from_slice(&self.payload); } pub fn pack(&self) -> Bytes { let mut frame = BytesMut::with_capacity(self.length()); self.pack_to_buf(&mut frame); frame.freeze() } pub fn from_slice(i: &[u8]) -> WhisperResult<Frame> { match parse_frame(i) { IResult::Done(_, frame) => Ok(frame), IResult::Incomplete(_) => Err(WhisperError::IncompleteFrame), IResult::Error(_) => Err(WhisperError::BadFrame), } } } named!(parse_frame < &[u8], Frame >, do_parse!( pk: map_opt!(take!(32), PublicKey::from_slice) >> nonce: map_opt!(take!(24), Nonce::from_slice) >> kind: map_opt!(take!(1), FrameKind::from_slice) >> payload: rest >> ({ let mut vec = Vec::with_capacity(payload.len()); vec.extend(payload.iter().cloned()); Frame { id: pk, nonce: nonce, kind: kind, payload: vec.into() } }) ) 
); #[cfg(test)] mod test { use super::*; use errors::WhisperError; use sodiumoxide::crypto::box_::{gen_keypair, gen_nonce}; #[test] fn pack_and_unpack() { let frame = make_frame(); let packed_frame = frame.pack(); assert_eq!(packed_frame.len(), 60); let parsed_frame = Frame::from_slice(&packed_frame); assert_eq!(frame, parsed_frame.unwrap()); } #[test] fn frame_kind_from_slice() { let hello = FrameKind::from_slice(&[1]).unwrap(); let welcome = FrameKind::from_slice(&[2]).unwrap(); let initiate = FrameKind::from_slice(&[3]).unwrap(); let ready = FrameKind::from_slice(&[4]).unwrap(); let request = FrameKind::from_slice(&[5]).unwrap(); let response = FrameKind::from_slice(&[6]).unwrap(); let notification = FrameKind::from_slice(&[7]).unwrap(); let termination = FrameKind::from_slice(&[255]).unwrap(); let bad = FrameKind::from_slice(&[100]); let none = FrameKind::from_slice(&[]); assert_eq!(hello, FrameKind::Hello); assert_eq!(welcome, FrameKind::Welcome); assert_eq!(initiate, FrameKind::Initiate); assert_eq!(ready, FrameKind::Ready); assert_eq!(request, FrameKind::Request); assert_eq!(response, FrameKind::Response); assert_eq!(notification, FrameKind::Notification); assert_eq!(termination, FrameKind::Termination); assert!(bad.is_none()); assert!(none.is_none()); } #[test] fn malformed_frame() { let packed_frame = vec![1 as u8, 2, 3]; let parsed_frame = Frame::from_slice(&packed_frame); assert_eq!(parsed_frame.is_err(), true); let err = parsed_frame.err().unwrap(); let mut is_incomplete = false; if let WhisperError::IncompleteFrame = err {
nonce: nonce, kind: FrameKind::Hello, payload: payload.into(), } } }
is_incomplete = true; } assert!(is_incomplete); } #[test] fn bad_frame() { let bad_frame = b"\x85\x0f\xc2?\xce\x80f\x16\xec8\x04\xc7{5\x98\xa7u<\xa5y\xda\x12\xfe\xad\xdc^%[\x8ap\xfa7q.-)\xe4V\xec\x94\xb2\x7f\r\x9a\x91\xc7\xcd\x08\xa4\xee\xbfbpH\x07%\r\0\0\0"; let result = Frame::from_slice(&bad_frame[0..59]); assert!(result.is_err()); let err = result.err().unwrap(); let mut is_bad = false; if let WhisperError::BadFrame = err { is_bad = true; } assert!(is_bad); } fn make_frame() -> Frame { let (pk, _) = gen_keypair(); let payload = vec![0, 0, 0]; let nonce = gen_nonce(); Frame { id: pk,
random
[ { "content": "/// In order to make libsodium threadsafe you must call this function before using any of it's andom number generation functions.\n\n/// It's safe to call this method more than once and from more than one thread.\n\npub fn init() -> WhisperResult<()> {\n\n if sodiumoxide::init() {\n\n Ok(())\n\n } else {\n\n Err(WhisperError::InitializationFailed)\n\n }\n\n}", "file_path": "src/crypto.rs", "rank": 0, "score": 46253.120398151805 }, { "content": " let payload = box_::seal(&NULL_BYTES,\n\n &nonce,\n\n &self.remote_identity_key,\n\n &self.local_session_keypair.secret_key);\n\n Frame {\n\n id: self.local_session_keypair.public_key,\n\n nonce: nonce,\n\n kind: FrameKind::Hello,\n\n payload: payload.into(),\n\n }\n\n }\n\n\n\n /// Helper to make am Initiate frame, a reply to Welcome frame. Client\n\n /// workflow.\n\n pub fn make_initiate(&mut self, welcome: &Frame) -> WhisperResult<Frame> {\n\n if self.state != SessionState::Initiated || welcome.kind != FrameKind::Welcome {\n\n return Err(WhisperError::InvalidSessionState);\n\n }\n\n // Try to obtain server short public key from the box.\n\n if let Ok(server_pk) = box_::open(&welcome.payload,\n", "file_path": "src/session.rs", "rank": 12, "score": 27.24736458159115 }, { "content": "//! implementation of that is not part of the protocol.\n\n\n\n\n\nuse bytes::Bytes;\n\nuse chrono::{DateTime, Duration};\n\nuse chrono::offset::Utc;\n\nuse errors::{WhisperError, WhisperResult};\n\nuse sodiumoxide::crypto::box_;\n\nuse sodiumoxide::crypto::box_::{Nonce, PrecomputedKey, PublicKey};\n\n\n\nuse frame::{Frame, FrameKind};\n\nuse crypto::KeyPair;\n\n\n\n/// Array of null bytes used in Hello package. Needs to be bigger than Welcome\n\n/// frame to prevent amplification attacks. 
Maybe, 256 is too much...who knows?\n\npub static NULL_BYTES: [u8; 256] = [b'\\x00'; 256];\n\n/// Payload \"server\" side supposed to send to client when.\n\npub static READY_PAYLOAD: &'static [u8; 16] = b\"My body is ready\";\n\n\n\n/// How much time client and server have to agree on shared secret.\n", "file_path": "src/session.rs", "rank": 13, "score": 24.859816599856224 }, { "content": " self.state = SessionState::Error;\n\n return Err(WhisperError::InvalidHelloFrame);\n\n }\n\n\n\n self.state = SessionState::Initiated;\n\n\n\n let nonce = box_::gen_nonce();\n\n let welcome_box = box_::seal(self.local_session_keypair.public_key.as_ref(),\n\n &nonce,\n\n &hello.id,\n\n &self.local_identity_keypair.secret_key);\n\n\n\n let welcome_frame = Frame {\n\n // Server uses client id in reply.\n\n id: hello.id,\n\n nonce: nonce,\n\n kind: FrameKind::Welcome,\n\n payload: welcome_box.into(),\n\n };\n\n Ok(welcome_frame)\n", "file_path": "src/session.rs", "rank": 14, "score": 23.01814876706191 }, { "content": " &welcome.nonce,\n\n &self.remote_identity_key,\n\n &self.local_session_keypair.secret_key)\n\n {\n\n if let Some(key) = PublicKey::from_slice(&server_pk) {\n\n self.remote_session_key = Some(key);\n\n let mut initiate_box = Vec::with_capacity(104);\n\n initiate_box.extend_from_slice(&self.local_identity_keypair.public_key.0);\n\n initiate_box.extend(self.make_vouch());\n\n let nonce = box_::gen_nonce();\n\n let payload = box_::seal(&initiate_box,\n\n &nonce,\n\n &self.remote_session_key.expect(\"Shit is on fire yo\"),\n\n &self.local_session_keypair.secret_key);\n\n let frame = Frame {\n\n id: welcome.id,\n\n nonce: nonce,\n\n kind: FrameKind::Initiate,\n\n payload: payload.into(),\n\n };\n", "file_path": "src/session.rs", "rank": 15, "score": 22.012496668520438 }, { "content": " remote_session_key: remote_session_key,\n\n remote_identity_key: None,\n\n state: SessionState::Fresh,\n\n }\n\n }\n\n /// Helper to make a Welcome frame, a reply to Hello frame. 
Server worflow.\n\n pub fn make_welcome(&mut self, hello: &Frame) -> WhisperResult<Frame> {\n\n if self.state != SessionState::Fresh || hello.kind != FrameKind::Hello {\n\n return Err(WhisperError::InvalidSessionState);\n\n }\n\n // Verify content of the box\n\n if let Ok(payload) = box_::open(&hello.payload,\n\n &hello.nonce,\n\n &hello.id,\n\n &self.local_identity_keypair.secret_key)\n\n {\n\n // We're not going to verify that box content itself, but will verify it's\n\n // length since\n\n // that is what matters the most.\n\n if payload.len() != 256 {\n", "file_path": "src/session.rs", "rank": 16, "score": 21.897650188891056 }, { "content": " assert_eq!(client_established_session.session_state(),\n\n SessionState::Ready);\n\n assert_eq!(client_session.session_state(), SessionState::Ready);\n\n }\n\n\n\n #[test]\n\n fn test_ping_pong() {\n\n let (client, server) = handshake();\n\n\n\n let ping_bytes = b\"ping\";\n\n let ping = client.make_request(ping_bytes).unwrap();\n\n assert_eq!(ping.kind, FrameKind::Request);\n\n let ping_payload = server.read_msg(&ping).unwrap();\n\n assert_eq!(&ping_payload.as_ref(), &ping_bytes);\n\n\n\n let pong_bytes = b\"pong\";\n\n let pong = server.make_response(pong_bytes).unwrap();\n\n assert_eq!(pong.kind, FrameKind::Response);\n\n let pong_payload = client.read_msg(&pong).unwrap();\n\n assert_eq!(&pong_payload.as_ref(), &pong_bytes);\n\n\n\n let score = server.make_notification(b\"Player B Scored\").unwrap();\n\n\n\n assert_eq!(score.kind, FrameKind::Notification);\n\n }\n\n}\n", "file_path": "src/session.rs", "rank": 17, "score": 21.53947679876411 }, { "content": " init().unwrap();\n\n let client_identity_keypair = KeyPair::new();\n\n let server_identity_keypair = KeyPair::new();\n\n\n\n let mut client_session =\n\n ClientSession::new(client_identity_keypair.clone(),\n\n server_identity_keypair.public_key.clone());\n\n let mut server_session = ServerSession::new(server_identity_keypair.clone(), client_session.id().clone());\n\n 
assert_eq!(client_session.state, SessionState::Fresh);\n\n assert_eq!(server_session.state, SessionState::Fresh);\n\n assert_eq!(client_session.id(), server_session.id());\n\n\n\n let hello_frame = client_session.make_hello();\n\n assert_eq!(hello_frame.kind, FrameKind::Hello);\n\n assert_eq!(client_session.state, SessionState::Initiated);\n\n\n\n let welcome_frame =\n\n server_session.make_welcome(&hello_frame)\n\n .expect(\"Failed to create welcome!\");\n\n assert_eq!(server_session.state, SessionState::Initiated);\n", "file_path": "src/session.rs", "rank": 18, "score": 20.920837063630813 }, { "content": " Ok(frame)\n\n } else {\n\n self.state = SessionState::Error;\n\n\n\n return Err(WhisperError::InvalidWelcomeFrame);\n\n }\n\n } else {\n\n self.state = SessionState::Error;\n\n return Err(WhisperError::DecryptionFailed);\n\n }\n\n }\n\n /// Verify that reply to initiate frame is correct ready frame. Changes\n\n /// session state if so.\n\n pub fn read_ready(&mut self, ready: &Frame) -> WhisperResult<EstablishedSession> {\n\n if self.state != SessionState::Initiated || ready.kind != FrameKind::Ready {\n\n return Err(WhisperError::InvalidSessionState);\n\n }\n\n // This can never fail when used properly.\n\n let session = EstablishedSession::new(self.remote_session_key.unwrap().clone(),\n\n self.local_session_keypair.clone());\n", "file_path": "src/session.rs", "rank": 19, "score": 20.598848356958456 }, { "content": " pub fn make_ready(&mut self,\n\n initiate: &Frame,\n\n client_identity_key: &PublicKey)\n\n -> WhisperResult<(EstablishedSession, Frame)> {\n\n if self.state != SessionState::Initiated || initiate.kind != FrameKind::Initiate {\n\n return Err(WhisperError::InvalidSessionState);\n\n }\n\n\n\n // If client spend more than 3 minutes to come up with initiate - fuck him.\n\n let duration_since = Utc::now().signed_duration_since(self.created_at);\n\n if duration_since > Duration::minutes(HANDSHAKE_DURATION) {\n\n return 
Err(WhisperError::ExpiredSession);\n\n }\n\n self.state = SessionState::Ready;\n\n self.remote_identity_key = Some(*client_identity_key);\n\n\n\n let session = EstablishedSession::new(self.remote_session_key.clone(),\n\n self.local_session_keypair.clone());\n\n let (nonce, payload) = session.seal_msg(READY_PAYLOAD);\n\n let frame = Frame {\n", "file_path": "src/session.rs", "rank": 20, "score": 20.231318801029275 }, { "content": " let msg = session.read_msg(ready)?;\n\n if msg.as_ref() == READY_PAYLOAD {\n\n self.state = SessionState::Ready;\n\n Ok(session)\n\n } else {\n\n Err(WhisperError::InvalidReadyFrame)\n\n }\n\n }\n\n // Helper to make a vouch\n\n fn make_vouch(&self) -> Vec<u8> {\n\n let nonce = box_::gen_nonce();\n\n let our_sk = &self.local_identity_keypair.secret_key;\n\n let pk = &self.local_session_keypair.public_key;\n\n let vouch_box = box_::seal(&pk.0,\n\n &nonce,\n\n &self.remote_session_key.expect(\"Shit is on fire yo\"),\n\n our_sk);\n\n\n\n let mut vouch = Vec::with_capacity(72);\n\n vouch.extend_from_slice(&nonce.0);\n", "file_path": "src/session.rs", "rank": 21, "score": 18.79711620887446 }, { "content": "\n\nimpl Session for EstablishedSession {\n\n fn is_expired(&self) -> bool { self.expire_at < Utc::now() }\n\n fn session_state(&self) -> SessionState { SessionState::Ready }\n\n fn id(&self) -> PublicKey { self.id }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use frame::FrameKind;\n\n use session::{ClientSession, EstablishedSession, KeyPair, ServerSession, Session, SessionState};\n\n use crypto::init;\n\n\n\n /// Helper to create two established sessions.\n\n fn handshake() -> (EstablishedSession, EstablishedSession) {\n\n let client_identity_keypair = KeyPair::new();\n\n let server_identity_keypair = KeyPair::new();\n\n let mut client_session =\n\n ClientSession::new(client_identity_keypair.clone(),\n\n server_identity_keypair.public_key.clone());\n", "file_path": "src/session.rs", "rank": 22, "score": 18.61063099416871 }, { "content": 
"\n\n /// Method used to create new requests.\n\n pub fn make_request(&self, data: &[u8]) -> WhisperResult<Frame> {\n\n self.make_message(data, FrameKind::Request)\n\n }\n\n\n\n /// Method used to create new responses.\n\n pub fn make_response(&self, data: &[u8]) -> WhisperResult<Frame> {\n\n self.make_message(data, FrameKind::Response)\n\n }\n\n\n\n /// Method used to create new notifications.\n\n pub fn make_notification(&self, data: &[u8]) -> WhisperResult<Frame> {\n\n self.make_message(data, FrameKind::Notification)\n\n }\n\n}\n\n\n\n/// Common session functions that apply to all session types.\n", "file_path": "src/session.rs", "rank": 23, "score": 18.519452296468888 }, { "content": " id: initiate.id,\n\n nonce: nonce,\n\n kind: FrameKind::Ready,\n\n payload: payload,\n\n };\n\n Ok((session, frame))\n\n }\n\n}\n\n\n\n/// Client-side session.\n\n#[derive(Debug, Clone)]\n\npub struct ClientSession {\n\n expire_at: DateTime<Utc>,\n\n created_at: DateTime<Utc>,\n\n local_session_keypair: KeyPair,\n\n local_identity_keypair: KeyPair,\n\n remote_session_key: Option<PublicKey>,\n\n remote_identity_key: PublicKey,\n\n state: SessionState,\n\n}\n", "file_path": "src/session.rs", "rank": 24, "score": 18.37022103551213 }, { "content": "pub static HANDSHAKE_DURATION: i64 = 3;\n\n/// How much time one shared secret can last.\n\npub static SESSION_DURATION: i64 = 55;\n\n\n\n/// Enum representing session state.\n\n#[derive(Debug, Clone, PartialEq, Copy)]\n\npub enum SessionState {\n\n /// Session has been created, but handshake isn't initiated yet.\n\n Fresh,\n\n /// This state means that handshake has started.\n\n Initiated,\n\n /// This state means that session is established and messages can be sent\n\n /// both ways.\n\n Ready,\n\n /// This state means that session established, but can't be used at the\n\n /// time.\n\n Error,\n\n}\n\n\n\n/// Server-side session.\n", "file_path": "src/session.rs", "rank": 25, "score": 18.019227870587315 }, { "content": " if let Ok(msg) = 
box_::open_precomputed(&frame.payload, &frame.nonce, &self.session_secret) {\n\n Ok(msg.into())\n\n } else {\n\n Err(WhisperError::DecryptionFailed)\n\n }\n\n }\n\n\n\n fn make_message(&self, data: &[u8], kind: FrameKind) -> WhisperResult<Frame> {\n\n if self.is_expired() {\n\n return Err(WhisperError::ExpiredSession);\n\n }\n\n let (nonce, payload) = self.seal_msg(data);\n\n let frame = Frame {\n\n id: self.id(),\n\n nonce: nonce,\n\n kind: kind,\n\n payload: payload,\n\n };\n\n Ok(frame)\n\n }\n", "file_path": "src/session.rs", "rank": 26, "score": 17.894001625276104 }, { "content": " let mut server_session = ServerSession::new(server_identity_keypair, client_session.id().clone());\n\n let hello_frame = client_session.make_hello();\n\n let welcome_frame =\n\n server_session.make_welcome(&hello_frame)\n\n .expect(\"Failed to create welcome!\");\n\n let initiate_frame =\n\n client_session.make_initiate(&welcome_frame)\n\n .expect(\"Failed to create initiate!\");\n\n let client_identity_key =\n\n server_session.validate_initiate(&initiate_frame)\n\n .expect(\"Failed to unpack PublicKey\");\n\n let (server_established_session, ready_frame) =\n\n server_session.make_ready(&initiate_frame, &client_identity_key)\n\n .expect(\"Failed to create ready!\");\n\n let client_established_session =\n\n client_session.read_ready(&ready_frame)\n\n .expect(\"Failed to read ready frame!\");\n\n (client_established_session, server_established_session)\n\n }\n\n\n", "file_path": "src/session.rs", "rank": 27, "score": 17.67212878011582 }, { "content": " let pk = PublicKey::from_slice(&initiate_payload[0..32])\n\n .expect(\"Failed to slice pk from payload\");\n\n let v_nonce = Nonce::from_slice(&initiate_payload[32..56])\n\n .expect(\"Failed to slice nonce from payload\");\n\n let v_box = &initiate_payload[56..initiate_payload.len()];\n\n\n\n if let Ok(vouch_payload) =\n\n box_::open(v_box, &v_nonce, &pk, &self.local_session_keypair.secret_key)\n\n {\n\n let v_pk = 
PublicKey::from_slice(&vouch_payload).expect(\"Wrong Size Key!!!\");\n\n if vouch_payload.len() == 32 || v_pk == self.remote_session_key {\n\n return Ok(pk);\n\n }\n\n }\n\n }\n\n Err(WhisperError::InvalidInitiateFrame)\n\n }\n\n\n\n /// Helper to make a Ready frame, a reply to Initiate frame. Server\n\n /// workflow.\n", "file_path": "src/session.rs", "rank": 28, "score": 17.071446100318028 }, { "content": " /// Decryption of payload failed.\n\n DecryptionFailed {}\n\n /// Server sent invalid Welcome frame.\n\n InvalidWelcomeFrame {}\n\n /// Client sent invalid Initiate frame.\n\n InvalidInitiateFrame {}\n\n /// Not having enough bytes to decode frame.\n\n IncompleteFrame {}\n\n /// Either restarting a handshake or forgetting to do handshake at all.\n\n InvalidSessionState {}\n\n /// Enough bytes to decode, but bytes make no sense.\n\n BadFrame {}\n\n /// Trying to use expired session.\n\n ExpiredSession {}\n\n /// Initialization of libsodium failed.\n\n /// This might happen when machine just booted and doesn't have enough entropy.\n\n InitializationFailed {}\n\n }\n\n}\n\n\n\n/// Result type used by this library.\n\npub type WhisperResult<T> = Result<T, WhisperError>;\n", "file_path": "src/errors.rs", "rank": 29, "score": 16.926850181012778 }, { "content": " pub fn new(remote_session_key: PublicKey,\n\n local_session_keypair: KeyPair)\n\n -> EstablishedSession {\n\n let now = Utc::now();\n\n let our_precomputed_key = box_::precompute(&remote_session_key,\n\n &local_session_keypair.secret_key);\n\n EstablishedSession {\n\n id: local_session_keypair.public_key,\n\n expire_at: now + Duration::minutes(SESSION_DURATION),\n\n session_secret: our_precomputed_key,\n\n }\n\n }\n\n fn seal_msg(&self, data: &[u8]) -> (Nonce, Bytes) {\n\n let nonce = box_::gen_nonce();\n\n let payload = box_::seal_precomputed(data, &nonce, &self.session_secret);\n\n (nonce, payload.into())\n\n }\n\n\n\n /// Method use to open payload.\n\n pub fn read_msg(&self, frame: &Frame) -> 
WhisperResult<Bytes> {\n", "file_path": "src/session.rs", "rank": 30, "score": 16.016880877338075 }, { "content": "//! This module contain error type returned by this library.\n\n\n\nuse std::result::Result;\n\n\n\nquick_error! {\n\n #[derive(Debug)]\n\n /// Error kinds returns by this library.\n\n pub enum WhisperError {\n\n /// Server sent invalid payload for Ready frame.\n\n InvalidReadyFrame {\n\n description(\"Server sent invalid payload for Ready frame.\")\n\n }\n\n /// Client sent invalid payload for Hello frame.\n\n InvalidHelloFrame {\n\n description(\"Client sent invalid payload for Hello frame.\")\n\n }\n\n /// Public key failed validation.\n\n InvalidPublicKey {\n\n description(\"Public key failed validation.\")\n\n }\n", "file_path": "src/errors.rs", "rank": 31, "score": 15.261480691043893 }, { "content": "//! frames.\n\n//! This library doesn't handle anything else like request routing, RPC, etc.\n\n//! The plan is to build a framework on top of this.\n\n//!\n\n//! This library in no way production or even development ready. Meaning\n\n//! everything including wire format is subject to change.\n\n//! The goal is to have at least three languages talking to each other using\n\n//! this protocol by the end of 2017.\n\n//!\n\n//! ## Usage\n\n//! TODO: Write usage instructions here\n\n\n\nextern crate chrono;\n\nextern crate sodiumoxide;\n\nextern crate bytes;\n\n#[macro_use]\n\nextern crate quick_error;\n\n#[macro_use]\n\nextern crate nom;\n\n\n\npub mod session;\n\npub mod frame;\n\npub mod errors;\n\npub mod crypto;\n", "file_path": "src/lib.rs", "rank": 32, "score": 14.79240152898024 }, { "content": "impl ClientSession {\n\n /// Create new session. 
This method is private because it will create\n\n /// session with a few missing values.\n\n pub fn new(local_identity_keypair: KeyPair, remote_identity_key: PublicKey) -> ClientSession {\n\n let now = Utc::now();\n\n ClientSession {\n\n expire_at: now + Duration::minutes(HANDSHAKE_DURATION),\n\n created_at: now,\n\n local_session_keypair: KeyPair::new(),\n\n local_identity_keypair:\n\n local_identity_keypair,\n\n remote_session_key: None,\n\n remote_identity_key: remote_identity_key,\n\n state: SessionState::Fresh,\n\n }\n\n }\n\n /// Helper to make Hello frame. Client workflow.\n\n pub fn make_hello(&mut self) -> Frame {\n\n self.state = SessionState::Initiated;\n\n let nonce = box_::gen_nonce();\n", "file_path": "src/session.rs", "rank": 33, "score": 14.426009695334379 }, { "content": " } else {\n\n self.state = SessionState::Error;\n\n Err(WhisperError::DecryptionFailed)\n\n }\n\n }\n\n /// A helper to extract client's permamanet public key from initiate frame\n\n /// in order to\n\n /// authenticate client. 
Authentication happens in another place.\n\n pub fn validate_initiate(&self, initiate: &Frame) -> WhisperResult<PublicKey> {\n\n if let Ok(initiate_payload) =\n\n box_::open(&initiate.payload,\n\n &initiate.nonce,\n\n &self.remote_session_key,\n\n &self.local_session_keypair.secret_key)\n\n {\n\n // TODO: change to != with proper size\n\n if initiate_payload.len() < 60 {\n\n return Err(WhisperError::InvalidInitiateFrame);\n\n }\n\n // unwrapping here because they only panic when input is shorter than needed.\n", "file_path": "src/session.rs", "rank": 34, "score": 13.18911565300881 }, { "content": "\n\n let initiate_frame =\n\n client_session.make_initiate(&welcome_frame)\n\n .expect(\"Failed to create initiate!\");\n\n\n\n let client_identity_key =\n\n server_session.validate_initiate(&initiate_frame)\n\n .expect(\"Failed to unpack PublicKey\");\n\n assert_eq!(&client_identity_key, &client_identity_keypair.public_key);\n\n\n\n let (server_established_session, ready_frame) =\n\n server_session.make_ready(&initiate_frame, &client_identity_key)\n\n .expect(\"Failed to create ready!\");\n\n assert_eq!(server_established_session.session_state(),\n\n SessionState::Ready);\n\n assert_eq!(server_session.session_state(), SessionState::Ready);\n\n\n\n let client_established_session =\n\n client_session.read_ready(&ready_frame)\n\n .expect(\"Failed to read ready frame!\");\n", "file_path": "src/session.rs", "rank": 35, "score": 12.298366083739602 }, { "content": " vouch.extend(vouch_box);\n\n vouch\n\n }\n\n}\n\n\n\n/// This structure represent session that completed handshake.\n\n///\n\n/// Only way to create is to have ClientSession and ServerSession agree on\n\n/// shared secret a.k.a. session_key a.k.a. 
PrecomputedKey.\n\n/// ServerSession turns into EstablishedSession by verifying Initiate frame.\n\n/// ClientSession turns into EstablishedSession by verifying Ready frame.\n\npub struct EstablishedSession {\n\n id: PublicKey,\n\n expire_at: DateTime<Utc>,\n\n session_secret: PrecomputedKey,\n\n}\n\n\n\nimpl EstablishedSession {\n\n /// Create EstablishSession by precomputing shared secret. Don't use this\n\n /// directly.\n", "file_path": "src/session.rs", "rank": 36, "score": 11.807243613385502 }, { "content": "//! This module handles Session (singular) management. The session is\n\n//! responsible for Frame generation and encryption.\n\n//!\n\n//! ### Client session vs Server session\n\n//! Only different between ClientSession and ServerSession is that Client\n\n//! doesn't know about server's session key at the beginning, while Server key\n\n//! doesn't know about client's identity key.\n\n//!\n\n//! This handshake is heavily based on CurveCP and CurveZMQ.\n\n//! ### Handshake\n\n//! This is a very rough explanation. detailed one is coming later.\n\n//!\n\n//! 1. Client sends Hello frame to server\n\n//! 2. Server replies with Welcome frame\n\n//! 3. Client replies with Initiate frame\n\n//! 4. Server verifies that client is allowed to talk to this server and\n\n//! replies with Ready or Terminate frame\n\n//!\n\n//! ### Messages\n\n//! The protocol allows bi-directorial message exchange. However,\n", "file_path": "src/session.rs", "rank": 37, "score": 10.186895367047038 }, { "content": "//! This module is mostly reexports of sodiumoxide.\n\n\n\nuse errors::{WhisperResult, WhisperError};\n\nuse sodiumoxide;\n\nuse sodiumoxide::crypto::box_::gen_keypair;\n\n\n\npub use sodiumoxide::crypto::box_::{PublicKey, SecretKey};\n\n/// A keypair. 
This is just a helper type.\n\n#[derive(Debug, Clone)]\n\npub struct KeyPair {\n\n /// Public key.\n\n pub public_key: PublicKey,\n\n /// Secret key.\n\n pub secret_key: SecretKey,\n\n}\n\nimpl KeyPair {\n\n /// Generate new keypair using libsodium.\n\n #[inline]\n\n pub fn new() -> KeyPair {\n\n let (public_key, secret_key) = gen_keypair();\n\n KeyPair {\n\n secret_key: secret_key,\n\n public_key: public_key,\n\n }\n\n }\n\n}\n\n\n\n/// In order to make libsodium threadsafe you must call this function before using any of it's andom number generation functions.\n\n/// It's safe to call this method more than once and from more than one thread.\n", "file_path": "src/crypto.rs", "rank": 38, "score": 8.441474170814729 }, { "content": " #[test]\n\n fn test_expire_client() {\n\n let local = KeyPair::new();\n\n let remote = KeyPair::new();\n\n\n\n let client_session = ClientSession::new(local, remote.public_key.clone());\n\n assert!(!client_session.is_expired());\n\n }\n\n\n\n #[test]\n\n fn test_expire_server() {\n\n let local = KeyPair::new();\n\n let remote = KeyPair::new();\n\n\n\n let server_session = ServerSession::new(local, remote.public_key.clone());\n\n assert!(!server_session.is_expired());\n\n }\n\n\n\n #[test]\n\n fn test_successful_hashshake() {\n", "file_path": "src/session.rs", "rank": 39, "score": 5.757621357453298 }, { "content": "#[derive(Debug, Clone)]\n\npub struct ServerSession {\n\n expire_at: DateTime<Utc>,\n\n created_at: DateTime<Utc>,\n\n local_session_keypair: KeyPair,\n\n local_identity_keypair: KeyPair,\n\n remote_session_key: PublicKey,\n\n remote_identity_key: Option<PublicKey>,\n\n state: SessionState,\n\n}\n\nimpl ServerSession {\n\n /// Server side session.\n\n pub fn new(local_identity_keypair: KeyPair, remote_session_key: PublicKey) -> ServerSession {\n\n let now = Utc::now();\n\n ServerSession {\n\n expire_at: now + Duration::minutes(HANDSHAKE_DURATION),\n\n created_at: now,\n\n local_session_keypair: KeyPair::new(),\n\n 
local_identity_keypair:\n\n local_identity_keypair,\n", "file_path": "src/session.rs", "rank": 40, "score": 5.278627157295565 }, { "content": "# Contributing to libwhisper-rs\n\n\n\nFirst and foremost, thank you! We appreciate that you want to contribute to libwhisper-rs, your time is valuable, and your contributions mean a lot to us.\n\n\n\n## Important!\n\n\n\nBy contributing to this project, you:\n\n\n\n* Agree that you have authored 100% of the content\n\n* Agree that you have the necessary rights to the content\n\n* Agree that you have received the necessary permissions from your employer to make the contributions (if applicable)\n\n* Agree that the content you contribute may be provided under the Project license(s)\n\n\n\n## Getting started\n\n\n\n**What does \"contributing\" mean?**\n\n\n\nCreating an issue is the simplest form of contributing to a project. But there are many ways to contribute, including the following:\n\n\n\n- Updating or correcting documentation\n\n- Feature requests\n\n- Bug reports\n\n\n\nIf you'd like to learn more about contributing in general, the [Guide to Idiomatic Contributing](https://github.com/jonschlinkert/idiomatic-contributing) has a lot of useful information.\n\n\n\n**Showing support for libwhisper-rs**\n\n\n\nPlease keep in mind that open source software is built by people like you, who spend their free time creating things the rest the community can use.\n\n\n\nDon't have time to contribute? 
No worries, here are some other ways to show your support for libwhisper-rs:\n\n\n\n- star the [project](https://github.com/Inner-Heaven/libwhisper-rs)\n\n- tweet your support for libwhisper-rs\n\n\n\n## Issues\n\n\n", "file_path": "contributing.md", "rank": 41, "score": 4.38412649448237 }, { "content": " # Angel Whisper\n\n[![Gitter](https://badges.gitter.im/Inner-Heaven/angel-whisper.svg)](https://gitter.im/Inner-Heaven/whisper?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge)\n\n[![Build Status](https://travis-ci.org/Inner-Heaven/libwhisper-rs.svg?branch=master)](https://travis-ci.org/Inner-Heaven/libwhisper-rs)\n\n[![codecov](https://codecov.io/gh/Inner-Heaven/libwhisper-rs/branch/master/graph/badge.svg)](https://codecov.io/gh/Inner-Heaven/libwhisper-rs)\n\n[![Crates.io](https://img.shields.io/crates/v/libwhisper.svg)](https://crates.io/crates/libwhisper)\n\n\n\n The reference implementation of Angel Whisper Wire Protocol under development. As of today, this is the only documentation of protocol available. This is refactoring of [my first attempt](https://github.com/Inner-Heaven/angel-whisper) to write this thing. However my first attempt was too broad, so I've made a decision to separate `llsd` module into its own crate to allow the creation of implementation in other languages.\n\n\n\n Angel Whisper is my attempt to build light and fast wire protocol that is suitable in IoT world and the just regular world. However, no promises. \n\n This library meant to handle encryption as well and decoding/encoding of frames.\n\nThis library doesn't handle anything else like request routing, RPC, etc. The plan is to build a framework on top of this.\n\n\n\n This library in no way production or even development ready. 
Meaning everything including wire format is subject to change.\n\nThe goal is to have at least three languages talking to each other using this protocol by the end of 2017.\n\n\n\n## Installation\n\n`libwhisper` is available on crates.io and can be included in your Cargo enabled project like this:\n\n\n\n```\n\n[dependencies]\n\nlibwhisper = \"0.1.0\"\n\n```\n\n\n\nNore that library is under development and public API might change. The protocol is stable.\n\n\n\n## Usage\n\nWell... Not much you can do with it right now. Right now [unit tests](https://github.com/Inner-Heaven/libwhisper-rs/blob/master/src/session.rs#L425) is the best usage example. Sorry about that. \n\n\n", "file_path": "README.md", "rank": 42, "score": 4.079239120001279 }, { "content": "# Contributor Covenant Code of Conduct\n\n\n\n## Our Pledge\n\n\n\nIn the interest of fostering an open and welcoming environment, we as\n\ncontributors and maintainers pledge to making participation in our project and\n\nour community a harassment-free experience for everyone, regardless of age, body\n\nsize, disability, ethnicity, gender identity and expression, level of experience,\n\nnationality, personal appearance, race, religion, or sexual identity and\n\norientation.\n\n\n\n## Our Standards\n\n\n\nExamples of behavior that contributes to creating a positive environment\n\ninclude:\n\n\n\n* Using welcoming and inclusive language\n\n* Being respectful of differing viewpoints and experiences\n\n* Gracefully accepting constructive criticism\n\n* Focusing on what is best for the community\n\n* Showing empathy towards other community members\n\n\n\nExamples of unacceptable behavior by participants include:\n\n\n\n* The use of sexualized language or imagery and unwelcome sexual attention or\n\nadvances\n\n* Trolling, insulting/derogatory comments, and personal or political attacks\n\n* Public or private harassment\n\n* Publishing others' private information, such as a physical or electronic\n\n address, without 
explicit permission\n\n* Other conduct which could reasonably be considered inappropriate in a\n\n professional setting\n\n\n\n## Our Responsibilities\n\n\n\nProject maintainers are responsible for clarifying the standards of acceptable\n\nbehavior and are expected to take appropriate and fair corrective action in\n\nresponse to any instances of unacceptable behavior.\n\n\n\nProject maintainers have the right and responsibility to remove, edit, or\n\nreject comments, commits, code, wiki edits, issues, and other contributions\n\nthat are not aligned to this Code of Conduct, or to ban temporarily or\n\npermanently any contributor for other behaviors that they deem inappropriate,\n\nthreatening, offensive, or harmful.\n\n\n", "file_path": "CODE_OF_CONDUCT.md", "rank": 43, "score": 3.2920361462742225 }, { "content": "# Changelog\n\n\n\nChangelog entries are classified using the following labels _(from [keep-a-changelog][]_):\n\n\n\n- `added`: for new features\n\n- `changed`: for changes in existing functionality\n\n- `deprecated`: for once-stable features removed in upcoming releases\n\n- `removed`: for deprecated features removed in this release\n\n- `fixed`: for any bug fixes\n\n\n\n## [0.1.1] - 2017-11-02\n\nSee [code changes](https://github.com/Inner-Heaven/libwhisper-rs/compare/0.1.0...v0.1.1).\n\n### Changed\n\n- Another readme cleanup\n\n- Session cosntructors (all 3 of them) signature. **BREAKING**\n\n### Added\n\n- crates.io badge\n\n\n\n## [0.1.0] - 2017-10-30\n\n\n\nThe initial version that was available to `crates.io`.\n\n\n\n[keep-a-changelog]: https://github.com/olivierlacan/keep-a-changelog\n\n\n", "file_path": "CHANGELOG.md", "rank": 44, "score": 3.1659603292841183 }, { "content": "### Before creating an issue\n\n\n\nPlease try to determine if the issue is caused by an underlying library, and if so, create the issue there. Sometimes this is difficult to know. We only ask that you attempt to give a reasonable attempt to find out. 
Oftentimes the readme will have advice about where to go to create issues.\n\n\n\nTry to follow these guidelines\n\n\n\n- **Avoid creating issues for implementation help**. It's much better for discoverability, SEO, and semantics - to keep the issue tracker focused on bugs and feature requests - to ask implementation-related questions on [stackoverflow.com][so]\n\n- **Investigate the issue**:\n\n- **Check the readme** - oftentimes you will find notes about creating issues, and where to go depending on the type of issue.\n\n- Create the issue in the appropriate repository.\n\n\n\n### Creating an issue\n\n\n\nPlease be as descriptive as possible when creating an issue. Give us the information we need to successfully answer your question or address your issue by answering the following in your issue:\n\n\n\n- **version**: please note the version of libwhisper-rs are you using\n\n- **extensions, plugins, helpers, etc** (if applicable): please list any extensions you're using\n\n- **error messages**: please paste any error messages into the issue, or a [gist](https://gist.github.com/)\n\n\n\n### Closing issues\n\n\n\nThe original poster or the maintainer's of libwhisper-rs may close an issue at any time. Typically, but not exclusively, issues are closed when:\n\n\n\n- The issue is resolved\n\n- The project's maintainers have determined the issue is out of scope\n\n- An issue is clearly a duplicate of another issue, in which case the duplicate issue will be linked.\n\n- A discussion has clearly run its course\n\n\n\n\n", "file_path": "contributing.md", "rank": 45, "score": 2.7129449522973736 }, { "content": "## Next Steps\n\nI'm waiting on tokio to stabilize to start working on service layer of this protocol. Ideally, service layer will me designed in a way tokio can be swapped...good ol' threads for example. \n\n\n\n## Development\n\nRight now I'm using taskwarrior for task management, which is obviously won't scale for more than one developer. 
You can find me either on gitter, IRC.\n\n\n\n ## Questions I would be asking\n\n #### Do I need any help?\n\n\n\n Yes.\n\n\n\n #### Is it secure?\n\n\n\nMaybe, maybe not. I'm not a cryptographer, this is just for the lulz. Maybe can someone help with that?\n\n\n\n ##### Why does it use secp256k1 at the beginning instead of Curve25519?\n\n\n\nBecause I had a dream where I built ethereum wallet with p2p direct messaging. Therefore, I ended up using secp256k1 public keys for identity. First attempt was using Curve25519. After some discussions, I've decided to switch back to Curve25519. Honestly, I don't like the way rust binding to `libsecp256k1`work.\n\n\n\n #### What other languages it supports?\n\n\n\nRight now — only rust. In a very close feature — C via rust library. Next step is Ruby via c library. After that pure Kotlin implementation.\n\n\n\n#### Why Rust? You barely even know rust!\n\n\n\nWell... I wanted to learn rust by writing something interesting in it. Something that involves using many features of rust. \n\n\n\n #### Why not helix?\n\n\n\n Seems like a wrong tool for this job. Fight me.\n\n\n\n #### Why Kotlin and not Java?\n\n\n", "file_path": "README.md", "rank": 46, "score": 2.3143066589875914 }, { "content": "## Next steps\n\n\n\n**Tips for creating idiomatic issues**\n\n\n\nSpending just a little extra time to review best practices and brush up on your contributing skills will, at minimum, make your issue easier to read, easier to resolve, and more likely to be found by others who have the same or similar issue in the future. 
At best, it will open up doors and potential career opportunities by helping you be at your best.\n\n\n\nThe following resources were hand-picked to help you be the most effective contributor you can be:\n\n\n\n- The [Guide to Idiomatic Contributing](https://github.com/jonschlinkert/idiomatic-contributing) is a great place for newcomers to start, but there is also information for experienced contributors there.\n\n- Take some time to learn basic markdown. We can't stress this enough. Don't start pasting code into GitHub issues before you've taken a moment to review this [markdown cheatsheet](https://gist.github.com/jonschlinkert/5854601)\n\n- The GitHub guide to [basic markdown](https://help.github.com/articles/markdown-basics/) is another great markdown resource.\n\n- Learn about [GitHub Flavored Markdown](https://help.github.com/articles/github-flavored-markdown/). And if you want to really go above and beyond, read [mastering markdown](https://guides.github.com/features/mastering-markdown/).\n\n\n\nAt the very least, please try to:\n\n\n\n- Use backticks to wrap code. This ensures that it retains its formatting and isn't modified when it's rendered by GitHub, and makes the code more readable to others\n\n- When applicable, use syntax highlighting by adding the correct language name after the first \"code fence\"\n\n\n\n\n\n[so]: http://stackoverflow.com/questions/tagged/libwhisper-rs\n", "file_path": "contributing.md", "rank": 47, "score": 1.4869084157112498 }, { "content": "## Scope\n\n\n\nThis Code of Conduct applies both within project spaces and in public spaces\n\nwhen an individual is representing the project or its community. Examples of\n\nrepresenting a project or community include using an official project e-mail\n\naddress, posting via an official social media account, or acting as an appointed\n\nrepresentative at an online or offline event. 
Representation of a project may be\n\nfurther defined and clarified by project maintainers.\n\n\n\n## Enforcement\n\n\n\nInstances of abusive, harassing, or otherwise unacceptable behavior may be\n\nreported by contacting the project team at [email protected]. All\n\ncomplaints will be reviewed and investigated and will result in a response that\n\nis deemed necessary and appropriate to the circumstances. The project team is\n\nobligated to maintain confidentiality with regard to the reporter of an incident.\n\nFurther details of specific enforcement policies may be posted separately.\n\n\n\nProject maintainers who do not follow or enforce the Code of Conduct in good\n\nfaith may face temporary or permanent repercussions as determined by other\n\nmembers of the project's leadership.\n\n\n\n## Attribution\n\n\n\nThis Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4,\n\navailable at [http://contributor-covenant.org/version/1/4][version]\n\n\n\n[homepage]: http://contributor-covenant.org\n", "file_path": "CODE_OF_CONDUCT.md", "rank": 48, "score": 1.1476269404725077 } ]
Rust
src/crypto/mac.rs
chmoder/iso8583_rs
d7e8e4256e4e6923bb1c451db38449645355e772
//! MAC (message authentication code) generation and verification in the
//! style of ISO 9797-1: plain TDES CBC-MAC (Algorithm 1) and the Retail
//! MAC (Algorithm 3), with padding methods 1 and 2.

use crate::crypto::{tdes_encrypt_cbc, des_encrypt_cbc};

/// MAC algorithm selector.
pub enum MacAlgo {
    /// ISO 9797-1 Algorithm 1: triple-DES CBC over the whole (padded)
    /// message; the MAC is the final cipher block.
    CbcMac,
    /// ISO 9797-1 Algorithm 3 ("Retail MAC"): single-DES CBC over all but
    /// the last block, then one triple-DES pass over the final block.
    RetailMac,
}

/// Padding applied to the data before MAC computation.
pub enum PaddingType {
    /// ISO 9797-1 padding method 1: zero-pad up to a multiple of 8 bytes.
    Type1,
    /// ISO 9797-1 padding method 2: append a 0x80 byte, then zero-pad up to
    /// a multiple of 8 bytes.
    Type2,
}

/// Error raised when MAC generation or verification fails; `msg` carries a
/// human-readable description.
pub struct MacError {
    pub msg: String
}

/// Computes the MAC of `data` with `key` and compares it against
/// `expected_mac`.
///
/// Returns `Ok(())` on a match, otherwise a `MacError` describing the
/// mismatch (the error message includes both MACs in hex).
pub fn verify_mac(algo: &MacAlgo, padding_type: &PaddingType, data: &[u8], key: &Vec<u8>, expected_mac: &Vec<u8>) -> Result<(), MacError> {
    let mac = generate_mac(algo, padding_type, &data.to_vec(), key)?;
    // Constant-time comparison: fold byte differences with OR instead of
    // short-circuiting on the first mismatch, so the check does not leak
    // how many leading bytes of the expected MAC were correct.
    let diff = mac.iter()
        .zip(expected_mac.iter())
        .fold(0u8, |acc, (a, b)| acc | (a ^ b));
    if mac.len() == expected_mac.len() && diff == 0 {
        Ok(())
    } else {
        Err(MacError { msg: format!("computed mac: {} doesn't match expected_mac: {}", hex::encode(mac), hex::encode(expected_mac)) })
    }
}

/// Generates a MAC over `data` using the given algorithm and padding type.
///
/// `key` is a double-length (16-byte) TDES key. The returned MAC is always
/// 8 bytes.
pub fn generate_mac(algo: &MacAlgo, padding_type: &PaddingType, data: &Vec<u8>, key: &Vec<u8>) -> Result<Vec<u8>, MacError> {
    let new_data = apply_padding(padding_type, data);
    // All-zero IV, as mandated for ISO 9797-1 CBC-MAC.
    let iv = vec![0u8; 8];

    // NOTE(review): printing the MAC'd data to stdout leaks potentially
    // sensitive material; consider switching to debug! or removing.
    println!("generating mac on {}", hex::encode(data));

    match algo {
        MacAlgo::CbcMac => {
            let res = tdes_encrypt_cbc(&new_data, key, &iv);
            // The MAC is the last cipher block of the CBC chain.
            Ok(res[res.len() - 8..].to_vec())
        }
        MacAlgo::RetailMac => {
            // Single-DES key for the chaining phase = leftmost 8 bytes of
            // the double-length key.
            let k = key.as_slice()[0..8].to_vec();
            if new_data.len() == 8 {
                // BUGFIX: decide on (and encrypt) the *padded* data. The
                // original checked `data.len() == 8` and encrypted the raw
                // input, which skipped Type2 padding for 8-byte messages.
                // For Type1 with 8-byte input, new_data == data, so all
                // existing vectors are unchanged.
                Ok(tdes_encrypt_cbc(&new_data, key, &iv))
            } else {
                // Split off the final 8-byte block.
                let d1 = &new_data[0..new_data.len() - 8].to_vec();
                let d2 = &new_data[new_data.len() - 8..].to_vec();
                // Chain everything but the last block with single DES...
                let res1 = des_encrypt_cbc(&d1, &k, &iv);
                // ...then run the final block through TDES, seeded with the
                // last single-DES cipher block.
                Ok(tdes_encrypt_cbc(&d2, key, &res1[(res1.len() - 8)..].to_vec()))
            }
        }
    }
}

/// Applies the chosen padding method and returns the padded copy of `data`.
///
/// The result is always at least 8 bytes long and a multiple of 8 bytes.
fn apply_padding(padding_type: &PaddingType, data: &Vec<u8>) -> Vec<u8> {
    let mut new_data = data.clone();
    match padding_type {
        // Method 1: zero padding only (added below).
        PaddingType::Type1 => {}
        // Method 2: mandatory 0x80 marker byte first.
        PaddingType::Type2 => {
            new_data.push(0x80);
        }
    };
    // Ensure a minimum of one full 8-byte block (covers empty input)...
    while new_data.len() < 8 {
        new_data.push(0x00);
    }
    // ...and round up to a multiple of the 8-byte block size.
    while new_data.len() % 8 != 0 {
        new_data.push(0x00);
    }
    new_data
}


#[cfg(test)]
mod tests {
    use crate::crypto::mac::{apply_padding, PaddingType, generate_mac, MacAlgo};
    use hex_literal::hex;

    /// Helper: run generate_mac and return the MAC hex-encoded, panicking
    /// with the MacError message on failure. (Replaces the previous
    /// `assert!(false, e.msg)` pattern, whose non-literal panic payload is
    /// rejected by Rust 2021.)
    fn mac_hex(algo: &MacAlgo, pt: &PaddingType, data: &[u8], key: &[u8]) -> String {
        match generate_mac(algo, pt, &data.to_vec(), &key.to_vec()) {
            Ok(m) => {
                println!("mac = {}", hex::encode(m.as_slice()));
                hex::encode(m)
            }
            Err(e) => panic!("{}", e.msg),
        }
    }

    #[test]
    fn test_padding1_shortof8() {
        let data = hex::decode("0102030405").unwrap();
        assert_eq!(hex::encode(apply_padding(&PaddingType::Type1, &data)), "0102030405000000");
    }

    #[test]
    fn test_padding1_exact() {
        let data = hex::decode("0102030405060708").unwrap();
        assert_eq!(hex::encode(apply_padding(&PaddingType::Type1, &data)), "0102030405060708");
    }

    #[test]
    fn test_padding1_typical_short() {
        let data = hex::decode("0102030405060708090a").unwrap();
        assert_eq!(hex::encode(apply_padding(&PaddingType::Type1, &data)), "0102030405060708090a000000000000");
    }

    #[test]
    fn test_padding2_shortof8() {
        let data = hex::decode("0102030405").unwrap();
        assert_eq!(hex::encode(apply_padding(&PaddingType::Type2, &data)), "0102030405800000");
    }

    #[test]
    fn test_padding2_exact() {
        let data = hex::decode("0102030405060708").unwrap();
        assert_eq!(hex::encode(apply_padding(&PaddingType::Type2, &data)), "01020304050607088000000000000000");
    }

    #[test]
    fn test_padding2_typical_short() {
        let data = hex::decode("0102030405060708090a").unwrap();
        assert_eq!(hex::encode(apply_padding(&PaddingType::Type2, &data)), "0102030405060708090a800000000000");
    }

    #[test]
    fn test_gen_mac_cbc_nopads() {
        assert_eq!("7d34c3071da931b9",
                   mac_hex(&MacAlgo::CbcMac, &PaddingType::Type1,
                           &hex!("0102030405060708"),
                           &hex!("e0f4543f3e2a2c5ffc7e5e5a222e3e4d")));
    }

    #[test]
    fn test_gen_mac_cbc_2() {
        assert_eq!("0fe28f4b5537ee79",
                   mac_hex(&MacAlgo::CbcMac, &PaddingType::Type1,
                           &hex!("01020304050607080102030405060708"),
                           &hex!("e0f4543f3e2a2c5ffc7e5e5a222e3e4d")));
    }

    #[test]
    fn test_gen_mac_cbc_3() {
        assert_eq!("8fb12963d5661a22",
                   mac_hex(&MacAlgo::CbcMac, &PaddingType::Type1,
                           &hex!("01020304050607080102030405"),
                           &hex!("e0f4543f3e2a2c5ffc7e5e5a222e3e4d")));
    }

    #[test]
    fn test_gen_mac_cbc_2_paddingtype2() {
        assert_eq!("8568cd2b7698605f",
                   mac_hex(&MacAlgo::CbcMac, &PaddingType::Type2,
                           &hex!("01020304050607080102030405"),
                           &hex!("e0f4543f3e2a2c5ffc7e5e5a222e3e4d")));
    }

    #[test]
    fn test_gen_mac_retail1_nopads() {
        assert_eq!("7d34c3071da931b9",
                   mac_hex(&MacAlgo::RetailMac, &PaddingType::Type1,
                           &hex!("0102030405060708"),
                           &hex!("e0f4543f3e2a2c5ffc7e5e5a222e3e4d")));
    }

    #[test]
    fn test_gen_mac_retail2_padtype1() {
        assert_eq!("149f99288681d292",
                   mac_hex(&MacAlgo::RetailMac, &PaddingType::Type1,
                           &hex!("0102030405060708010203040506070801020304050607080000"),
                           &hex!("e0f4543f3e2a2c5ffc7e5e5a222e3e4d")));
    }

    #[test]
    fn test_gen_mac_retail_padtype2() {
        assert_eq!("4689dd5a87015394",
                   mac_hex(&MacAlgo::RetailMac, &PaddingType::Type2,
                           &hex!("0102030405060708010203040506070801020304050607080000"),
                           &hex!("e0f4543f3e2a2c5ffc7e5e5a222e3e4d")));
    }
}
use crate::crypto::{tdes_encrypt_cbc, des_encrypt_cbc}; pub enum MacAlgo { CbcMac, RetailMac, } pub enum PaddingType { Type1, Type2, } pub struct MacError { pub msg: String } pub fn verify_mac(algo: &MacAlgo, padding_type: &PaddingType, data: &[u8], key: &Vec<u8>, expected_mac: &Vec<u8>) -> Result<(), MacError> { let mac = generate_mac(algo, padding_type, &data.to_vec(), key)?; if mac.eq(expected_mac) { Ok(()) } else { Err(MacError { msg: format!("computed mac: {} doesn't match expected_mac: {}", hex::encode(mac), hex::encode(expected_mac)) }) } } pub fn generate_mac(algo: &MacAlgo, padding_type: &PaddingType, data: &Vec<u8>, key: &Vec<u8>) -> Result<Vec<u8>, MacError> { let new_data = apply_padding(padding_type, data); let mut iv = Vec::<u8>::new(); iv.extend_from_slice(hex::decode("0000000000000000").unwrap().as_slice()); println!("generating mac on {}", hex::encode(data)); match algo { MacAlgo::CbcMac => { let res = tdes_encrypt_cbc(&new_data, key, &iv); Ok(res[res.len() - 8..].to_vec()) } MacAlgo::RetailMac => { let k = key.as_slice()[0..8].to_vec(); if data.len() == 8 { Ok(tdes_encrypt_cbc(&data, key, &iv)) } else { let d1 = &new_data[0..new_data.len() - 8].to_vec(); let d2 = &new_data[new_data.len() - 8..].to_vec(); let res1 = des_encrypt_cbc(&d1, &k, &iv); Ok(tdes_encrypt_cbc(&d2, key, &res1[(res1.len() - 8)..].to_vec())) } } } } fn apply_padding(padding_type: &PaddingType, data: &Vec<u8>) -> Vec<u8> { let mut new_data = data.clone(); match padding_type { PaddingType::Type1 => {} PaddingType::Type2 => { new_data.push(0x80); } }; while new_data.len() < 8 { new_data.push(0x00); } while new_data.len() % 8 != 0 { new_data.push(0x00); } new_data } #[cfg(test)] mod tests { use crate::crypto::mac::{apply_padding, PaddingType, generate_mac, MacAlgo}; use hex_literal::hex; #[test] fn test_padding1_shortof8() { let data = hex::decode("0102030405").unwrap(); assert_eq!(hex::encode(apply_padding(&PaddingType::Type1, &data)), "0102030405000000"); } #[test] fn 
test_padding1_exact() { let data = hex::decode("0102030405060708").unwrap(); assert_eq!(hex::encode(apply_padding(&PaddingType::Type1, &data)), "0102030405060708"); } #[test] fn test_padding1_typical_short() { let data = hex::decode("0102030405060708090a").unwrap(); assert_eq!(hex::encode(apply_padding(&PaddingType::Type1, &data)), "0102030405060708090a000000000000"); } #[test] fn test_padding2_shortof8() { let data = hex::decode("0102030405").unwrap(); assert_eq!(hex::encode(apply_padding(&PaddingType::Type2, &data)), "0102030405800000"); } #[test] fn test_padding2_exact() { let data = hex::decode("0102030405060708").unwrap(); assert_eq!(hex::encode(apply_padding(&PaddingType::Type2, &data)), "01020304050607088000000000000000"); } #[test] fn test_padding2_typical_short() { let data = hex::decode("0102030405060708090a").unwrap(); assert_eq!(hex::encode(apply_padding(&PaddingType::Type2, &data)), "0102030405060708090a800000000000"); } #[test] fn test_gen_mac_cbc_nopads() { let res = generate_mac(&MacAlgo::CbcMac, &PaddingType::Type1, &Vec::from(hex!("0102030405060708")), &Vec::from(hex!("e0f4543f3e2a2c5ffc7e5e5a222e3e4d"))); match res { Ok(m) => { println!("mac = {}", hex::encode(m.as_slice())); assert_eq!("7d34c3071da931b9", hex::encode(m)); } Err(e) => { assert!(false, e.msg) } } } #[test] fn test_gen_mac_cbc_2() { let res = generate_mac(&MacAlgo::CbcMac, &PaddingType::Type1, &Vec::from(hex!("01020304050607080102030405060708")), &Vec::from(hex!("e0f4543f3e2a2c5ffc7e5e5a222e3e4d"))); match res { Ok(m) => { println!("mac = {}", hex::encode(m.as_slice())); assert_eq!("0fe28f4b5537ee79", hex::encode(m)); } Err(e) => { assert!(false, e.msg) } } } #[test] fn test_gen_mac_cbc_3() { let res = generate_mac(&MacAlgo::CbcMac, &PaddingType::Type1, &Vec::from(hex!("01020304050607080102030405")), &Vec::from(hex!("e0f4543f3e2a2c5ffc7e5e5a222e3e4d")));
} #[test] fn test_gen_mac_cbc_2_paddingtype2() { let res = generate_mac(&MacAlgo::CbcMac, &PaddingType::Type2, &Vec::from(hex!("01020304050607080102030405")), &Vec::from(hex!("e0f4543f3e2a2c5ffc7e5e5a222e3e4d"))); match res { Ok(m) => { println!("mac = {}", hex::encode(m.as_slice())); assert_eq!("8568cd2b7698605f", hex::encode(m)); } Err(e) => { assert!(false, e.msg) } } } #[test] fn test_gen_mac_retail1_nopads() { let res = generate_mac(&MacAlgo::RetailMac, &PaddingType::Type1, &Vec::from(hex!("0102030405060708")), &Vec::from(hex!("e0f4543f3e2a2c5ffc7e5e5a222e3e4d"))); match res { Ok(m) => { println!("mac = {}", hex::encode(m.as_slice())); assert_eq!("7d34c3071da931b9", hex::encode(m)); } Err(e) => { assert!(false, e.msg) } } } #[test] fn test_gen_mac_retail2_padtype1() { let res = generate_mac(&MacAlgo::RetailMac, &PaddingType::Type1, &Vec::from(hex!("0102030405060708010203040506070801020304050607080000")), &Vec::from(hex!("e0f4543f3e2a2c5ffc7e5e5a222e3e4d"))); match res { Ok(m) => { println!("mac = {}", hex::encode(m.as_slice())); assert_eq!(hex::encode(m), "149f99288681d292"); } Err(e) => { assert!(false, e.msg) } } } #[test] fn test_gen_mac_retail_padtype2() { let res = generate_mac(&MacAlgo::RetailMac, &PaddingType::Type2, &Vec::from(hex!("0102030405060708010203040506070801020304050607080000")), &Vec::from(hex!("e0f4543f3e2a2c5ffc7e5e5a222e3e4d"))); match res { Ok(m) => { println!("mac = {}", hex::encode(m.as_slice())); assert_eq!(hex::encode(m), "4689dd5a87015394"); } Err(e) => { assert!(false, e.msg) } } } }
match res { Ok(m) => { println!("mac = {}", hex::encode(m.as_slice())); assert_eq!("8fb12963d5661a22", hex::encode(m)); } Err(e) => { assert!(false, e.msg) } }
if_condition
[ { "content": "/// Pad a random hex string to'data' to make it 8 bytes\n\nfn pad_8(data: &mut String) {\n\n let padding: [u8; 8] = rand::thread_rng().gen();\n\n data.push_str(hex::encode(padding).as_str());\n\n data.truncate(16);\n\n}\n\n\n", "file_path": "src/crypto/pin.rs", "rank": 2, "score": 161636.28270005088 }, { "content": "/// Pad a random hex string (only from A to F) to 'data' to make it 8 bytes\n\nfn pad_8_a2f(data: &mut String) {\n\n let mut padding: [u8; 8] = rand::thread_rng().gen();\n\n padding.iter_mut().for_each(|f: &mut u8| {\n\n //just ensure a min of A for each :-)\n\n *f = *f | (0xAA as u8);\n\n });\n\n data.push_str(hex::encode(padding).as_str());\n\n data.truncate(16);\n\n}\n\n\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::crypto::pin::{generate_pin_block, verify_pin};\n\n use crate::crypto::pin::PinFormat::{ISO0, ISO1, ISO2, ISO3};\n\n\n\n #[test]\n\n fn test_iso0() {\n\n match generate_pin_block(&ISO0, \"1234\", \"4111111111111111\", \"e0f4543f3e2a2c5ffc7e5e5a222e3e4d\") {\n\n Ok(p) => {\n", "file_path": "src/crypto/pin.rs", "rank": 3, "score": 157901.38305624068 }, { "content": "/// Converts ASCII bytes to EBCDIC bytes\n\nfn ascii_to_ebcdic(data: &mut Vec<u8>) -> Vec<u8> {\n\n for i in 0..data.len() {\n\n encoding8::ascii::make_ebcdic(data.get_mut(i).unwrap())\n\n }\n\n data.to_vec()\n\n}\n\n\n\n\n\npub(in crate::iso8583) fn string_to_vec(encoding: &Encoding, data: &str) -> Vec<u8> {\n\n match encoding {\n\n ASCII => {\n\n data.to_string().into_bytes()\n\n }\n\n EBCDIC => {\n\n let mut ebcdic = vec![];\n\n (&mut data.to_string()).as_bytes().iter().for_each(|b| ebcdic.push(encoding8::ascii::to_ebcdic(b.clone())));\n\n ebcdic\n\n }\n\n BINARY => {\n\n hex::decode(data).unwrap()\n", "file_path": "src/iso8583/field.rs", "rank": 4, "score": 151480.5426083854 }, { "content": "/// Converts EBCDIC bytes into a ASCII string\n\nfn ebcdic_to_ascii(data: &Vec<u8>) -> String {\n\n let mut ascii_str = String::new();\n\n data.iter().for_each(|f| 
ascii_str.push(char::from(encoding8::ebcdic::to_ascii(f.clone()))));\n\n ascii_str\n\n}\n\n\n", "file_path": "src/iso8583/field.rs", "rank": 5, "score": 150004.1657043483 }, { "content": "// Handle the incoming 1100 message based on amount\n\n// if amount (F4) <100 then\n\n// F38 = APPR01;\n\n// F39 = 000;\n\n// else\n\n// F39 = 100;\n\n//\n\n//\n\nfn handle_1100(iso_msg: &IsoMsg, raw_msg: &Vec<u8>, iso_resp_msg: &mut IsoMsg) -> Result<(), IsoError> {\n\n iso_resp_msg.set(\"message_type\", \"1110\").unwrap_or_default();\n\n //validate the mac\n\n if iso_msg.bmp.is_on(64) || iso_msg.bmp.is_on(128) {\n\n\n\n let key=hex!(\"e0f4543f3e2a2c5ffc7e5e5a222e3e4d\").to_vec();\n\n let expected_mac = match iso_msg.bmp.is_on(64) {\n\n true => {\n\n iso_msg.bmp_child_value(64)\n\n }\n\n false => {\n\n iso_msg.bmp_child_value(128)\n\n }\n\n };\n\n let mac_data=&raw_msg.as_slice()[0..raw_msg.len() - 8];\n\n match verify_mac(&RetailMac, &Type1, mac_data, &key, &hex::decode(expected_mac.unwrap()).unwrap()) {\n\n Ok(_) => {\n\n debug!(\"mac verified OK!\");\n\n }\n\n Err(e) => {\n", "file_path": "src/main.rs", "rank": 6, "score": 146364.15966652604 }, { "content": "pub fn generate_pin_block(fmt: &PinFormat, c_pin: &str, pan: &str, key: &str) -> Result<Vec<u8>, PinError> {\n\n match fmt {\n\n PinFormat::ISO0 => {\n\n let mut b1 = format!(\"0{:X}{}\", c_pin.len(), c_pin);\n\n pad_8(&mut b1);\n\n println!(\"= {}\", b1);\n\n\n\n //rightmost 12 not including check digit\n\n let mut b2 = String::from(\"0000\");\n\n b2.push_str(&pan[pan.len() - 13..pan.len() - 1]);\n\n\n\n let res = xor_hexstr(b1.as_str(), b2.as_str());\n\n let res = tdes_ede2_encrypt(&res, &hex::decode(key).unwrap().to_vec());\n\n\n\n Ok(res.to_vec())\n\n }\n\n PinFormat::ISO1 => {\n\n let mut b1 = format!(\"1{:X}{}\", c_pin.len(), c_pin);\n\n pad_8(&mut b1);\n\n match hex::decode(b1) {\n", "file_path": "src/crypto/pin.rs", "rank": 7, "score": 130613.5818501488 }, { "content": "/// Verifies the pin in the 'pin_block' 
against expected_pin and returns a boolean to indicate if there was\n\n/// was a successful match\n\npub fn verify_pin(fmt: &PinFormat, expected_pin: &str, pin_block: &Vec<u8>, pan: &str, key: &str) -> Result<bool, PinError> {\n\n debug!(\"verifying pin - expected_pin: {}, block: {}, pan:{}, key:{}\", expected_pin, hex::encode(pin_block), pan, key);\n\n match fmt {\n\n PinFormat::ISO0 => {\n\n let mut b2 = String::from(\"0000\");\n\n b2.push_str(&pan[pan.len() - 13..pan.len() - 1]);\n\n\n\n let res = tdes_ede2_decrypt(&pin_block, &hex::decode(key).unwrap().to_vec());\n\n let res = xor_hexstr(hex::encode(res.as_slice()).as_str(), b2.as_str());\n\n let pin_len = res.get(0).unwrap();\n\n let b1 = hex::encode(&res);\n\n let actual_pin = b1.get(2 as usize..(2 + pin_len) as usize).unwrap().clone();\n\n if expected_pin == actual_pin {\n\n Ok(true)\n\n } else {\n\n Ok(false)\n\n }\n\n }\n\n\n\n PinFormat::ISO1 => {\n", "file_path": "src/crypto/pin.rs", "rank": 9, "score": 121725.07118378577 }, { "content": "fn collect_children(f: &dyn Field, ordered_fields: &mut Vec<String>) {\n\n ordered_fields.push(f.name().clone());\n\n f.children().iter().for_each(|f| collect_children(*f, ordered_fields));\n\n}\n\n\n\nimpl Display for IsoMsg {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), std::fmt::Error> {\n\n let mut res = \"\".to_string();\n\n let mut ordered_fields = vec![];\n\n self.msg.fields.iter().for_each(|f| collect_children(f.as_ref(), &mut ordered_fields));\n\n\n\n res = res + format!(\"\\n{:20.40} : {:5} : {} \", \"-Field-\", \"-Position-\", \"-Field Value-\").as_str();\n\n for f in ordered_fields {\n\n if self.fd_map.contains_key(f.as_str()) {\n\n let field = self.msg.field_by_name(&f).unwrap();\n\n let field_value = &self.fd_map.get(f.as_str()).unwrap();\n\n let mut pos_str: String = String::new();\n\n if field.position() > 0 {\n\n pos_str = format!(\"{:03}\", field.position());\n\n }\n\n\n\n //debug!(\"** formatting {}\",field.name());\n\n res = res + 
format!(\"\\n{:20.40} : {:^10} : {} \", f, pos_str.as_str(), field.to_string(field_value)).as_str();\n\n }\n\n }\n\n f.write_str(&res).unwrap();\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "src/iso8583/iso_spec.rs", "rank": 10, "score": 98146.82702633212 }, { "content": "pub fn read_spec(spec_file: &str) -> Result<Spec, IsoError> {\n\n match std::fs::File::open(spec_file) {\n\n Ok(f) => {\n\n let mut yaml_str = String::new();\n\n let _ = (&f).read_to_string(&mut yaml_str);\n\n\n\n match serde_yaml::from_str::<YSpec>(&yaml_str) {\n\n Ok(y_spec) => {\n\n Ok(y_spec.into())\n\n }\n\n Err(e) => Err(IsoError { msg: e.to_string() })\n\n }\n\n }\n\n Err(e) => {\n\n Err(IsoError { msg: e.to_string() })\n\n }\n\n }\n\n}\n\n\n\n\n", "file_path": "src/iso8583/yaml_de.rs", "rank": 11, "score": 95904.75703642951 }, { "content": "/// Returns a empty IsoMsg that can be used to create a message\n\npub fn new_msg(spec: &'static Spec, seg: &'static MessageSegment) -> IsoMsg {\n\n IsoMsg {\n\n spec,\n\n msg: seg,\n\n fd_map: HashMap::new(),\n\n bmp: Bitmap::new(0, 0, 0),\n\n }\n\n}\n\n\n\nimpl Spec {\n\n /// Returns a IsoMsg after parsing data or an ParseError on failure\n\n pub fn parse(&'static self, data: &mut Vec<u8>) -> Result<IsoMsg, ParseError> {\n\n let msg = self.get_msg_segment(data);\n\n if msg.is_err() {\n\n return Err(ParseError { msg: msg.err().unwrap().msg });\n\n }\n\n\n\n let mut iso_msg = IsoMsg {\n\n spec: &self,\n\n msg: &msg.unwrap(),\n", "file_path": "src/iso8583/iso_spec.rs", "rank": 12, "score": 84870.42023256913 }, { "content": "/// Returns a spec given its name\n\npub fn spec(_name: &str) -> &'static Spec {\n\n //TODO:: handle case of multiple specs, for now just return the first\n\n ALL_SPECS.iter().find_map(|(_k, v)| Some(v)).unwrap()\n\n}\n\n\n", "file_path": "src/iso8583/iso_spec.rs", "rank": 13, "score": 68292.55256691121 }, { "content": "/// XOR the contents of 2 hex string (of equal length) and return the result\n\n/// as a Vec<u8>\n\nfn xor_hexstr(b1: 
&str, b2: &str) -> Vec<u8> {\n\n assert_eq!(b1.len(), b2.len());\n\n hex::decode(b1).unwrap().iter().\n\n zip(hex::decode(b2).\n\n unwrap().iter()).\n\n map(|f| f.0 ^ f.1).collect::<Vec<u8>>()\n\n}\n\n\n\n\n", "file_path": "src/crypto/pin.rs", "rank": 14, "score": 66065.69780846193 }, { "content": "/// This trait whose implementation is used by the IsoServer to handle incoming requests\n\npub trait MsgProcessor: Send + Sync {\n\n fn process(&self, iso_server: &ISOServer, msg: &mut Vec<u8>) -> Result<(Vec<u8>, IsoMsg), IsoError>;\n\n}\n\n\n\nimpl ISOServer {\n\n /// Returns a new ISO server on success or a IsoServer if the provided addr is incorrect\n\n pub fn new<'a>(host_port: String, spec: &'static Spec, mli_type: MLIType, msg_processor: Box<dyn MsgProcessor>) -> Result<ISOServer, IsoServerError> {\n\n let mli: Arc<Box<dyn MLI>>;\n\n\n\n match mli_type {\n\n MLIType::MLI2E => {\n\n mli = Arc::new(Box::new(MLI2E {}));\n\n }\n\n MLIType::MLI2I => {\n\n mli = Arc::new(Box::new(MLI2I {}));\n\n }\n\n MLIType::MLI4E => {\n\n mli = Arc::new(Box::new(MLI4E {}));\n\n }\n\n MLIType::MLI4I => {\n", "file_path": "src/iso8583/server.rs", "rank": 15, "score": 63608.7876477188 }, { "content": "/// convert a std::io::Error into an IsoError\n\nfn convert_err(e: &Error) -> IsoError {\n\n match e.kind() {\n\n ErrorKind::ConnectionReset | ErrorKind::UnexpectedEof => {\n\n IsoError { msg: format!(\"connection closed. 
cause: {:?}\", e.kind()) }\n\n }\n\n _ => {\n\n IsoError { msg: format!(\"{:?}: {}\", e.kind(), e.to_string()) }\n\n }\n\n }\n\n}\n\n\n\nimpl MLI for MLI2E {\n\n fn parse(&self, in_buf: &mut dyn Read) -> Result<u32, IsoError> {\n\n match in_buf.read_u16::<byteorder::BigEndian>() {\n\n Ok(n) => {\n\n Ok(n as u32)\n\n }\n\n Err(e) => {\n\n Err(convert_err(&e))\n\n }\n", "file_path": "src/iso8583/mli.rs", "rank": 16, "score": 59093.753150161065 }, { "content": "fn main() {\n\n let path = Path::new(\".\").join(\"sample_spec\").join(\"sample_spec.yaml\");\n\n let spec_file = path.to_str().unwrap();\n\n std::env::set_var(\"SPEC_FILE\", spec_file);\n\n\n\n let _ = simplelog::SimpleLogger::init(simplelog::LevelFilter::Debug, simplelog::Config::default());\n\n\n\n let iso_spec = iso8583_rs::iso8583::iso_spec::spec(\"\");\n\n\n\n info!(\"starting iso server for spec {} at port {}\", iso_spec.name(), 6666);\n\n let server = match ISOServer::new(\"127.0.0.1:6666\".to_string(),\n\n iso_spec,\n\n MLI2E,\n\n Box::new(SampleMsgProcessor {})) {\n\n Ok(server) => {\n\n server\n\n }\n\n Err(e) => {\n\n error!(\"failed to start ISO server - {}\", e.msg);\n\n panic!(e)\n\n }\n\n };\n\n server.start().join().unwrap()\n\n}\n\n\n\n\n", "file_path": "src/main.rs", "rank": 17, "score": 42311.648973469855 }, { "content": "/// This trait represents a ISO field (specific implementations are FixedField, VarField and BmpField)\n\npub trait Field: Sync {\n\n /// Returns the name of the field\n\n fn name(&self) -> &String;\n\n\n\n /// Parses the field by reading from in_buf and stores the result into f2d_map\n\n /// Returns a ParseError on failure\n\n fn parse(&self, in_buf: &mut dyn BufRead, f2d_map: &mut HashMap<String, Vec<u8>>) -> Result<(), ParseError>;\n\n\n\n /// Assembles the field i.e. 
appends it data into out_buf\n\n /// Returns the number of bytes written on success or a ParseError on failure\n\n fn assemble(&self, out_buf: &mut Vec<u8>, iso_msg: &IsoMsg) -> Result<u32, ParseError>;\n\n\n\n /// Returns the position of the field in the parent field (mostly applicable for chlidren of BmpField)\n\n fn position(&self) -> u32;\n\n\n\n /// Returns children as Vec\n\n fn children(&self) -> Vec<&dyn Field>;\n\n\n\n /// Returns the child field by position\n\n fn child_by_pos(&self, pos: u32) -> &dyn Field;\n", "file_path": "src/iso8583/field.rs", "rank": 18, "score": 42153.78766224771 }, { "content": "pub trait MLI: Sync + Send {\n\n /// Extracts MLI from in_buf\n\n fn parse(&self, in_buf: &mut dyn Read) -> Result<u32, IsoError>;\n\n /// Creates a Vec<u8> that represents the MLI containing n bytes\n\n fn create(&self, n: &usize) -> Result<Vec<u8>, IsoError>;\n\n}\n\n\n\n/// This struct represents an MLI of 2E (i.e 2 bytes of length indicator exclusive of its own length)\n\npub struct MLI2E {}\n\n\n\n/// This struct represents an MLI of 2I (i.e 2 bytes of length indicator inclusive of its own length)\n\npub struct MLI2I {}\n\n\n\n/// This struct represents an MLI of 4E (i.e 4 bytes of length indicator exclusive of its own length)\n\npub struct MLI4E {}\n\n\n\n/// This struct represents an MLI of 4I (i.e 4 bytes of length indicator inclusive of its own length)\n\npub struct MLI4I {}\n\n\n", "file_path": "src/iso8583/mli.rs", "rank": 19, "score": 40289.86195043273 }, { "content": "/// Runs a new thread to handle a new incoming connection\n\nfn new_client(iso_server: &ISOServer, stream_: TcpStream) {\n\n let server = ISOServer {\n\n sock_addr: iso_server.sock_addr.clone(),\n\n spec: iso_server.spec,\n\n mli: iso_server.mli.clone(),\n\n msg_processor: iso_server.msg_processor.clone(),\n\n };\n\n\n\n std::thread::spawn(move || {\n\n let stream = stream_;\n\n let mut reading_mli = true;\n\n let mut mli: u32 = 0;\n\n\n\n let mut reader = 
BufReader::new(&stream);\n\n let mut writer: Box<dyn Write> = Box::new(&stream);\n\n\n\n 'done:\n\n loop {\n\n if reading_mli {\n\n match server.mli.parse(&mut reader) {\n", "file_path": "src/iso8583/server.rs", "rank": 20, "score": 31136.914198767536 }, { "content": " pub(crate) msg: String\n\n}\n\n\n\npub(crate) fn tdes_ede2_encrypt(data: &Vec<u8>, key: &Vec<u8>) -> Vec<u8> {\n\n let block_cipher = des::TdesEde2::new(GenericArray::from_slice(key.as_slice()));\n\n\n\n let mut cp_data = data.clone();\n\n block_cipher.encrypt_block(GenericArray::from_mut_slice(&mut cp_data));\n\n cp_data\n\n}\n\n\n\npub(crate) fn tdes_ede2_decrypt(data: &Vec<u8>, key: &Vec<u8>) -> Vec<u8> {\n\n let block_cipher = des::TdesEde2::new(GenericArray::from_slice(key.as_slice()));\n\n\n\n let mut cp_data = data.clone();\n\n block_cipher.decrypt_block(GenericArray::from_mut_slice(&mut cp_data));\n\n cp_data\n\n}\n\n\n", "file_path": "src/crypto/mod.rs", "rank": 21, "score": 29755.126399070978 }, { "content": "//! 
This module contains functions related to ISO8583 specifications, message, parsers etc\n\npub mod client;\n\npub mod bitmap;\n\npub mod field;\n\npub mod iso_spec;\n\npub mod server;\n\nmod test;\n\nmod yaml_de;\n\npub mod mli;\n\npub mod config;\n\n\n\n/// IsoError represents a generic error throughout this and dependent sub-modules\n\n#[derive(Debug)]\n\npub struct IsoError {\n\n pub msg: String,\n\n}\n", "file_path": "src/iso8583/mod.rs", "rank": 22, "score": 29749.753996274638 }, { "content": "pub mod mac;\n\npub mod pin;\n\n\n\nextern crate rand;\n\nextern crate des;\n\nextern crate block_modes;\n\nextern crate hex_literal;\n\n\n\n\n\nuse generic_array::{GenericArray};\n\nuse des::block_cipher::NewBlockCipher;\n\nuse des::block_cipher::BlockCipher;\n\n\n\n\n\nuse self::block_modes::{BlockMode};\n\n\n\n\n\n/// CryptoError is a generic error in processing within this crate\n\n#[allow(unused)]\n\npub(crate) struct CryptoError {\n", "file_path": "src/crypto/mod.rs", "rank": 23, "score": 29747.171919596163 }, { "content": "#[cfg(test)]\n\nmod tests {\n\n use crate::iso8583::client::ISOTcpClient;\n\n use crate::iso8583::{iso_spec, IsoError};\n\n use crate::iso8583::mli::MLIType::MLI2E;\n\n use crate::crypto::pin::PinFormat::ISO0;\n\n use crate::iso8583::config::Config;\n\n use crate::crypto::mac::MacAlgo::RetailMac;\n\n use crate::crypto::mac::PaddingType::Type1;\n\n\n\n use log;\n\n use simplelog;\n\n use std::env::join_paths;\n\n use std::path::Path;\n\n\n\n #[test]\n\n #[ignore]\n\n fn test_send_recv_iso_1100() -> Result<(), IsoError> {\n\n let path = Path::new(\".\").join(\"sample_spec\").join(\"sample_spec.yaml\");\n\n std::env::set_var(\"SPEC_FILE\", path.to_str().unwrap());\n", "file_path": "src/iso8583/test.rs", "rank": 24, "score": 29712.19597326039 }, { "content": " // This should be the last thing you should do with the msg\n\n // as any further modifications will not recompute the MAC\n\n iso_msg.set_mac(&cfg);\n\n //end --------- set mac\n\n\n\n\n\n let 
mut client = ISOTcpClient::new(\"localhost:6666\", &spec, MLI2E);\n\n\n\n match client.send(&iso_msg) {\n\n Ok(resp_iso_msg) => {\n\n println!(\"Received {} \\n {}\", resp_iso_msg.msg.name(), resp_iso_msg);\n\n }\n\n Err(e) => {\n\n eprintln!(\"{:?}\", e)\n\n }\n\n }\n\n client.close();\n\n Ok(())\n\n }\n\n\n", "file_path": "src/iso8583/test.rs", "rank": 25, "score": 29711.34766020482 }, { "content": "\n\n let spec = crate::iso8583::iso_spec::spec(\"\");\n\n let msg_seg = spec.get_message_from_header(\"1100\").unwrap();\n\n\n\n\n\n let mut iso_msg = iso_spec::new_msg(spec, msg_seg);\n\n\n\n iso_msg.set(\"message_type\", \"1100\").unwrap();\n\n iso_msg.set_on(2, \"4567909845671235\").unwrap();\n\n iso_msg.set_on(3, \"004000\").unwrap();\n\n iso_msg.set_on(4, \"000000000029\").unwrap();\n\n iso_msg.set_on(11, \"779581\").unwrap();\n\n iso_msg.set_on(14, \"2204\").unwrap();\n\n iso_msg.set_on(19, \"840\").unwrap();\n\n\n\n\n\n let mut cfg = Config::new();\n\n cfg.with_pin(ISO0, String::from(\"e0f4543f3e2a2c5ffc7e5e5a222e3e4d\"))\n\n .with_mac(RetailMac, Type1, String::from(\"e0f4543f3e2a2c5ffc7e5e5a222e3e4d\"));\n\n\n", "file_path": "src/iso8583/test.rs", "rank": 26, "score": 29708.719221011568 }, { "content": " iso_msg.set_on(4, \"000000000199\").unwrap();\n\n iso_msg.set_on(11, \"779581\").unwrap();\n\n iso_msg.set_on(14, \"2204\").unwrap();\n\n iso_msg.set_on(19, \"840\").unwrap();\n\n iso_msg.set_on(96, \"1234\").unwrap();\n\n iso_msg.set_on(160, \"5678\").unwrap();\n\n\n\n\n\n match client.send(&iso_msg) {\n\n Ok(resp_iso_msg) => {\n\n println!(\"Received {} \\n {}\", resp_iso_msg.msg.name(), resp_iso_msg);\n\n }\n\n Err(e) => {\n\n eprintln!(\"{:?}\", e)\n\n }\n\n }\n\n }\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "src/iso8583/test.rs", "rank": 27, "score": 29706.200272526832 }, { "content": "\n\n #[test]\n\n #[ignore]\n\n fn test_send_recv_iso_1420() -> Result<(), IsoError> {\n\n\n\n let path = 
Path::new(\".\").join(\"sample_spec\").join(\"sample_spec.yaml\");\n\n std::env::set_var(\"SPEC_FILE\", path.to_str().unwrap());\n\n\n\n let spec = crate::iso8583::iso_spec::spec(\"\");\n\n let msg_seg = spec.get_message_from_header(\"1420\").unwrap();\n\n\n\n let mut client = ISOTcpClient::new(\"localhost:6666\", &spec, MLI2E);\n\n\n\n //send 10 messages\n\n for _ in 1..10 {\n\n let mut iso_msg = iso_spec::new_msg(spec, msg_seg);\n\n\n\n iso_msg.set(\"message_type\", \"1420\").unwrap();\n\n iso_msg.set_on(2, \"123456789101\").unwrap();\n\n iso_msg.set_on(3, \"004000\").unwrap();\n", "file_path": "src/iso8583/test.rs", "rank": 28, "score": 29705.674661786335 }, { "content": "\n\n //start --------- set pin - F52\n\n\n\n //this will compute a pin based on cfg and the supplied pan and set bit position 52\n\n iso_msg.set_pin(\"1234\", iso_msg.bmp_child_value(2).unwrap().as_str(), &cfg).unwrap();\n\n\n\n // You can also directly set this if there are other means of computing the pin block\n\n // iso_msg.set_on(52, \"0102030405060708\").unwrap(); //binary field are represented in their hex encoded format\n\n\n\n //end --------- set pin - F52\n\n\n\n\n\n // some other fields\n\n iso_msg.set_on(61, \"reserved_1\").unwrap();\n\n iso_msg.set_on(62, \"reserved-2\").unwrap();\n\n iso_msg.set_on(63, \"87877622525\").unwrap();\n\n iso_msg.set_on(96, \"1234\").unwrap();\n\n\n\n //start --------- set mac - either F64 or F128\n\n //\n", "file_path": "src/iso8583/test.rs", "rank": 29, "score": 29700.799980895095 }, { "content": "type DesCbc = block_modes::Cbc::<des::Des, block_modes::block_padding::NoPadding>;\n\n\n\n#[allow(unused)]\n\npub(crate) fn des_decrypt_cbc(data: &Vec<u8>, key: &Vec<u8>, iv: &Vec<u8>) -> Result<Vec<u8>, CryptoError> {\n\n let block_cipher = DesCbc::new_var(key.as_slice(), iv.as_slice()).unwrap();\n\n\n\n match block_cipher.decrypt_vec(data) {\n\n Ok(d) => {\n\n Ok(d)\n\n }\n\n Err(e) => {\n\n Err(CryptoError { msg: e.to_string() })\n\n }\n\n }\n\n}", 
"file_path": "src/crypto/mod.rs", "rank": 40, "score": 19170.512712959022 }, { "content": "type TripleDESCBC = block_modes::Cbc::<des::TdesEde2, block_modes::block_padding::NoPadding>;\n\n\n\npub(crate) fn tdes_encrypt_cbc(data: &Vec<u8>, key: &Vec<u8>, iv: &Vec<u8>) -> Vec<u8> {\n\n let block_cipher = TripleDESCBC::new_var(key.as_slice(), &iv.as_slice()).unwrap();\n\n\n\n let encrypted_data = block_cipher.encrypt_vec(data.as_slice());\n\n encrypted_data\n\n}\n\n\n\n\n\npub(crate) fn des_encrypt_cbc(data: &Vec<u8>, key: &Vec<u8>, iv: &Vec<u8>) -> Vec<u8> {\n\n let block_cipher = block_modes::Cbc::<des::Des, block_modes::block_padding::NoPadding>::new_var(key.as_slice(), iv.as_slice()).unwrap();\n\n block_cipher.encrypt_vec(data)\n\n}\n\n\n", "file_path": "src/crypto/mod.rs", "rank": 41, "score": 18619.22709690918 }, { "content": " pub encoding: Encoding,\n\n // Position of the field within the parent\n\n pub position: u32,\n\n}\n\n\n\nimpl Field for FixedField {\n\n fn name(&self) -> &String {\n\n &self.name\n\n }\n\n\n\n fn parse(self: &Self, in_buf: &mut dyn BufRead, f2d_map: &mut HashMap<String, Vec<u8>>) -> Result<(), ParseError> {\n\n let mut f_data = vec![0; self.len as usize];\n\n match in_buf.read_exact(&mut f_data[..]) {\n\n Ok(_) => {\n\n f2d_map.insert(self.name.clone(), f_data);\n\n Ok(())\n\n }\n\n Err(_) => {\n\n Err(ParseError { msg: format!(\"not enough data to parse - {}\", self.name) })\n\n }\n", "file_path": "src/iso8583/field.rs", "rank": 42, "score": 23.607669574259564 }, { "content": " Ok(())\n\n }\n\n Err(e) => Err(e)\n\n }\n\n }\n\n\n\n /// Assembles the messages into a Vec<u8> or a IsoError on failure\n\n pub fn assemble(&self) -> Result<Vec<u8>, IsoError> {\n\n let mut out_buf: Vec<u8> = Vec::new();\n\n for f in &self.msg.fields {\n\n match f.assemble(&mut out_buf, &self) {\n\n Ok(_) => {}\n\n Err(e) => {\n\n return Err(IsoError { msg: e.msg });\n\n }\n\n }\n\n }\n\n Ok(out_buf)\n\n }\n\n\n", "file_path": "src/iso8583/iso_spec.rs", "rank": 
43, "score": 23.051201680036602 }, { "content": " /// or a IsoError on failure\n\n pub fn send(&mut self, iso_msg: &IsoMsg) -> Result<IsoMsg, IsoError> {\n\n match iso_msg.assemble() {\n\n Ok(data) => {\n\n let mut buf = self.mli.create(&data.len()).unwrap();\n\n buf.extend(data);\n\n self.send_recv(&buf)\n\n }\n\n Err(e) => {\n\n Err(IsoError { msg: format!(\"Failed to assemble request message: {}\", e.msg) })\n\n }\n\n }\n\n }\n\n\n\n fn send_recv(&mut self, raw_msg: &Vec<u8>) -> Result<IsoMsg, IsoError> {\n\n println!(\"raw iso msg = {}\", hex::encode(raw_msg.as_slice()));\n\n\n\n if self._tcp_stream.is_none() {\n\n self._tcp_stream = match TcpStream::connect(&self.server_addr) {\n\n Err(e) => return Err(IsoError { msg: e.to_string() }),\n", "file_path": "src/iso8583/client.rs", "rank": 44, "score": 22.926714532980643 }, { "content": " }\n\n\n\n /// Returns the MAC'ing algorithm associated with this config\n\n pub fn get_mac_algo(&self) -> &Option<MacAlgo> {\n\n &self.mac_algo\n\n }\n\n\n\n /// Returns the MAC padding scheme associated with this config\n\n pub fn get_mac_padding(&self) -> &Option<PaddingType> {\n\n &self.mac_padding\n\n }\n\n\n\n\n\n /// Use the Config with a builder pattern\n\n pub fn with_pin(&mut self, fmt: PinFormat, key: String) -> &mut Config {\n\n self.pin_format = Some(fmt);\n\n self.pin_key = Some(key);\n\n self\n\n }\n\n\n\n /// Use the Config with a builder pattern\n\n pub fn with_mac(&mut self, algo: MacAlgo, mac_padding: PaddingType, key: String) -> &mut Config {\n\n self.mac_algo = Some(algo);\n\n self.mac_key = Some(key);\n\n self.mac_padding = Some(mac_padding);\n\n self\n\n }\n\n}", "file_path": "src/iso8583/config.rs", "rank": 45, "score": 22.85476538824236 }, { "content": " /// Sets F52 based on provided clear pin, and format, key provided via cfg\n\n pub fn set_pin(&mut self, pin: &str, pan: &str, cfg: &Config) -> Result<(), IsoError> {\n\n if cfg.get_pin_fmt().is_none() || cfg.get_pin_key().is_none() {\n\n return 
Err(IsoError { msg: format!(\"missing pin_format or key in call to set_pin\") });\n\n }\n\n\n\n match generate_pin_block(&cfg.get_pin_fmt().as_ref().unwrap(), pin, pan, cfg.get_pin_key().as_ref().unwrap().as_str()) {\n\n Ok(v) => {\n\n self.set_on(52, hex::encode(v).as_str())\n\n }\n\n Err(e) => {\n\n Err(IsoError { msg: e.msg })\n\n }\n\n }\n\n }\n\n\n\n /// Sets F64 or F128 based on algo, padding and key provided via cfg\n\n pub fn set_mac(&mut self, cfg: &Config) -> Result<(), IsoError> {\n\n if cfg.get_mac_algo().is_none() || cfg.get_mac_padding().is_none() || cfg.get_mac_key().is_none() {\n\n return Err(IsoError { msg: format!(\"missing mac_algo or padding or key in call to set_mac\") });\n", "file_path": "src/iso8583/iso_spec.rs", "rank": 46, "score": 22.732028504924624 }, { "content": "## Example Server Application: \n\n(from main.rs)\n\n\n\n``` rust\n\nextern crate byteorder;\n\nextern crate hex;\n\nextern crate lazy_static;\n\n#[macro_use]\n\nextern crate log;\n\nextern crate simplelog;\n\n#[macro_use]\n\nextern crate hex_literal;\n\n\n\nuse iso8583_rs::iso8583::iso_spec::{IsoMsg, new_msg};\n\nuse iso8583_rs::iso8583::IsoError;\n\nuse iso8583_rs::iso8583::mli::MLIType::MLI2E;\n\nuse iso8583_rs::iso8583::server::ISOServer;\n\nuse iso8583_rs::iso8583::server::MsgProcessor;\n\nuse iso8583_rs::crypto::pin::verify_pin;\n\nuse iso8583_rs::crypto::pin::PinFormat::ISO0;\n\nuse std::path::Path;\n\nuse iso8583_rs::crypto::mac::MacAlgo::RetailMac;\n\nuse iso8583_rs::crypto::mac::PaddingType::Type1;\n\nuse iso8583_rs::crypto::mac::verify_mac;\n\n\n\n\n\n// Below is an example implementation of a MsgProcessor i.e the entity responsible for handling incoming messages\n\n// at the server\n\n#[derive(Copy, Clone)]\n\npub struct SampleMsgProcessor {}\n\n\n\n\n\nimpl MsgProcessor for SampleMsgProcessor {\n\n fn process(&self, iso_server: &ISOServer, msg: &mut Vec<u8>) -> Result<(Vec<u8>, IsoMsg), IsoError> {\n\n match iso_server.spec.parse(msg) {\n\n Ok(iso_msg) => {\n\n 
debug!(\"parsed incoming request - message = \\\"{}\\\" successfully. \\n : parsed message: \\n --- \\n {} \\n ----\\n\",\n\n iso_msg.msg.name(), iso_msg);\n\n\n\n let req_msg_type = iso_msg.get_field_value(&\"message_type\".to_string()).unwrap();\n\n let resp_msg_type = if req_msg_type == \"1100\" {\n\n \"1110\"\n\n } else if req_msg_type == \"1420\" {\n\n \"1430\"\n\n } else {\n\n return Err(IsoError { msg: format!(\"unsupported msg_type {}\", req_msg_type) });\n\n };\n\n\n\n\n", "file_path": "README.md", "rank": 47, "score": 22.181290314083533 }, { "content": " }\n\n\n\n /// Returns a message that corresponds to the given header value or an IsoError if such a selector\n\n /// doesn't exist\n\n pub fn get_message_from_header(&self, header_val: &str) -> Result<&MessageSegment, IsoError> {\n\n for msg in &self.messages {\n\n if msg.selector.contains(&header_val.to_string()) {\n\n return Ok(msg);\n\n }\n\n }\n\n return Err(IsoError { msg: format!(\"message not found for header - {}\", header_val) });\n\n }\n\n\n\n /// Returns a segment by first parsing the header field and then matching the header value against\n\n /// the selector\n\n pub fn get_msg_segment(&'static self, data: &Vec<u8>) -> Result<&MessageSegment, IsoError> {\n\n let mut selector = String::new();\n\n let mut f2d_map = HashMap::new();\n\n\n\n let mut in_buf = Cursor::new(data);\n", "file_path": "src/iso8583/iso_spec.rs", "rank": 48, "score": 21.42929084426514 }, { "content": "\n\n match generate_mac(&cfg.get_mac_algo().as_ref().unwrap(), &cfg.get_mac_padding().as_ref().unwrap(),\n\n &data[0..data.len()-8].to_vec(), &hex::decode(cfg.get_mac_key().as_ref().unwrap()).unwrap()) {\n\n Ok(v) => {\n\n if self.bmp.is_on(1) {\n\n self.set_on(128, hex::encode(v).as_str());\n\n } else {\n\n self.set_on(64, hex::encode(v).as_str());\n\n }\n\n Ok(())\n\n }\n\n Err(e) => {\n\n Err(IsoError { msg: e.msg })\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/iso8583/iso_spec.rs", "rank": 49, "score": 21.17606418574072 
}, { "content": " pub fn set(&mut self, name: &str, val: &str) -> Result<(), IsoError> {\n\n match self.msg.field_by_name(&name.to_string()) {\n\n Ok(f) => {\n\n self.fd_map.insert(f.name().clone(), f.to_raw(val));\n\n Ok(())\n\n }\n\n Err(e) => Err(e)\n\n }\n\n }\n\n\n\n /// Sets a field in the bitmap with the given value\n\n pub fn set_on(&mut self, pos: u32, val: &str) -> Result<(), IsoError> {\n\n match self.msg.field_by_name(&\"bitmap\".to_string()) {\n\n Ok(f) => {\n\n let cf = f.child_by_pos(pos);\n\n self.fd_map.insert(cf.name().clone(), cf.to_raw(val));\n\n self.bmp.set_on(pos);\n\n Ok(())\n\n }\n\n Err(e) => Err(e)\n", "file_path": "src/iso8583/iso_spec.rs", "rank": 50, "score": 21.00961480197616 }, { "content": " trace!(\"parsed-data (len-ind) : {}\", hex::encode(&len_data));\n\n\n\n\n\n let data_len = self.data_len(&len_data);\n\n let mut f_data = vec![0; data_len as usize];\n\n\n\n match in_buf.read_exact(&mut f_data[..]) {\n\n Ok(_) => {\n\n f2d_map.insert(self.name.clone(), f_data);\n\n Ok(())\n\n }\n\n Err(e) => {\n\n Result::Err(ParseError { msg: format!(\"insufficient data, failed to parse {}, Error = {}\", self.name, e.to_string()) })\n\n }\n\n }\n\n }\n\n Err(_) => {\n\n Result::Err(ParseError { msg: format!(\"insufficient data, failed to parse length indicator for - {}\", self.name) })\n\n }\n\n }\n", "file_path": "src/iso8583/field.rs", "rank": 51, "score": 20.75768093185655 }, { "content": " if iso_msg.bmp.is_on(64) || iso_msg.bmp.is_on(128) {\n\n\n\n let key=hex!(\"e0f4543f3e2a2c5ffc7e5e5a222e3e4d\").to_vec();\n\n let expected_mac = match iso_msg.bmp.is_on(64) {\n\n true => {\n\n iso_msg.bmp_child_value(64)\n\n }\n\n false => {\n\n iso_msg.bmp_child_value(128)\n\n }\n\n };\n\n let mac_data=&raw_msg.as_slice()[0..raw_msg.len() - 8];\n\n match verify_mac(&RetailMac, &Type1, mac_data, &key, &hex::decode(expected_mac.unwrap()).unwrap()) {\n\n Ok(_) => {\n\n debug!(\"mac verified OK!\");\n\n }\n\n Err(e) => {\n\n error!(\"failed to verify mac. 
Reason: {}\", e.msg);\n\n iso_resp_msg.set(\"message_type\", \"1110\").unwrap_or_default();\n\n iso_resp_msg.set_on(39, \"916\").unwrap_or_default();\n\n iso_resp_msg.echo_from(&iso_msg, &[2, 3, 4, 11, 14, 19, 96]);\n\n return Ok(());\n\n }\n\n }\n\n }\n\n\n\n\n\n if !iso_msg.bmp.is_on(4) {\n\n error!(\"No amount in request, responding with F39 = 115 \");\n\n iso_resp_msg.set(\"message_type\", \"1110\").unwrap_or_default();\n\n iso_resp_msg.set_on(39, \"115\").unwrap_or_default();\n\n iso_resp_msg.echo_from(&iso_msg, &[2, 3, 4, 11, 14, 19, 96])\n\n } else {\n\n // process the incoming request based on amount\n\n let amt = iso_msg.bmp_child_value(4).unwrap();\n\n match amt.parse::<u32>() {\n\n Ok(i_amt) => {\n\n debug!(\"amount = {}\", i_amt);\n\n if i_amt < 100 {\n\n iso_resp_msg.set_on(39, \"000\").unwrap_or_default();\n\n } else {\n\n iso_resp_msg.set_on(39, \"100\").unwrap_or_default();\n", "file_path": "README.md", "rank": 52, "score": 20.138818993672384 }, { "content": " fd_map: HashMap::new(),\n\n bmp: Bitmap::new(0, 0, 0),\n\n };\n\n\n\n let mut cp_data = Cursor::new(data);\n\n\n\n for f in &iso_msg.msg.fields {\n\n debug!(\"parsing field : {}\", f.name());\n\n let res = match f.parse(&mut cp_data, &mut iso_msg.fd_map) {\n\n Err(e) => Result::Err(e),\n\n Ok(_) => {\n\n //if this is \"THE\" bitmap, then save it on isomsg\n\n if f.name() == \"bitmap\" {\n\n let bmp_data = iso_msg.fd_map.get(f.name()).unwrap();\n\n iso_msg.bmp = Bitmap::from_vec(bmp_data);\n\n }\n\n Ok(())\n\n }\n\n };\n\n\n\n if res.is_err() {\n\n return Result::Err(res.err().unwrap());\n\n }\n\n }\n\n Ok(iso_msg)\n\n }\n\n}", "file_path": "src/iso8583/iso_spec.rs", "rank": 53, "score": 20.037654254987558 }, { "content": " }\n\n\n\n\n\n fn assemble(&self, out_buf: &mut Vec<u8>, iso_msg: &iso_spec::IsoMsg) -> Result<u32, ParseError> {\n\n let bmp_data = iso_msg.bmp.as_vec();\n\n out_buf.extend(bmp_data);\n\n\n\n for pos in 2..193 {\n\n if iso_msg.bmp.is_on(pos) {\n\n if pos == 1 || pos == 65 
{\n\n continue;\n\n }\n\n\n\n match self.by_position(pos) {\n\n Ok(f) => {\n\n match iso_msg.fd_map.get(f.name()) {\n\n Some(_) => {\n\n match f.assemble(out_buf, iso_msg) {\n\n Ok(_) => {}\n\n Err(e) => {\n", "file_path": "src/iso8583/bitmap.rs", "rank": 54, "score": 19.9305750526096 }, { "content": "\n\n match client.read_exact(&mut out_buf[..]) {\n\n Ok(()) => {\n\n println!(\"received response: with {} bytes. \\n {}\\n\", len, get_hexdump(&out_buf));\n\n match self.spec.parse(&mut out_buf) {\n\n Ok(resp_iso_msg) => {\n\n Ok(resp_iso_msg)\n\n }\n\n Err(e) => {\n\n Err(IsoError { msg: e.msg })\n\n }\n\n }\n\n }\n\n Err(e) => {\n\n Err(IsoError { msg: e.to_string() })\n\n }\n\n }\n\n }\n\n\n\n pub fn close(&mut self) {\n\n self._tcp_stream.as_ref().unwrap().shutdown(Shutdown::Both);\n\n }\n\n}", "file_path": "src/iso8583/client.rs", "rank": 55, "score": 19.6378733634269 }, { "content": " }\n\n}\n\n\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use byteorder::WriteBytesExt;\n\n use crate::iso8583::mli::{MLI2E, MLI4E, MLI2I, MLI4I};\n\n use crate::iso8583::mli::MLI;\n\n use std::io::{Cursor};\n\n\n\n #[test]\n\n fn test_2e() {\n\n let msg = \"hello world\";\n\n let mut data: Vec<u8> = vec![];\n\n data.write_u16::<byteorder::BigEndian>(msg.len() as u16);\n\n data.extend_from_slice(msg.as_bytes());\n\n\n\n\n\n let mli: &dyn MLI = &MLI2E {};\n", "file_path": "src/iso8583/mli.rs", "rank": 56, "score": 19.236791186335836 }, { "content": "//! This module deals with various configurations that can be applied while creating a iso msg like\n\n//! 
crypto field F52, F64/128 etc\n\n\n\nuse crate::crypto::pin::PinFormat;\n\nuse crate::crypto::mac::{MacAlgo, PaddingType};\n\n\n\npub struct Config {\n\n pin_format: Option<PinFormat>,\n\n pin_key: Option<String>,\n\n mac_algo: Option<MacAlgo>,\n\n mac_padding: Option<PaddingType>,\n\n mac_key: Option<String>,\n\n}\n\n\n\n\n\nimpl Config {\n\n // Creates a new empty Config\n\n pub fn new() -> Config {\n\n Config {\n\n pin_format: None,\n", "file_path": "src/iso8583/config.rs", "rank": 57, "score": 18.934722028650235 }, { "content": " Ok(n) => {\n\n mli = n;\n\n reading_mli = false;\n\n }\n\n Err(e) => {\n\n error!(\"client socket_err: {} {}\", &stream.peer_addr().unwrap().to_string(), e.msg);\n\n break 'done;\n\n }\n\n };\n\n } else {\n\n if mli > 0 {\n\n let mut data = vec![0; mli as usize];\n\n match reader.read_exact(&mut data[..]) {\n\n Err(e) => {\n\n error!(\"client socket_err: {} {}\", stream.peer_addr().unwrap().to_string(), e.to_string());\n\n break 'done;\n\n }\n\n _ => (),\n\n };\n\n\n", "file_path": "src/iso8583/server.rs", "rank": 58, "score": 18.447788331457357 }, { "content": " let mut iso_resp_msg = new_msg(&iso_msg.spec, &iso_msg.spec.get_message_from_header(resp_msg_type).unwrap());\n\n\n\n if req_msg_type == \"1420\" {\n\n iso_resp_msg.set(\"message_type\", resp_msg_type).unwrap_or_default();\n\n iso_resp_msg.echo_from(&iso_msg, &[2, 3, 4, 11, 14, 19, 96])?;\n\n iso_resp_msg.set_on(39, \"400\").unwrap_or_default();\n\n } else if req_msg_type == \"1100\" {\n\n handle_1100(&iso_msg, msg, &mut iso_resp_msg)?\n\n }\n\n\n\n\n\n match iso_resp_msg.assemble() {\n\n Ok(resp_data) => Ok((resp_data, iso_resp_msg)),\n\n Err(e) => {\n\n error!(\"Failed to assemble response message, dropping message - {}\", e.msg);\n\n Err(IsoError { msg: format!(\"error: msg assembly failed..{} \", e.msg) })\n\n }\n\n }\n\n }\n\n Err(e) => {\n\n Err(IsoError { msg: e.msg })\n\n }\n\n }\n\n }\n\n}\n\n\n\n\n\n// Handle the incoming 1100 message based on amount\n\n// if amount 
(F4) <100 then\n\n// F38 = APPR01;\n\n// F39 = 000;\n\n// else\n\n// F39 = 100;\n\n//\n\n//\n\nfn handle_1100(iso_msg: &IsoMsg, raw_msg: &Vec<u8>, iso_resp_msg: &mut IsoMsg) -> Result<(), IsoError> {\n\n iso_resp_msg.set(\"message_type\", \"1110\").unwrap_or_default();\n\n //validate the mac\n", "file_path": "README.md", "rank": 59, "score": 18.430889492069383 }, { "content": " match verify_pin(&ISO0, \"12341123456\", &p, \"4111111111111111\", \"e0f4543f3e2a2c5ffc7e5e5a222e3e4d\") {\n\n Ok(res) => {\n\n assert_eq!(res, true)\n\n }\n\n Err(e) => {\n\n assert!(false, e.msg.to_string());\n\n }\n\n }\n\n }\n\n Err(e) => {\n\n assert!(false, e.msg.to_string());\n\n }\n\n }\n\n }\n\n\n\n #[test]\n\n fn test_iso1() {\n\n match generate_pin_block(&ISO1, \"8976\", \"4111111111111111\", \"e0f4543f3e2a2c5ffc7e5e5a222e3e4d\") {\n\n Ok(p) => {\n\n match verify_pin(&ISO1, \"8976\", &p, \"4111111111111111\", \"e0f4543f3e2a2c5ffc7e5e5a222e3e4d\") {\n", "file_path": "src/crypto/pin.rs", "rank": 60, "score": 18.059023584088116 }, { "content": " match verify_pin(&ISO2, \"8976\", &p, \"4111111111111111\", \"e0f4543f3e2a2c5ffc7e5e5a222e3e4d\") {\n\n Ok(res) => {\n\n assert_eq!(res, true)\n\n }\n\n Err(e) => {\n\n assert!(false, e.msg.to_string());\n\n }\n\n }\n\n }\n\n Err(e) => {\n\n assert!(false, e.msg.to_string());\n\n }\n\n }\n\n }\n\n\n\n #[test]\n\n fn test_iso3() {\n\n match generate_pin_block(&ISO3, \"1234\", \"4111111111111111\", \"e0f4543f3e2a2c5ffc7e5e5a222e3e4d\") {\n\n Ok(p) => {\n\n match verify_pin(&ISO3, \"1234\", &p, \"4111111111111111\", \"e0f4543f3e2a2c5ffc7e5e5a222e3e4d\") {\n", "file_path": "src/crypto/pin.rs", "rank": 61, "score": 18.059023584088116 }, { "content": " }\n\n\n\n\n\n if self.bmp.is_on(1) {\n\n self.set_on(128,\"0000000000000000\");\n\n } else {\n\n self.set_on(64,\"0000000000000000\");\n\n }\n\n\n\n\n\n let data: Vec<u8> = match self.assemble() {\n\n Ok(v) => {\n\n v\n\n }\n\n Err(e) => {\n\n return Err(e);\n\n }\n\n };\n\n\n\n debug!(\"generating 
mac on: {}\", hex::encode(&data));\n", "file_path": "src/iso8583/iso_spec.rs", "rank": 62, "score": 17.997716193673725 }, { "content": " assert_eq!(mli.parse(&mut Cursor::new(data)).unwrap(), 11 as u32);\n\n assert_eq!(mli.create(&(msg.len() as usize)).unwrap(), vec![0 as u8, 0x0b as u8]);\n\n }\n\n\n\n #[test]\n\n fn test_2i() {\n\n let msg = \"hello world\";\n\n let mut data: Vec<u8> = vec![];\n\n data.write_u16::<byteorder::BigEndian>((msg.len() + 2) as u16);\n\n data.extend_from_slice(msg.as_bytes());\n\n\n\n\n\n let mli: &dyn MLI = &MLI2I {};\n\n assert_eq!(mli.parse(&mut Cursor::new(data)).unwrap(), 11 as u32);\n\n assert_eq!(mli.create(&(msg.len() as usize)).unwrap(), vec![0 as u8, 0x0d as u8]);\n\n }\n\n\n\n #[test]\n\n fn test_4e() {\n\n let mut msg = String::new();\n", "file_path": "src/iso8583/mli.rs", "rank": 63, "score": 17.992846520965365 }, { "content": " }\n\n }\n\n\n\n /// Echoes (sets the value with the identical field in req_msg) for given positions in the bitmap\n\n pub fn echo_from(&mut self, req_msg: &IsoMsg, positions: &[u32]) -> Result<(), IsoError> {\n\n match self.msg.field_by_name(&\"bitmap\".to_string()) {\n\n Ok(f) => {\n\n for pos in positions {\n\n let cf = f.child_by_pos(*pos);\n\n match req_msg.bmp_child_value(*pos) {\n\n Ok(res) => {\n\n debug!(\"echoing .. 
{}: {}\", pos, res);\n\n self.fd_map.insert(cf.name().clone(), cf.to_raw(res.as_str()));\n\n self.bmp.set_on(*pos);\n\n }\n\n Err(e) => {\n\n return Err(e);\n\n }\n\n }\n\n }\n", "file_path": "src/iso8583/iso_spec.rs", "rank": 64, "score": 17.861961581639015 }, { "content": "impl Field for BmpField {\n\n fn name(&self) -> &String {\n\n &self.name\n\n }\n\n\n\n fn parse(&self, in_buf: &mut dyn BufRead, f2d_map: &mut HashMap<String, Vec<u8>>) -> Result<(), ParseError> {\n\n let mut f_data = vec![0; 8];\n\n\n\n match in_buf.read_exact(&mut f_data[..]) {\n\n Ok(_) => {\n\n let b1 = byteorder::BigEndian::read_u64(f_data.as_slice());\n\n let mut b2: u64 = 0;\n\n let mut b3: u64 = 0;\n\n\n\n if f_data[0] & 0x80 == 0x80 {\n\n let mut s_bmp_data = vec![0; 8];\n\n match in_buf.read_exact(&mut s_bmp_data[..]) {\n\n Ok(_) => {\n\n trace!(\"parsed sec...\");\n\n b2 = byteorder::BigEndian::read_u64(s_bmp_data.as_slice());\n", "file_path": "src/iso8583/bitmap.rs", "rank": 65, "score": 17.715879566211562 }, { "content": " Ok(res) => {\n\n assert_eq!(res, true)\n\n }\n\n Err(e) => {\n\n assert!(false, e.msg.to_string());\n\n }\n\n }\n\n }\n\n Err(e) => {\n\n assert!(false, e.msg.to_string());\n\n }\n\n }\n\n }\n\n\n\n #[test]\n\n fn test_iso2() {\n\n match generate_pin_block(&ISO2, \"8976\", \"4111111111111111\", \"e0f4543f3e2a2c5ffc7e5e5a222e3e4d\") {\n\n Ok(p) => {\n\n assert_eq!(hex::encode(&p), \"795e511357332491\");\n\n\n", "file_path": "src/crypto/pin.rs", "rank": 66, "score": 17.569839915376207 }, { "content": "\n\n for f in &self.header_fields {\n\n match f.parse(&mut in_buf, &mut f2d_map) {\n\n Ok(_) => {\n\n selector.extend(f.to_string(f2d_map.get(f.name()).unwrap()).chars());\n\n }\n\n Err(e) => {\n\n return Err(IsoError { msg: e.msg });\n\n }\n\n }\n\n }\n\n\n\n debug!(\"computed header value for incoming message = {}\", selector);\n\n match self.get_message_from_header(selector.as_str()) {\n\n Ok(msg) => {\n\n Ok(msg)\n\n }\n\n Err(e) => Err(e)\n\n }\n\n }\n", 
"file_path": "src/iso8583/iso_spec.rs", "rank": 67, "score": 17.485764244022423 }, { "content": "\n\n fn create(&self, n: &usize) -> Result<Vec<u8>, IsoError> {\n\n let mut mli = Vec::<u8>::new();\n\n let _ = mli.write_u32::<byteorder::BigEndian>(n.clone() as u32);\n\n Ok(mli)\n\n }\n\n}\n\n\n\n\n\nimpl MLI for MLI2I {\n\n fn parse(&self, in_buf: &mut dyn Read) -> Result<u32, IsoError> {\n\n match in_buf.read_u16::<byteorder::BigEndian>() {\n\n Ok(n) => Ok((n - 2) as u32),\n\n Err(e) => {\n\n Err(convert_err(&e))\n\n }\n\n }\n\n }\n\n\n\n fn create(&self, n: &usize) -> Result<Vec<u8>, IsoError> {\n", "file_path": "src/iso8583/mli.rs", "rank": 68, "score": 17.029959850878047 }, { "content": " }\n\n }\n\n\n\n fn create(&self, n: &usize) -> Result<Vec<u8>, IsoError> {\n\n let mut mli = Vec::<u8>::new();\n\n let _ = mli.write_u16::<byteorder::BigEndian>(n.clone() as u16);\n\n Ok(mli)\n\n }\n\n}\n\n\n\n\n\nimpl MLI for MLI4E {\n\n fn parse(&self, in_buf: &mut dyn Read) -> Result<u32, IsoError> {\n\n match in_buf.read_u32::<byteorder::BigEndian>() {\n\n Ok(n) => Ok(n),\n\n Err(e) => {\n\n Err(convert_err(&e))\n\n }\n\n }\n\n }\n", "file_path": "src/iso8583/mli.rs", "rank": 69, "score": 16.90443488616569 }, { "content": " for _ in 0..257 {\n\n msg.push('a');\n\n }\n\n let mut data: Vec<u8> = vec![];\n\n data.write_u32::<byteorder::BigEndian>(msg.len() as u32);\n\n data.extend_from_slice(msg.as_bytes());\n\n\n\n\n\n let mli: &dyn MLI = &MLI4E {};\n\n assert_eq!(mli.parse(&mut Cursor::new(data)).unwrap(), 257 as u32);\n\n assert_eq!(mli.create(&(msg.len() as usize)).unwrap(), vec![0x00, 0x00, 0x01 as u8, 0x01 as u8]);\n\n }\n\n\n\n #[test]\n\n fn test_4i() {\n\n let mut msg = String::new();\n\n for _ in 0..257 {\n\n msg.push('a');\n\n }\n\n let mut data: Vec<u8> = vec![];\n\n data.write_u32::<byteorder::BigEndian>((msg.len() + 4) as u32);\n\n data.extend_from_slice(msg.as_bytes());\n\n\n\n\n\n let mli: &dyn MLI = &MLI4I {};\n\n assert_eq!(mli.parse(&mut 
Cursor::new(data)).unwrap(), 257 as u32);\n\n assert_eq!(mli.create(&(msg.len() as usize)).unwrap(), vec![0x00, 0x00, 0x01 as u8, 0x05 as u8]);\n\n }\n\n}", "file_path": "src/iso8583/mli.rs", "rank": 70, "score": 16.797720887980226 }, { "content": " let mut mli = Vec::<u8>::new();\n\n let _ = mli.write_u16::<byteorder::BigEndian>((n.clone() as u16) + 2);\n\n Ok(mli)\n\n }\n\n}\n\n\n\nimpl MLI for MLI4I {\n\n fn parse(&self, in_buf: &mut dyn Read) -> Result<u32, IsoError> {\n\n match in_buf.read_u32::<byteorder::BigEndian>() {\n\n Ok(n) => Ok(n - 4),\n\n Err(e) => {\n\n Err(convert_err(&e))\n\n }\n\n }\n\n }\n\n\n\n fn create(&self, n: &usize) -> Result<Vec<u8>, IsoError> {\n\n let mut mli = Vec::<u8>::new();\n\n let _ = mli.write_u32::<byteorder::BigEndian>((n.clone() as u32) + 4);\n\n Ok(mli)\n", "file_path": "src/iso8583/mli.rs", "rank": 71, "score": 16.685314154799567 }, { "content": " match self.len {\n\n 1 => hex::decode(format!(\"{:02}\", len)).unwrap(),\n\n 2 => hex::decode(format!(\"{:04}\", len)).unwrap(),\n\n _ => panic!(\"Cannot support more than 2 bytes (4 BCD digits) of length indicator when expressed in bcd\")\n\n }\n\n }\n\n }\n\n }\n\n}\n\n\n\nimpl Field for VarField\n\n{\n\n fn name(&self) -> &String {\n\n &self.name\n\n }\n\n\n\n fn parse(&self, in_buf: &mut dyn BufRead, f2d_map: &mut HashMap<String, Vec<u8>>) -> Result<(), ParseError> {\n\n let mut len_data = vec![0; self.len as usize];\n\n match in_buf.read_exact(&mut len_data[..]) {\n\n Ok(_) => {\n", "file_path": "src/iso8583/field.rs", "rank": 72, "score": 16.648153075254065 }, { "content": "## Sample TCP client\n\n\n\n```rust\n\n \n\nfn test_send_recv_iso_1100() -> Result<(), IsoError> {\n\n let path = Path::new(\".\").join(\"sample_spec\").join(\"sample_spec.yaml\");\n\n std::env::set_var(\"SPEC_FILE\", path.to_str().unwrap());\n\n\n\n let spec = crate::iso8583::iso_spec::spec(\"\");\n\n let msg_seg = spec.get_message_from_header(\"1100\").unwrap();\n\n\n\n\n\n let mut iso_msg = 
iso_spec::new_msg(spec, msg_seg);\n\n\n\n iso_msg.set(\"message_type\", \"1100\").unwrap();\n\n iso_msg.set_on(2, \"4567909845671235\").unwrap();\n\n iso_msg.set_on(3, \"004000\").unwrap();\n\n iso_msg.set_on(4, \"000000000029\").unwrap();\n\n iso_msg.set_on(11, \"779581\").unwrap();\n\n iso_msg.set_on(14, \"2204\").unwrap();\n\n iso_msg.set_on(19, \"840\").unwrap();\n\n\n\n\n\n let mut cfg = Config::new();\n\n cfg.with_pin(ISO0, String::from(\"e0f4543f3e2a2c5ffc7e5e5a222e3e4d\"))\n\n .with_mac(RetailMac, Type1, String::from(\"e0f4543f3e2a2c5ffc7e5e5a222e3e4d\"));\n\n\n\n\n\n //--------- set pin - F52\n\n\n\n //this will compute a pin based on cfg and the supplied pan and set bit position 52\n\n iso_msg.set_pin(\"1234\", iso_msg.bmp_child_value(2).unwrap().as_str(), &cfg).unwrap();\n\n\n\n // You can also directly set this if there are other means of computing the pin block\n\n // iso_msg.set_on(52, \"0102030405060708\").unwrap(); //binary field are represented in their hex encoded format\n\n\n\n //--------- set pin - F52\n\n\n\n iso_msg.set_on(61, \"reserved_1\").unwrap();\n\n iso_msg.set_on(62, \"reserved-2\").unwrap();\n\n iso_msg.set_on(63, \"87877622525\").unwrap();\n\n iso_msg.set_on(96, \"1234\").unwrap();\n\n\n\n\n\n //--------- set mac - either F64 or F128\n\n iso_msg.set_mac(&cfg);\n\n //--------- set mac\n\n\n\n\n", "file_path": "README.md", "rank": 73, "score": 16.262488291711378 }, { "content": " Ok(res) => {\n\n let res = tdes_ede2_encrypt(&res, &hex::decode(key).unwrap().to_vec());\n\n Ok(res)\n\n }\n\n Err(e) => {\n\n Err(PinError { msg: e.to_string() })\n\n }\n\n }\n\n }\n\n\n\n PinFormat::ISO2 => {\n\n let mut b1 = format!(\"2{:X}{}\", c_pin.len(), c_pin);\n\n while b1.len() != 16 {\n\n b1.push('F');\n\n }\n\n println!(\"= {}\", b1);\n\n match hex::decode(b1) {\n\n Ok(res) => {\n\n let res = tdes_ede2_encrypt(&res, &hex::decode(key).unwrap().to_vec());\n\n Ok(res)\n", "file_path": "src/crypto/pin.rs", "rank": 74, "score": 16.09075067218276 }, { 
"content": " if s_bmp_data[0] & 0x80 == 0x80 {\n\n let mut t_bmp_data = vec![0; 8];\n\n match in_buf.read_exact(&mut t_bmp_data[..]) {\n\n Ok(_) => {\n\n trace!(\"parsed tertiary...\");\n\n b3 = byteorder::BigEndian::read_u64(t_bmp_data.as_slice());\n\n }\n\n Err(_) => {\n\n return Err(ParseError { msg: format!(\"failed to parse tertiary bitmap - {}\", self.name) });\n\n }\n\n }\n\n }\n\n }\n\n Err(_) => {\n\n return Err(ParseError { msg: format!(\"failed to secondary parse - {}\", self.name) });\n\n }\n\n }\n\n }\n\n\n\n\n", "file_path": "src/iso8583/bitmap.rs", "rank": 75, "score": 16.067521249141933 }, { "content": " pin_key: None,\n\n mac_algo: None,\n\n mac_key: None,\n\n mac_padding: None,\n\n }\n\n }\n\n\n\n /// Returns the PIN block format associated with this config\n\n pub fn get_pin_fmt(&self) -> &Option<PinFormat> {\n\n &self.pin_format\n\n }\n\n\n\n /// Returns the PIN key associated with this config\n\n pub fn get_pin_key(&self) -> &Option<String> {\n\n &self.pin_key\n\n }\n\n\n\n /// Returns the MAC key associated with this config\n\n pub fn get_mac_key(&self) -> &Option<String> {\n\n &self.mac_key\n", "file_path": "src/iso8583/config.rs", "rank": 76, "score": 15.9477463978858 }, { "content": " }\n\n }\n\n\n\n fn assemble(self: &Self, out_buf: &mut Vec<u8>, iso_msg: &IsoMsg) -> Result<u32, ParseError> {\n\n match iso_msg.fd_map.get(&self.name) {\n\n Some(fd) => {\n\n out_buf.extend(fd);\n\n Ok(fd.as_slice().len() as u32)\n\n }\n\n None => {\n\n Err(ParseError { msg: format!(\"field {} is not available!\", self.name) })\n\n }\n\n }\n\n }\n\n\n\n fn position(&self) -> u32 {\n\n return self.position;\n\n }\n\n\n\n fn children(&self) -> Vec<&dyn Field> {\n", "file_path": "src/iso8583/field.rs", "rank": 77, "score": 15.565862709907911 }, { "content": "\n\n /// Returns the value of a top level field like message_type\n\n pub fn get_field_value(&self, name: &String) -> Result<String, IsoError> {\n\n match self.msg.fields.iter().find(|f| -> bool {\n\n if 
f.name() == name {\n\n true\n\n } else {\n\n false\n\n }\n\n }) {\n\n Some(f) => {\n\n Ok(f.to_string(self.fd_map.get(name).unwrap()))\n\n }\n\n None => {\n\n Err(IsoError { msg: format!(\"No such field : {}\", name) })\n\n }\n\n }\n\n }\n\n\n\n /// sets a top-level field like message_type etc\n", "file_path": "src/iso8583/iso_spec.rs", "rank": 78, "score": 15.392606520644227 }, { "content": "\n\n\n\n// Below is an example implementation of a MsgProcessor i.e the entity responsible for handling incoming messages\n\n// at the server\n\n#[derive(Copy, Clone)]\n\npub struct SampleMsgProcessor {}\n\n\n\n\n\nimpl MsgProcessor for SampleMsgProcessor {\n\n fn process(&self, iso_server: &ISOServer, msg: &mut Vec<u8>) -> Result<(Vec<u8>, IsoMsg), IsoError> {\n\n match iso_server.spec.parse(msg) {\n\n Ok(iso_msg) => {\n\n debug!(\"parsed incoming request - message = \\\"{}\\\" successfully. \\n : parsed message: \\n --- \\n {} \\n ----\\n\",\n\n iso_msg.msg.name(), iso_msg);\n\n\n\n let req_msg_type = iso_msg.get_field_value(&\"message_type\".to_string()).unwrap();\n\n let resp_msg_type = if req_msg_type == \"1100\" {\n\n \"1110\"\n\n } else if req_msg_type == \"1420\" {\n\n \"1430\"\n", "file_path": "src/main.rs", "rank": 79, "score": 15.365115103842838 }, { "content": " /// Returns the value of a field by position in the bitmap\n\n pub fn bmp_child_value(&self, pos: u32) -> Result<String, IsoError> {\n\n let f = self.msg.fields.iter().find(|f| -> bool {\n\n if f.name() == \"bitmap\" {\n\n true\n\n } else {\n\n false\n\n }\n\n }).unwrap();\n\n\n\n let cf = f.child_by_pos(pos);\n\n match self.fd_map.get(cf.name()) {\n\n None => {\n\n Err(IsoError { msg: format!(\"no value for field at position {}\", pos) })\n\n }\n\n Some(v) => {\n\n Ok(cf.to_string(v))\n\n }\n\n }\n\n }\n", "file_path": "src/iso8583/iso_spec.rs", "rank": 80, "score": 15.342758473507086 }, { "content": " debug!(\"received request: \\n{}\\n len = {}\", get_hexdump(&data), mli);\n\n let t1 = 
std::time::Instant::now();\n\n\n\n match server.msg_processor.process(&server, &mut data) {\n\n Ok(resp) => {\n\n debug!(\"iso_response : {} \\n parsed :\\n --- {} \\n --- \\n\", get_hexdump(&resp.0), resp.1);\n\n match server.mli.create(&(resp.0).len()) {\n\n Ok(mut resp_data) => {\n\n debug!(\"request processing time = {} millis\", std::time::Instant::now().duration_since(t1).as_millis());\n\n (&mut resp_data).write_all(resp.0.as_slice()).unwrap();\n\n writer.write_all(resp_data.as_slice()).unwrap();\n\n writer.flush().unwrap();\n\n }\n\n Err(e) => {\n\n error!(\"failed to construct mli {}\", e.msg)\n\n }\n\n }\n\n }\n\n Err(e) => {\n\n error!(\"failed to handle incoming req - {}\", e.msg)\n", "file_path": "src/iso8583/server.rs", "rank": 81, "score": 15.315092712649282 }, { "content": " Ok(res) => {\n\n assert_eq!(res, true)\n\n }\n\n Err(e) => {\n\n assert!(false, e.msg.to_string());\n\n }\n\n }\n\n }\n\n Err(e) => {\n\n assert!(false, e.msg.to_string());\n\n }\n\n }\n\n\n\n match generate_pin_block(&ISO3, \"12341123456\", \"4111111111111111\", \"e0f4543f3e2a2c5ffc7e5e5a222e3e4d\") {\n\n Ok(p) => {\n\n match verify_pin(&ISO3, \"12341123456\", &p, \"4111111111111111\", \"e0f4543f3e2a2c5ffc7e5e5a222e3e4d\") {\n\n Ok(res) => {\n\n assert_eq!(res, true)\n\n }\n\n Err(e) => {\n", "file_path": "src/crypto/pin.rs", "rank": 82, "score": 15.298872124102818 }, { "content": " }\n\n\n\n /// Returns the bitmap as a hexadecimal string\n\n pub fn hex_string(&self) -> String {\n\n format!(\"{:016.0x}{:016.0x}{:016.0x}\", self.p_bmp, self.s_bmp, self.t_bmp)\n\n }\n\n\n\n /// Returns the bitmap as a Vec<u8>\n\n pub fn as_vec(&self) -> Vec<u8> {\n\n let mut bmp_data = vec![0; 8];\n\n\n\n byteorder::BigEndian::write_u64(&mut bmp_data[0..], self.p_bmp);\n\n if ((self.p_bmp >> 63) & 0x01) == 0x01 {\n\n bmp_data.resize(16, 0);\n\n byteorder::BigEndian::write_u64(&mut bmp_data[8..], self.s_bmp);\n\n }\n\n if ((self.s_bmp >> 63) & 0x01) == 0x01 {\n\n bmp_data.resize(24, 0);\n\n 
byteorder::BigEndian::write_u64(&mut bmp_data[16..], self.t_bmp);\n\n }\n", "file_path": "src/iso8583/bitmap.rs", "rank": 83, "score": 15.080208926394384 }, { "content": " }\n\n\n\n\n\n fn assemble(&self, out_buf: &mut Vec<u8>, iso_msg: &IsoMsg) -> Result<u32, ParseError> {\n\n match iso_msg.fd_map.get(&self.name) {\n\n Some(fd) => {\n\n let len_ind = self.build_len_ind(fd.len());\n\n out_buf.extend(len_ind);\n\n out_buf.extend(fd);\n\n //fd.as_slice().iter().for_each(|d| out_buf.push(*d));\n\n Ok(fd.as_slice().len() as u32)\n\n }\n\n None => {\n\n Err(ParseError { msg: format!(\"field {} is not available!\", self.name) })\n\n }\n\n }\n\n }\n\n\n\n\n\n fn position(&self) -> u32 {\n", "file_path": "src/iso8583/field.rs", "rank": 84, "score": 14.438574834220855 }, { "content": "#[cfg(test)]\n\nmod tests {\n\n use crate::iso8583::yaml_de::read_spec;\n\n use std::path::Path;\n\n\n\n #[test]\n\n fn test_deserialize_yaml_spec() {\n\n let path = Path::new(\".\").join(\"sample_spec\").join(\"sample_spec.yaml\");\n\n\n\n println!(\"path is {}\", path.to_str().unwrap());\n\n match read_spec(path.to_str().unwrap()) {\n\n Ok(spec) => {\n\n assert_eq!(2, (&spec.messages).len());\n\n }\n\n Err(e) => assert!(false, e)\n\n };\n\n }\n\n}\n", "file_path": "src/iso8583/yaml_de.rs", "rank": 85, "score": 14.423727680895677 }, { "content": " }\n\n\n\n fn to_raw(&self, val: &str) -> Vec<u8> {\n\n string_to_vec(&self.encoding, val)\n\n }\n\n}\n\n\n\npub(in crate::iso8583) fn vec_to_string(encoding: &Encoding, data: &Vec<u8>) -> String {\n\n match encoding {\n\n ASCII => {\n\n String::from_utf8(data.clone()).unwrap()\n\n }\n\n EBCDIC => {\n\n ebcdic_to_ascii(data)\n\n }\n\n BINARY => {\n\n hex::encode(data.as_slice())\n\n }\n\n BCD => {\n\n hex::encode(data.as_slice())\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/iso8583/field.rs", "rank": 86, "score": 14.414169369902773 }, { "content": " };\n\n\n\n iso_resp_msg.echo_from(&iso_msg, &[2, 3, 4, 11, 14, 19, 96])?;\n\n 
iso_resp_msg.fd_map.insert(\"bitmap\".to_string(), iso_resp_msg.bmp.as_vec());\n\n\n\n Ok(())\n\n }\n\n}\n\n\n\n\n\nfn main() {\n\n let path = Path::new(\".\").join(\"sample_spec\").join(\"sample_spec.yaml\");\n\n let spec_file = path.to_str().unwrap();\n\n std::env::set_var(\"SPEC_FILE\", spec_file);\n\n\n\n let _ = simplelog::SimpleLogger::init(simplelog::LevelFilter::Debug, simplelog::Config::default());\n\n\n\n let iso_spec = iso8583_rs::iso8583::iso_spec::spec(\"\");\n\n\n\n info!(\"starting iso server for spec {} at port {}\", iso_spec.name(), 6666);\n\n let server = match ISOServer::new(\"127.0.0.1:6666\".to_string(),\n\n iso_spec,\n\n MLI2E,\n\n Box::new(SampleMsgProcessor {})) {\n\n Ok(server) => {\n\n server\n\n }\n\n Err(e) => {\n\n error!(\"failed to start ISO server - {}\", e.msg);\n\n panic!(e)\n\n }\n\n };\n\n server.start().join().unwrap()\n\n}\n\n\n\n\n\n\n\n```\n\n\n", "file_path": "README.md", "rank": 87, "score": 14.360907348495989 }, { "content": " //assert_eq!(hex::encode(&p), \"6042012526a9c2e0\");\n\n\n\n match verify_pin(&ISO0, \"1234\", &p, \"4111111111111111\", \"e0f4543f3e2a2c5ffc7e5e5a222e3e4d\") {\n\n Ok(res) => {\n\n assert_eq!(res, true)\n\n }\n\n Err(e) => {\n\n assert!(false, e.msg.to_string());\n\n }\n\n }\n\n }\n\n Err(e) => {\n\n assert!(false, e.msg.to_string());\n\n }\n\n }\n\n\n\n match generate_pin_block(&ISO0, \"12341123456\", \"4111111111111111\", \"e0f4543f3e2a2c5ffc7e5e5a222e3e4d\") {\n\n Ok(p) => {\n\n //assert_eq!(hex::encode(&p), \"6042012526a9c2e0\");\n\n\n", "file_path": "src/crypto/pin.rs", "rank": 88, "score": 14.288602559702415 }, { "content": " }\n\n Err(e) => {\n\n Err(PinError { msg: e.to_string() })\n\n }\n\n }\n\n }\n\n\n\n PinFormat::ISO3 => {\n\n let mut b1 = format!(\"3{:X}{}\", c_pin.len(), c_pin);\n\n pad_8_a2f(&mut b1);\n\n println!(\"= {}\", b1);\n\n\n\n //rightmost 12 not including check digit\n\n let mut b2 = String::from(\"0000\");\n\n b2.push_str(&pan[pan.len() - 13..pan.len() - 1]);\n\n\n\n let 
res = xor_hexstr(b1.as_str(), b2.as_str());\n\n let res = tdes_ede2_encrypt(&res, &hex::decode(key).unwrap().to_vec());\n\n\n\n Ok(res.to_vec())\n\n }\n\n\n\n _ => {\n\n Err(PinError { msg: format!(\"{:?} is not supported yet.\", fmt) })\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/crypto/pin.rs", "rank": 89, "score": 14.256026757777013 }, { "content": " } else {\n\n return Err(IsoError { msg: format!(\"unsupported msg_type {}\", req_msg_type) });\n\n };\n\n\n\n\n\n let mut iso_resp_msg = new_msg(&iso_msg.spec, &iso_msg.spec.get_message_from_header(resp_msg_type).unwrap());\n\n\n\n if req_msg_type == \"1420\" {\n\n iso_resp_msg.set(\"message_type\", resp_msg_type).unwrap_or_default();\n\n iso_resp_msg.echo_from(&iso_msg, &[2, 3, 4, 11, 14, 19, 96])?;\n\n iso_resp_msg.set_on(39, \"400\").unwrap_or_default();\n\n } else if req_msg_type == \"1100\" {\n\n handle_1100(&iso_msg, msg, &mut iso_resp_msg)?\n\n }\n\n\n\n\n\n match iso_resp_msg.assemble() {\n\n Ok(resp_data) => Ok((resp_data, iso_resp_msg)),\n\n Err(e) => {\n\n error!(\"Failed to assemble response message, dropping message - {}\", e.msg);\n", "file_path": "src/main.rs", "rank": 90, "score": 14.05018553177495 }, { "content": "\n\n bmp_data\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::iso8583::bitmap::Bitmap;\n\n\n\n #[test]\n\n fn test_bmp() {\n\n let mut bmp = Bitmap::new(0, 0, 0);\n\n\n\n bmp.set_on(4);\n\n bmp.set_on(11);\n\n bmp.set_on(64);\n\n bmp.set_on(99);\n\n bmp.set_on(133);\n\n bmp.set_on(6);\n\n\n", "file_path": "src/iso8583/bitmap.rs", "rank": 91, "score": 13.921345553054042 }, { "content": " }\n\n }\n\n mli = 0;\n\n reading_mli = true;\n\n }\n\n }\n\n }\n\n });\n\n}\n\n\n\n\n\npub(in crate::iso8583) fn get_hexdump(data: &Vec<u8>) -> String {\n\n let mut hexdmp = String::new();\n\n hexdmp.push_str(\"\\n\");\n\n hexdump_iter(data).for_each(|f| {\n\n hexdmp.push_str(f.as_ref());\n\n hexdmp.push_str(\"\\n\");\n\n });\n\n hexdmp\n\n}\n\n\n\n\n\n\n", "file_path": "src/iso8583/server.rs", 
"rank": 92, "score": 13.41463986350788 }, { "content": " Some(f) => {\n\n Ok(f.as_ref())\n\n }\n\n }\n\n }\n\n}\n\n\n\nimpl Spec {\n\n pub fn name(&self) -> &str {\n\n &self.name\n\n }\n\n\n\n /// Returns a message segment given its name or a IsoError if such a segment is not present\n\n pub fn get_message(&self, name: &str) -> Result<&MessageSegment, IsoError> {\n\n for msg in &self.messages {\n\n if msg.name() == name {\n\n return Ok(msg);\n\n }\n\n }\n\n return Err(IsoError { msg: format!(\"{} message not found\", name) });\n", "file_path": "src/iso8583/iso_spec.rs", "rank": 93, "score": 13.332673360609949 }, { "content": " mli = Arc::new(Box::new(MLI4I {}));\n\n }\n\n }\n\n\n\n match host_port.to_socket_addrs() {\n\n Ok(addrs) => {\n\n let addrs = addrs.as_slice();\n\n //use only ipv4 for now\n\n let addrs = addrs.iter().filter(|s| s.is_ipv4()).map(|s| *s).collect::<Vec<SocketAddr>>();\n\n\n\n if addrs.len() > 0 {\n\n Ok(ISOServer { sock_addr: addrs, spec, mli, msg_processor: Arc::new(msg_processor) })\n\n } else {\n\n Err(IsoServerError { msg: format!(\"invalid host_port: {} : unresolvable?\", &host_port) })\n\n }\n\n }\n\n Err(e) => Err(IsoServerError { msg: format!(\"invalid host_port: {}: cause: {}\", &host_port, e.to_string()) })\n\n }\n\n }\n\n\n", "file_path": "src/iso8583/server.rs", "rank": 94, "score": 13.18232705113936 }, { "content": " }\n\n\n\n\n\n if iso_msg.bmp.is_on(52) {\n\n //validate the pin\n\n let f52 = iso_msg.bmp_child_value(52).unwrap();\n\n debug!(\"{}\", \"verifying pin ... 
\");\n\n match verify_pin(&ISO0, \"1234\", &hex::decode(f52).unwrap(),\n\n iso_msg.bmp_child_value(2).unwrap().as_str(), \"e0f4543f3e2a2c5ffc7e5e5a222e3e4d\") {\n\n Ok(res) => {\n\n if res {\n\n debug!(\"{}\", \"PIN verified OK.\");\n\n } else {\n\n warn!(\"{}\", \"PIN verified Failed!!\");\n\n iso_resp_msg.set_on(39, \"117\").unwrap_or_default();\n\n }\n\n }\n\n Err(e) => {\n\n error!(\"failed to verify PIN, {}\", e.msg);\n\n iso_resp_msg.set_on(39, \"126\").unwrap_or_default();\n\n }\n\n };\n\n }\n\n\n\n if iso_msg.bmp.is_on(61) {\n\n let mut val = iso_msg.bmp_child_value(61).unwrap();\n\n val += \"-OK\";\n\n iso_resp_msg.set_on(61, val.as_str()).unwrap();\n\n }\n\n\n\n if iso_msg.bmp.is_on(62) {\n\n let mut val = iso_msg.bmp_child_value(62).unwrap();\n\n val += \"-OK\";\n\n iso_resp_msg.set_on(62, val.as_str()).unwrap();\n\n }\n\n\n\n iso_resp_msg.set_on(63, \"007\").unwrap_or_default();\n\n iso_resp_msg.set_on(160, \"F160\").unwrap_or_default();\n\n\n\n\n\n if iso_resp_msg.bmp_child_value(39).unwrap() == \"000\" {\n\n // generate a approval code\n\n iso_resp_msg.set_on(38, \"APPR01\").unwrap_or_default();\n\n }\n\n }\n\n Err(_e) => {\n\n iso_resp_msg.set_on(39, \"107\").unwrap_or_default();\n\n }\n", "file_path": "README.md", "rank": 95, "score": 13.077318290823795 }, { "content": " Ok(c) => {\n\n println!(\"connected to server @ {:?}\", c.local_addr());\n\n Option::Some(c)\n\n }\n\n }\n\n }\n\n\n\n let client = self._tcp_stream.as_mut().unwrap();\n\n\n\n client.write_all(raw_msg.as_slice()).unwrap();\n\n client.flush().unwrap();\n\n\n\n // read the response\n\n let len: u32;\n\n match self.mli.parse(client) {\n\n Ok(n) => len = n,\n\n Err(e) => return Err(e)\n\n };\n\n\n\n let mut out_buf = vec![0; len as usize];\n", "file_path": "src/iso8583/client.rs", "rank": 96, "score": 12.936893029170406 }, { "content": "\n\n match std::env::var_os(\"SPEC_FILE\") {\n\n Some(v) => {\n\n spec_file.push_str(v.to_str().unwrap());\n\n println!(\"spec-file: {}\",spec_file)\n\n 
}\n\n\n\n None => panic!(\"SPEC_FILE env variable not defined!\")\n\n }\n\n\n\n let mut specs=HashMap::<String,Spec>::new();\n\n\n\n match crate::iso8583::yaml_de::read_spec(spec_file.as_str()){\n\n Ok(spec)=> specs.insert(String::from(spec.name()),spec),\n\n Err(e)=> panic!(e.msg)\n\n };\n\n\n\n specs\n\n};\n\n}\n", "file_path": "src/iso8583/iso_spec.rs", "rank": 97, "score": 12.732143995437179 }, { "content": "extern crate byteorder;\n\nextern crate hex;\n\nextern crate lazy_static;\n\n#[macro_use]\n\nextern crate log;\n\nextern crate simplelog;\n\n#[macro_use]\n\nextern crate hex_literal;\n\n\n\nuse iso8583_rs::iso8583::iso_spec::{IsoMsg, new_msg};\n\nuse iso8583_rs::iso8583::IsoError;\n\nuse iso8583_rs::iso8583::mli::MLIType::MLI2E;\n\nuse iso8583_rs::iso8583::server::ISOServer;\n\nuse iso8583_rs::iso8583::server::MsgProcessor;\n\nuse iso8583_rs::crypto::pin::verify_pin;\n\nuse iso8583_rs::crypto::pin::PinFormat::ISO0;\n\nuse std::path::Path;\n\nuse iso8583_rs::crypto::mac::MacAlgo::RetailMac;\n\nuse iso8583_rs::crypto::mac::PaddingType::Type1;\n\nuse iso8583_rs::crypto::mac::verify_mac;\n", "file_path": "src/main.rs", "rank": 98, "score": 12.731251289837875 }, { "content": "}\n\n\n\n/// This struct represents a parsed message for a given spec\n\npub struct IsoMsg {\n\n // The spec associated with this IsoMsg\n\n pub spec: &'static Spec,\n\n /// The segment that the IsoMsg represents\n\n pub msg: &'static MessageSegment,\n\n /// field data map - name to raw value\n\n pub fd_map: std::collections::HashMap<String, Vec<u8>>,\n\n /// the bitmap on the iso message\n\n pub bmp: bitmap::Bitmap,\n\n}\n\n\n\n/// Operations on IsoMsg\n\nimpl IsoMsg {\n\n pub fn spec(&self) -> &'static Spec {\n\n self.spec\n\n }\n\n\n", "file_path": "src/iso8583/iso_spec.rs", "rank": 99, "score": 12.650592883249558 } ]
Rust
src/client.rs
line/centraldogma-rs
9c223222f430a985bd32332fba5de79994bc652e
use std::time::Duration; use reqwest::{header::HeaderValue, Body, Method, Request}; use thiserror::Error; use url::Url; use crate::model::Revision; const WATCH_BUFFER_TIMEOUT: Duration = Duration::from_secs(5); #[derive(Error, Debug)] pub enum Error { #[error("HTTP Client error")] HttpClient(#[from] reqwest::Error), #[allow(clippy::upper_case_acronyms)] #[error("Invalid URL")] InvalidURL(#[from] url::ParseError), #[error("Failed to parse json")] ParseError(#[from] serde_json::Error), #[error("Invalid params: {0}")] InvalidParams(&'static str), #[error("Error response: [{0}] {1}")] ErrorResponse(u16, String), } #[derive(Clone)] pub struct Client { base_url: Url, token: HeaderValue, http_client: reqwest::Client, } impl Client { pub async fn new(base_url: &str, token: Option<&str>) -> Result<Self, Error> { let url = url::Url::parse(base_url)?; let http_client = reqwest::Client::builder().user_agent("cd-rs").build()?; let mut header_value = HeaderValue::from_str(&format!( "Bearer {}", token.as_ref().unwrap_or(&"anonymous") )) .map_err(|_| Error::InvalidParams("Invalid token received"))?; header_value.set_sensitive(true); Ok(Client { base_url: url, token: header_value, http_client, }) } pub(crate) async fn request(&self, req: reqwest::Request) -> Result<reqwest::Response, Error> { Ok(self.http_client.execute(req).await?) 
} pub(crate) fn new_request<S: AsRef<str>>( &self, method: reqwest::Method, path: S, body: Option<Body>, ) -> Result<reqwest::Request, Error> { self.new_request_inner(method, path.as_ref(), body) } fn new_request_inner( &self, method: reqwest::Method, path: &str, body: Option<Body>, ) -> Result<reqwest::Request, Error> { let mut req = Request::new(method, self.base_url.join(path)?); req.headers_mut() .insert("Authorization", self.token.clone()); if let Method::PATCH = *req.method() { req.headers_mut().insert( "Content-Type", HeaderValue::from_static("application/json-patch+json"), ); } else { req.headers_mut() .insert("Content-Type", HeaderValue::from_static("application/json")); } *req.body_mut() = body; Ok(req) } pub(crate) fn new_watch_request<S: AsRef<str>>( &self, method: reqwest::Method, path: S, body: Option<Body>, last_known_revision: Option<Revision>, timeout: Duration, ) -> Result<reqwest::Request, Error> { let mut req = self.new_request(method, path, body)?; match last_known_revision { Some(rev) => { let val = HeaderValue::from_str(&rev.to_string()).unwrap(); req.headers_mut().insert("if-none-match", val); } None => { let val = HeaderValue::from_str(&Revision::HEAD.to_string()).unwrap(); req.headers_mut().insert("if-none-match", val); } } if timeout.as_secs() != 0 { let val = HeaderValue::from_str(&format!("wait={}", timeout.as_secs())).unwrap(); req.headers_mut().insert("prefer", val); } let req_timeout = timeout.checked_add(WATCH_BUFFER_TIMEOUT).unwrap(); req.timeout_mut().replace(req_timeout); Ok(req) } pub fn project<'a>(&'a self, project_name: &'a str) -> ProjectClient<'a> { ProjectClient { client: self, project: project_name, } } pub fn repo<'a>(&'a self, project_name: &'a str, repo_name: &'a str) -> RepoClient<'a> { RepoClient { client: self, project: project_name, repo: repo_name, } } } pub struct ProjectClient<'a> { pub(crate) client: &'a Client, pub(crate) project: &'a str, } pub struct RepoClient<'a> { pub(crate) client: &'a Client, pub(crate) 
project: &'a str, pub(crate) repo: &'a str, }
use std::time::Duration; use reqwest::{header::HeaderValue, Body, Method, Request}; use thiserror::Error; use url::Url; use crate::model::Revision; const WATCH_BUFFER_TIMEOUT: Duration = Duration::from_secs(5); #[derive(Error, Debug)] pub enum Error { #[error("HTTP Client error")] HttpClient(#[from] reqwest::Error), #[allow(clippy::upper_case_acronyms)] #[error("Invalid URL")] InvalidURL(#[from] url::ParseError), #[error("Failed to parse json")] ParseError(#[from] serde_json::Error), #[error("Invalid params: {0}")] InvalidParams(&'static str), #[error("Error response: [{0}] {1}")] ErrorResponse(u16, String), } #[derive(Clone)] pub struct Client { base_url: Url, token: HeaderValue, http_client: reqwest::Client, } impl Client { pub async fn new(base_url: &str, token: Option<&str>) -> Result<Self, Error> { let url = url::Url::parse(base_url)?; let http_client = reqwest::Client::builder().user_agent("cd-rs").build()?; let mut header_value = HeaderValue::from_str(&format!( "Bearer {}", token.as_ref().unwrap_or(&"anonymous") )) .map_err(|_| Error::InvalidParams("Invalid token received"))?; header_value.set_sensitive(true); Ok(Client { base_url: url, token: header_value, http_client, }) } pub(crate) async fn request(&self, req: reqwest::Request) -> Result<reqwest::Response, Error> { Ok(self.http_client.execute(req).await?) } pub(crate) fn new_request<S: AsRef<str>>( &self, method: reqwest::Method, path: S, body: Option<Body>, ) -> Result<reqwest::Request, Error> { self.new_request_inner(method, path.as_ref(), body) }
pub(crate) fn new_watch_request<S: AsRef<str>>( &self, method: reqwest::Method, path: S, body: Option<Body>, last_known_revision: Option<Revision>, timeout: Duration, ) -> Result<reqwest::Request, Error> { let mut req = self.new_request(method, path, body)?; match last_known_revision { Some(rev) => { let val = HeaderValue::from_str(&rev.to_string()).unwrap(); req.headers_mut().insert("if-none-match", val); } None => { let val = HeaderValue::from_str(&Revision::HEAD.to_string()).unwrap(); req.headers_mut().insert("if-none-match", val); } } if timeout.as_secs() != 0 { let val = HeaderValue::from_str(&format!("wait={}", timeout.as_secs())).unwrap(); req.headers_mut().insert("prefer", val); } let req_timeout = timeout.checked_add(WATCH_BUFFER_TIMEOUT).unwrap(); req.timeout_mut().replace(req_timeout); Ok(req) } pub fn project<'a>(&'a self, project_name: &'a str) -> ProjectClient<'a> { ProjectClient { client: self, project: project_name, } } pub fn repo<'a>(&'a self, project_name: &'a str, repo_name: &'a str) -> RepoClient<'a> { RepoClient { client: self, project: project_name, repo: repo_name, } } } pub struct ProjectClient<'a> { pub(crate) client: &'a Client, pub(crate) project: &'a str, } pub struct RepoClient<'a> { pub(crate) client: &'a Client, pub(crate) project: &'a str, pub(crate) repo: &'a str, }
fn new_request_inner( &self, method: reqwest::Method, path: &str, body: Option<Body>, ) -> Result<reqwest::Request, Error> { let mut req = Request::new(method, self.base_url.join(path)?); req.headers_mut() .insert("Authorization", self.token.clone()); if let Method::PATCH = *req.method() { req.headers_mut().insert( "Content-Type", HeaderValue::from_static("application/json-patch+json"), ); } else { req.headers_mut() .insert("Content-Type", HeaderValue::from_static("application/json")); } *req.body_mut() = body; Ok(req) }
function_block-full_function
[ { "content": "fn watch_stream<D: Watchable>(client: Client, path: String) -> impl Stream<Item = D> + Send {\n\n let init_state = WatchState {\n\n client,\n\n path,\n\n last_known_revision: None,\n\n failed_count: 0,\n\n success_delay: None,\n\n };\n\n futures::stream::unfold(init_state, |mut state| async move {\n\n if let Some(d) = state.success_delay.take() {\n\n tokio::time::sleep(d).await;\n\n }\n\n\n\n loop {\n\n let req = match state.client.new_watch_request(\n\n Method::GET,\n\n &state.path,\n\n None,\n\n state.last_known_revision,\n\n DEFAULT_TIMEOUT,\n", "file_path": "src/services/watch.rs", "rank": 0, "score": 140740.26351883466 }, { "content": "fn normalize_path_pattern(path_pattern: &str) -> Cow<str> {\n\n if path_pattern.is_empty() {\n\n return Cow::Borrowed(\"/**\");\n\n }\n\n if path_pattern.starts_with(\"**\") {\n\n return Cow::Owned(format!(\"/{}\", path_pattern));\n\n }\n\n if !path_pattern.starts_with('/') {\n\n return Cow::Owned(format!(\"/**/{}\", path_pattern));\n\n }\n\n\n\n Cow::Borrowed(path_pattern)\n\n}\n\n\n\npub(crate) fn projects_path() -> String {\n\n format!(\"{}/projects\", PATH_PREFIX)\n\n}\n\n\n\npub(crate) fn removed_projects_path() -> String {\n\n format!(\"{}/projects?status=removed\", PATH_PREFIX)\n", "file_path": "src/services/path.rs", "rank": 1, "score": 110710.52300347164 }, { "content": "fn add_pair<'a, T>(s: &mut form_urlencoded::Serializer<'a, T>, key: &str, value: &str)\n\nwhere\n\n T: form_urlencoded::Target,\n\n{\n\n if !value.is_empty() {\n\n s.append_pair(key, value);\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_content_commits_path() {\n\n let full_arg_path = content_commits_path(\n\n \"foo\",\n\n \"bar\",\n\n Revision::from(1),\n\n Revision::from(2),\n", "file_path": "src/services/path.rs", "rank": 2, "score": 105778.12522522586 }, { "content": "#[derive(Debug, Serialize, Deserialize)]\n\n#[serde(rename_all = \"camelCase\")]\n\nstruct ErrorMessage {\n\n message: 
String,\n\n}\n\n\n\n/// convert HTTP Response with status < 200 and > 300 to Error\n\nasync fn status_unwrap(resp: Response) -> Result<Response, Error> {\n\n match resp.status().as_u16() {\n\n code if !(200..300).contains(&code) => {\n\n let err_body = resp.text().await?;\n\n let err_msg: ErrorMessage =\n\n serde_json::from_str(&err_body).unwrap_or(ErrorMessage { message: err_body });\n\n\n\n Err(Error::ErrorResponse(code, err_msg.message))\n\n }\n\n _ => Ok(resp),\n\n }\n\n}\n\n\n\npub(super) async fn do_request<T: DeserializeOwned>(\n\n client: &Client,\n\n req: reqwest::Request,\n\n) -> Result<T, Error> {\n\n let resp = client.request(req).await?;\n\n let ok_resp = status_unwrap(resp).await?;\n\n let result = ok_resp.json().await?;\n\n\n\n Ok(result)\n\n}\n", "file_path": "src/services/mod.rs", "rank": 3, "score": 67792.88991002044 }, { "content": "fn delay_time_for(failed_count: usize) -> Duration {\n\n let base_time_ms = MAX_BASE_TIME_MS.min(failed_count * 1000);\n\n let jitter = (fastrand::f32() * JITTER_RATE * base_time_ms as f32) as u64;\n\n Duration::from_millis(base_time_ms as u64 + jitter)\n\n}\n\n\n", "file_path": "src/services/watch.rs", "rank": 4, "score": 54755.34931034775 }, { "content": "fn t<'a>(ctx: &'a mut TestContext) -> Pin<Box<dyn Future<Output = Result<()>> + 'a>> {\n\n async move {\n\n let r = ctx.client.repo(&ctx.project.name, &ctx.repo.name);\n\n\n\n // Push data\n\n let push_result = {\n\n let commit_msg = CommitMessage {\n\n summary: \"New file\".to_string(),\n\n detail: Some(CommitDetail::Plaintext(\"detail\".to_string())),\n\n };\n\n let changes = vec![Change {\n\n path: \"/a.json\".to_string(),\n\n content: ChangeContent::UpsertJson(json!({\n\n \"test_key\": \"test_value\"\n\n })),\n\n }, Change {\n\n path: \"/folder/b.txt\".to_string(),\n\n content: ChangeContent::UpsertText(\"text value\".to_string()),\n\n }];\n\n\n", "file_path": "tests/content.rs", "rank": 5, "score": 53940.244505435316 }, { "content": "fn t1<'a>(ctx: &'a mut 
TestContext) -> Pin<Box<dyn Future<Output = Result<()>> + 'a>> {\n\n async move {\n\n let r = ctx.client.project(&ctx.project.name);\n\n\n\n // List repositories\n\n let repos = r\n\n .list_repos()\n\n .await\n\n .context(\"Failed to list repositories from project\")?;\n\n ensure!(repos.len() == 2, here!(\"New project should have 2 repos\"));\n\n\n\n // Create new repository\n\n let repo_name = \"TestRepo\";\n\n let new_repo = r\n\n .create_repo(repo_name)\n\n .await\n\n .context(\"Failed to create new Repository\")?;\n\n ensure!(repo_name == new_repo.name, here!(\"Wrong repo name\"));\n\n\n\n // Remove created repository\n", "file_path": "tests/repo.rs", "rank": 6, "score": 52574.66143799439 }, { "content": "#[derive(Debug, Serialize)]\n\n#[serde(rename_all = \"camelCase\")]\n\nstruct Push {\n\n commit_message: CommitMessage,\n\n changes: Vec<Change>,\n\n}\n\n\n\n/// Content-related APIs\n", "file_path": "src/services/content.rs", "rank": 7, "score": 43613.97109345619 }, { "content": "struct TestContext {\n\n client: cd::Client,\n\n project: Project,\n\n repo: Repository,\n\n}\n\n\n\nasync fn run_test<T>(test: T)\n\nwhere\n\n for<'a> T: FnOnce(&'a mut TestContext) -> Pin<Box<dyn Future<Output = Result<()>> + 'a>>,\n\n{\n\n let mut ctx = setup().await.expect(\"Failed to setup for test\");\n\n\n\n let result = test(&mut ctx).await;\n\n\n\n teardown(ctx).await.expect(\"Failed to teardown test setup\");\n\n\n\n result.unwrap();\n\n}\n\n\n\nasync fn setup() -> Result<TestContext> {\n", "file_path": "tests/content.rs", "rank": 8, "score": 43610.469240536535 }, { "content": "struct TestContext {\n\n client: cd::Client,\n\n project: cd::model::Project,\n\n}\n\n\n\nasync fn run_test<T>(test: T)\n\nwhere\n\n for<'a> T: FnOnce(&'a mut TestContext) -> Pin<Box<dyn Future<Output = Result<()>> + 'a>>,\n\n{\n\n let mut ctx = setup().await.expect(\"Failed to setup for test\");\n\n\n\n let result = test(&mut ctx).await;\n\n\n\n teardown(ctx).await.expect(\"Failed to teardown test 
setup\");\n\n\n\n result.unwrap();\n\n}\n\n\n\nasync fn setup() -> Result<TestContext> {\n\n let client = cd::Client::new(\"http://localhost:36462\", None)\n", "file_path": "tests/repo.rs", "rank": 9, "score": 43610.469240536535 }, { "content": "struct TestContext {\n\n client: cd::Client,\n\n project: cd::model::Project,\n\n repo: cd::model::Repository,\n\n}\n\n\n\nasync fn run_test<T>(test: T)\n\nwhere\n\n for<'a> T: FnOnce(&'a mut TestContext) -> Pin<Box<dyn Future<Output = Result<()>> + 'a>>,\n\n{\n\n let mut ctx = setup().await.expect(\"Failed to setup for test\");\n\n\n\n let result = test(&mut ctx).await;\n\n\n\n teardown(ctx).await.expect(\"Failed to teardown test setup\");\n\n\n\n result.unwrap();\n\n}\n\n\n\nasync fn setup() -> Result<TestContext> {\n", "file_path": "tests/watch.rs", "rank": 10, "score": 43610.469240536535 }, { "content": "struct WatchState {\n\n client: Client,\n\n path: String,\n\n last_known_revision: Option<Revision>,\n\n failed_count: usize,\n\n success_delay: Option<Duration>,\n\n}\n\n\n", "file_path": "src/services/watch.rs", "rank": 11, "score": 42225.71833630976 }, { "content": "#[async_trait]\n\npub trait ProjectService {\n\n /// Creates a project.\n\n async fn create_project(&self, name: &str) -> Result<Project, Error>;\n\n\n\n /// Removes a project. 
A removed project can be [unremoved](#tymethod.unremove_project).\n\n async fn remove_project(&self, name: &str) -> Result<(), Error>;\n\n\n\n /// Purges a project that was removed before.\n\n async fn purge_project(&self, name: &str) -> Result<(), Error>;\n\n\n\n /// Unremoves a project.\n\n async fn unremove_project(&self, name: &str) -> Result<Project, Error>;\n\n\n\n /// Retrieves the list of the projects.\n\n async fn list_projects(&self) -> Result<Vec<Project>, Error>;\n\n\n\n /// Retrieves the list of the removed projects,\n\n /// which can be [unremoved](#tymethod.unremove_project)\n\n /// or [purged](#tymethod.purge_project).\n\n async fn list_removed_projects(&self) -> Result<Vec<String>, Error>;\n", "file_path": "src/services/project.rs", "rank": 12, "score": 40478.85585855048 }, { "content": "#[async_trait]\n\npub trait ContentService {\n\n /// Retrieves the list of the files at the specified [`Revision`] matched by the path pattern.\n\n ///\n\n /// A path pattern is a variant of glob:\n\n /// * `\"/**\"` - find all files recursively\n\n /// * `\"*.json\"` - find all JSON files recursively\n\n /// * `\"/foo/*.json\"` - find all JSON files under the directory /foo\n\n /// * `\"/*/foo.txt\"` - find all files named foo.txt at the second depth level\n\n /// * `\"*.json,/bar/*.txt\"` - use comma to specify more than one pattern.\n\n /// A file will be matched if any pattern matches.\n\n async fn list_files(\n\n &self,\n\n revision: Revision,\n\n path_pattern: &str,\n\n ) -> Result<Vec<ListEntry>, Error>;\n\n\n\n /// Queries a file at the specified [`Revision`] and path with the specified [`Query`].\n\n async fn get_file(&self, revision: Revision, query: &Query) -> Result<Entry, Error>;\n\n\n\n /// Retrieves the files at the specified [`Revision`] matched by the path pattern.\n", "file_path": "src/services/content.rs", "rank": 13, "score": 40478.85585855048 }, { "content": "#[async_trait]\n\npub trait RepoService {\n\n /// Creates a repository.\n\n async fn 
create_repo(&self, repo_name: &str) -> Result<Repository, Error>;\n\n\n\n /// Removes a repository, removed repository can be\n\n /// [unremoved](#tymethod.unremove_repo).\n\n async fn remove_repo(&self, repo_name: &str) -> Result<(), Error>;\n\n\n\n /// Purges a repository that was removed before.\n\n async fn purge_repo(&self, repo_name: &str) -> Result<(), Error>;\n\n\n\n /// Unremoves a repository.\n\n async fn unremove_repo(&self, repo_name: &str) -> Result<Repository, Error>;\n\n\n\n /// Retrieves the list of the repositories.\n\n async fn list_repos(&self) -> Result<Vec<Repository>, Error>;\n\n\n\n /// Retrieves the list of the removed repositories, which can be\n\n /// [unremoved](#tymethod.unremove_repo).\n\n async fn list_removed_repos(&self) -> Result<Vec<String>, Error>;\n", "file_path": "src/services/repository.rs", "rank": 14, "score": 40478.85585855048 }, { "content": "/// Watch-related APIs\n\npub trait WatchService {\n\n /// Returns a stream which output a [`WatchFileResult`] when the result of the\n\n /// given [`Query`] becomes available or changes\n\n fn watch_file_stream(\n\n &self,\n\n query: &Query,\n\n ) -> Result<Pin<Box<dyn Stream<Item = WatchFileResult> + Send>>, Error>;\n\n\n\n /// Returns a stream which output a [`WatchRepoResult`] when the repository has a new commit\n\n /// that contains the changes for the files matched by the given `path_pattern`.\n\n fn watch_repo_stream(\n\n &self,\n\n path_pattern: &str,\n\n ) -> Result<Pin<Box<dyn Stream<Item = WatchRepoResult> + Send>>, Error>;\n\n}\n\n\n\nimpl<'a> WatchService for RepoClient<'a> {\n\n fn watch_file_stream(\n\n &self,\n\n query: &Query,\n", "file_path": "src/services/watch.rs", "rank": 15, "score": 40475.33723926735 }, { "content": "fn watch_repo_stream_test<'a>(\n\n ctx: &'a mut TestContext,\n\n) -> Pin<Box<dyn Future<Output = Result<()>> + 'a>> {\n\n async move {\n\n let r = ctx.client.repo(&ctx.project.name, &ctx.repo.name);\n\n\n\n let watch_stream = r\n\n 
.watch_repo_stream(\"\")\n\n .context(here!(\"Failed to get file watch stream\"))?;\n\n\n\n let new_commit_msg = CommitMessage {\n\n summary: \"change content\".to_string(),\n\n detail: None,\n\n };\n\n let new_change = vec![Change {\n\n path: \"/a.json\".to_string(),\n\n content: ChangeContent::UpsertJson(json!({\"a\": \"c\"})),\n\n }];\n\n let new_push = async move {\n\n tokio::time::sleep(Duration::from_millis(10)).await;\n", "file_path": "tests/watch.rs", "rank": 16, "score": 38674.65299775295 }, { "content": "fn watch_file_stream_test<'a>(\n\n ctx: &'a mut TestContext,\n\n) -> Pin<Box<dyn Future<Output = Result<()>> + 'a>> {\n\n async move {\n\n let r = ctx.client.repo(&ctx.project.name, &ctx.repo.name);\n\n\n\n let commit_msg = CommitMessage {\n\n summary: \"File\".to_string(),\n\n detail: None,\n\n };\n\n let file_change = vec![Change {\n\n path: \"/a.json\".to_string(),\n\n content: ChangeContent::UpsertJson(json!({\"a\": \"b\"})),\n\n }];\n\n\n\n r.push(Revision::HEAD, commit_msg, file_change)\n\n .await\n\n .context(here!(\"Failed to push file\"))?;\n\n\n\n let watch_stream = r\n", "file_path": "tests/watch.rs", "rank": 17, "score": 38674.65299775295 }, { "content": " let mut s = form_urlencoded::Serializer::for_suffix(url, len);\n\n add_pair(&mut s, params::PATH_PATTERN, &path_pattern);\n\n\n\n if let Some(v) = from_rev.as_ref() {\n\n add_pair(&mut s, params::FROM, &v.to_string());\n\n }\n\n if let Some(v) = to_rev.as_ref() {\n\n add_pair(&mut s, params::TO, &v.to_string());\n\n }\n\n\n\n s.finish()\n\n}\n\n\n\npub(crate) fn contents_push_path(\n\n project_name: &str,\n\n repo_name: &str,\n\n base_revision: Revision,\n\n) -> String {\n\n let url = format!(\n\n \"{}/projects/{}/repos/{}/contents?\",\n", "file_path": "src/services/path.rs", "rank": 27, "score": 27850.58905369772 }, { "content": " let mut s = form_urlencoded::Serializer::for_suffix(url, len);\n\n add_pair(&mut s, params::PATH, path);\n\n\n\n if let Some(v) = to_rev.as_ref() {\n\n 
add_pair(&mut s, params::TO, &v.to_string());\n\n }\n\n\n\n if let Some(c) = max_commits {\n\n add_pair(&mut s, params::MAX_COMMITS, &c.to_string());\n\n }\n\n\n\n s.finish()\n\n}\n\n\n\npub(crate) fn content_compare_path(\n\n project_name: &str,\n\n repo_name: &str,\n\n from_rev: Revision,\n\n to_rev: Revision,\n\n query: &Query,\n", "file_path": "src/services/path.rs", "rank": 28, "score": 27849.673211656514 }, { "content": " path_pattern: &str,\n\n) -> String {\n\n let path_pattern = normalize_path_pattern(path_pattern);\n\n let url = format!(\n\n \"{}/projects/{}/repos/{}/contents{}?\",\n\n PATH_PREFIX, project_name, repo_name, path_pattern\n\n );\n\n let len = url.len();\n\n\n\n let mut s = form_urlencoded::Serializer::for_suffix(url, len);\n\n if let Some(v) = revision.as_ref() {\n\n add_pair(&mut s, params::REVISION, &v.to_string());\n\n }\n\n\n\n s.finish()\n\n}\n\n\n\npub(crate) fn content_path(\n\n project_name: &str,\n\n repo_name: &str,\n", "file_path": "src/services/path.rs", "rank": 29, "score": 27849.265800010584 }, { "content": ") -> String {\n\n let path_pattern = normalize_path_pattern(path_pattern);\n\n let url = format!(\n\n \"{}/projects/{}/repos/{}/list{}?\",\n\n PATH_PREFIX, project_name, repo_name, &path_pattern\n\n );\n\n let len = url.len();\n\n\n\n let mut s = form_urlencoded::Serializer::for_suffix(url, len);\n\n if let Some(v) = revision.as_ref() {\n\n add_pair(&mut s, params::REVISION, &v.to_string());\n\n }\n\n\n\n s.finish()\n\n}\n\n\n\npub(crate) fn contents_path(\n\n project_name: &str,\n\n repo_name: &str,\n\n revision: Revision,\n", "file_path": "src/services/path.rs", "rank": 30, "score": 27848.839147312556 }, { "content": " PATH_PREFIX, project_name, repo_name\n\n );\n\n\n\n let len = url.len();\n\n let mut s = form_urlencoded::Serializer::for_suffix(url, len);\n\n\n\n if let Some(v) = base_revision.as_ref() {\n\n add_pair(&mut s, params::REVISION, &v.to_string());\n\n }\n\n\n\n s.finish()\n\n}\n\n\n\npub(crate) fn 
content_watch_path(project_name: &str, repo_name: &str, query: &Query) -> String {\n\n let url = format!(\n\n \"{}/projects/{}/repos/{}/contents{}?\",\n\n PATH_PREFIX, project_name, repo_name, &query.path\n\n );\n\n\n\n let len = url.len();\n", "file_path": "src/services/path.rs", "rank": 31, "score": 27848.393257849464 }, { "content": " let mut serializer = form_urlencoded::Serializer::for_suffix(url, len);\n\n\n\n if let QueryType::JsonPath(expressions) = &query.r#type {\n\n for expression in expressions.iter() {\n\n add_pair(&mut serializer, params::JSONPATH, expression);\n\n }\n\n }\n\n\n\n serializer.finish()\n\n}\n\n\n\npub(crate) fn repo_watch_path(project_name: &str, repo_name: &str, path_pattern: &str) -> String {\n\n let path_pattern = normalize_path_pattern(path_pattern);\n\n\n\n format!(\n\n \"{}/projects/{}/repos/{}/contents{}\",\n\n PATH_PREFIX, project_name, repo_name, path_pattern\n\n )\n\n}\n\n\n", "file_path": "src/services/path.rs", "rank": 32, "score": 27848.29332180554 }, { "content": ") -> String {\n\n let url = format!(\n\n \"{}/projects/{}/repos/{}/compare?\",\n\n PATH_PREFIX, project_name, repo_name\n\n );\n\n\n\n let len = url.len();\n\n let mut s = form_urlencoded::Serializer::for_suffix(url, len);\n\n add_pair(&mut s, params::PATH, &query.path);\n\n\n\n if let Some(v) = from_rev.as_ref() {\n\n add_pair(&mut s, params::FROM, &v.to_string());\n\n }\n\n if let Some(v) = to_rev.as_ref() {\n\n add_pair(&mut s, params::TO, &v.to_string());\n\n }\n\n\n\n if let QueryType::JsonPath(expressions) = &query.r#type {\n\n for expression in expressions.iter() {\n\n add_pair(&mut s, params::JSONPATH, expression);\n", "file_path": "src/services/path.rs", "rank": 33, "score": 27847.825736447438 }, { "content": " revision: Revision,\n\n query: &Query,\n\n) -> String {\n\n let url = format!(\n\n \"{}/projects/{}/repos/{}/contents{}?\",\n\n PATH_PREFIX, project_name, repo_name, &query.path\n\n );\n\n\n\n let len = url.len();\n\n let mut s = 
form_urlencoded::Serializer::for_suffix(url, len);\n\n if let Some(v) = revision.as_ref() {\n\n add_pair(&mut s, params::REVISION, &v.to_string());\n\n }\n\n\n\n if let QueryType::JsonPath(expressions) = &query.r#type {\n\n for expression in expressions.iter() {\n\n add_pair(&mut s, params::JSONPATH, expression);\n\n }\n\n }\n\n\n", "file_path": "src/services/path.rs", "rank": 34, "score": 27846.52488395828 }, { "content": "use std::borrow::Cow;\n\n\n\nuse crate::model::{Query, QueryType, Revision};\n\n\n\nconst PATH_PREFIX: &str = \"/api/v1\";\n\n\n\nmod params {\n\n pub const REVISION: &str = \"revision\";\n\n pub const JSONPATH: &str = \"jsonpath\";\n\n pub const PATH: &str = \"path\";\n\n pub const PATH_PATTERN: &str = \"pathPattern\";\n\n pub const MAX_COMMITS: &str = \"maxCommits\";\n\n pub const FROM: &str = \"from\";\n\n pub const TO: &str = \"to\";\n\n}\n\n\n", "file_path": "src/services/path.rs", "rank": 35, "score": 27845.89764751345 }, { "content": " s.finish()\n\n}\n\n\n\npub(crate) fn content_commits_path(\n\n project_name: &str,\n\n repo_name: &str,\n\n from_rev: Revision,\n\n to_rev: Revision,\n\n path: &str,\n\n max_commits: Option<u32>,\n\n) -> String {\n\n let url = format!(\n\n \"{}/projects/{}/repos/{}/commits/{}?\",\n\n PATH_PREFIX,\n\n project_name,\n\n repo_name,\n\n &from_rev.to_string(),\n\n );\n\n\n\n let len = url.len();\n", "file_path": "src/services/path.rs", "rank": 36, "score": 27844.126147187526 }, { "content": " }\n\n }\n\n\n\n s.finish()\n\n}\n\n\n\npub(crate) fn contents_compare_path(\n\n project_name: &str,\n\n repo_name: &str,\n\n from_rev: Revision,\n\n to_rev: Revision,\n\n path_pattern: &str,\n\n) -> String {\n\n let url = format!(\n\n \"{}/projects/{}/repos/{}/compare?\",\n\n PATH_PREFIX, project_name, repo_name\n\n );\n\n\n\n let path_pattern = normalize_path_pattern(path_pattern);\n\n let len = url.len();\n", "file_path": "src/services/path.rs", "rank": 37, "score": 27843.602006957666 }, { "content": "}\n\n\n\npub(crate) 
fn project_path(project_name: &str) -> String {\n\n format!(\"{}/projects/{}\", PATH_PREFIX, project_name)\n\n}\n\n\n\npub(crate) fn removed_project_path(project_name: &str) -> String {\n\n format!(\"{}/projects/{}/removed\", PATH_PREFIX, project_name)\n\n}\n\n\n\npub(crate) fn repos_path(project_name: &str) -> String {\n\n format!(\"{}/projects/{}/repos\", PATH_PREFIX, project_name)\n\n}\n\n\n\npub(crate) fn removed_repos_path(project_name: &str) -> String {\n\n format!(\n\n \"{}/projects/{}/repos?status=removed\",\n\n PATH_PREFIX, project_name\n\n )\n\n}\n", "file_path": "src/services/path.rs", "rank": 38, "score": 27842.752756883536 }, { "content": "\n\npub(crate) fn repo_path(project_name: &str, repo_name: &str) -> String {\n\n format!(\n\n \"{}/projects/{}/repos/{}\",\n\n PATH_PREFIX, project_name, repo_name\n\n )\n\n}\n\n\n\npub(crate) fn removed_repo_path(project_name: &str, repo_name: &str) -> String {\n\n format!(\n\n \"{}/projects/{}/repos/{}/removed\",\n\n PATH_PREFIX, project_name, repo_name\n\n )\n\n}\n\n\n\npub(crate) fn list_contents_path(\n\n project_name: &str,\n\n repo_name: &str,\n\n revision: Revision,\n\n path_pattern: &str,\n", "file_path": "src/services/path.rs", "rank": 39, "score": 27842.242187724023 }, { "content": " \"bar\",\n\n Revision::DEFAULT,\n\n Revision::DEFAULT,\n\n &Query::of_json_path(\"/a.json\", vec![\"a\".to_string()]).unwrap(),\n\n );\n\n assert_eq!(\n\n with_json_query,\n\n \"/api/v1/projects/foo/repos/bar/compare?path=%2Fa.json&jsonpath=a\"\n\n );\n\n }\n\n}\n", "file_path": "src/services/path.rs", "rank": 40, "score": 27837.902931964767 }, { "content": " );\n\n assert_eq!(\n\n omitted_to_path,\n\n \"/api/v1/projects/foo/repos/bar/compare?path=%2Fa.json&from=1\"\n\n );\n\n\n\n let omitted_all_path = content_compare_path(\n\n \"foo\",\n\n \"bar\",\n\n Revision::DEFAULT,\n\n Revision::DEFAULT,\n\n &Query::identity(\"/a.json\").unwrap(),\n\n );\n\n assert_eq!(\n\n omitted_all_path,\n\n 
\"/api/v1/projects/foo/repos/bar/compare?path=%2Fa.json\"\n\n );\n\n\n\n let with_json_query = content_compare_path(\n\n \"foo\",\n", "file_path": "src/services/path.rs", "rank": 41, "score": 27835.559479645883 }, { "content": " \"/a.json\",\n\n Some(5),\n\n );\n\n assert_eq!(\n\n full_arg_path,\n\n \"/api/v1/projects/foo/repos/bar/commits/1?path=%2Fa.json&to=2&maxCommits=5\"\n\n );\n\n\n\n let omitted_max_commmit_path = content_commits_path(\n\n \"foo\",\n\n \"bar\",\n\n Revision::from(1),\n\n Revision::from(2),\n\n \"/a.json\",\n\n None,\n\n );\n\n assert_eq!(\n\n omitted_max_commmit_path,\n\n \"/api/v1/projects/foo/repos/bar/commits/1?path=%2Fa.json&to=2\"\n\n );\n", "file_path": "src/services/path.rs", "rank": 42, "score": 27835.515980286036 }, { "content": "\n\n let omitted_from_to_path = content_commits_path(\n\n \"foo\",\n\n \"bar\",\n\n Revision::DEFAULT,\n\n Revision::DEFAULT,\n\n \"/a.json\",\n\n Some(5),\n\n );\n\n assert_eq!(\n\n omitted_from_to_path,\n\n \"/api/v1/projects/foo/repos/bar/commits/?path=%2Fa.json&maxCommits=5\"\n\n );\n\n\n\n let omitted_all_path = content_commits_path(\n\n \"foo\",\n\n \"bar\",\n\n Revision::DEFAULT,\n\n Revision::DEFAULT,\n\n \"/a.json\",\n", "file_path": "src/services/path.rs", "rank": 43, "score": 27835.44334899719 }, { "content": " );\n\n\n\n let omitted_from_path = content_compare_path(\n\n \"foo\",\n\n \"bar\",\n\n Revision::DEFAULT,\n\n Revision::from(2),\n\n &Query::identity(\"/a.json\").unwrap(),\n\n );\n\n assert_eq!(\n\n omitted_from_path,\n\n \"/api/v1/projects/foo/repos/bar/compare?path=%2Fa.json&to=2\"\n\n );\n\n\n\n let omitted_to_path = content_compare_path(\n\n \"foo\",\n\n \"bar\",\n\n Revision::from(1),\n\n Revision::DEFAULT,\n\n &Query::identity(\"/a.json\").unwrap(),\n", "file_path": "src/services/path.rs", "rank": 44, "score": 27835.39365592249 }, { "content": " None,\n\n );\n\n assert_eq!(\n\n omitted_all_path,\n\n \"/api/v1/projects/foo/repos/bar/commits/?path=%2Fa.json\"\n\n );\n\n }\n\n\n\n 
#[test]\n\n fn test_content_compare_path() {\n\n let full_arg_path = content_compare_path(\n\n \"foo\",\n\n \"bar\",\n\n Revision::from(1),\n\n Revision::from(2),\n\n &Query::identity(\"/a.json\").unwrap(),\n\n );\n\n assert_eq!(\n\n full_arg_path,\n\n \"/api/v1/projects/foo/repos/bar/compare?path=%2Fa.json&from=1&to=2\"\n", "file_path": "src/services/path.rs", "rank": 45, "score": 27835.312593106464 }, { "content": "}\n\n\n\n#[async_trait]\n\nimpl<'a> RepoService for ProjectClient<'a> {\n\n async fn create_repo(&self, repo_name: &str) -> Result<Repository, Error> {\n\n #[derive(Serialize)]\n\n struct CreateRepo<'a> {\n\n name: &'a str,\n\n }\n\n\n\n let body = serde_json::to_vec(&CreateRepo { name: repo_name })?;\n\n let body = Body::from(body);\n\n\n\n let req =\n\n self.client\n\n .new_request(Method::POST, path::repos_path(self.project), Some(body))?;\n\n\n\n let resp = self.client.request(req).await?;\n\n let resp_body = status_unwrap(resp).await?.bytes().await?;\n\n let result = serde_json::from_slice(&resp_body[..])?;\n", "file_path": "src/services/repository.rs", "rank": 46, "score": 28.87107174641993 }, { "content": "}\n\n\n\n#[async_trait]\n\nimpl ProjectService for Client {\n\n async fn create_project(&self, name: &str) -> Result<Project, Error> {\n\n #[derive(Serialize)]\n\n struct CreateProject<'a> {\n\n name: &'a str,\n\n }\n\n\n\n let body: Vec<u8> = serde_json::to_vec(&CreateProject { name })?;\n\n let body = Body::from(body);\n\n let req = self.new_request(Method::POST, path::projects_path(), Some(body))?;\n\n\n\n let resp = self.request(req).await?;\n\n let ok_resp = status_unwrap(resp).await?;\n\n let result = ok_resp.json().await?;\n\n\n\n Ok(result)\n\n }\n", "file_path": "src/services/project.rs", "rank": 47, "score": 28.34838165128306 }, { "content": " ) -> Result<PushResult, Error> {\n\n if cm.summary.is_empty() {\n\n return Err(Error::InvalidParams(\n\n \"summary of commit_message cannot be empty\",\n\n ));\n\n }\n\n if changes.is_empty() 
{\n\n return Err(Error::InvalidParams(\"no changes to commit\"));\n\n }\n\n\n\n let body: String = serde_json::to_string(&Push {\n\n commit_message: cm,\n\n changes,\n\n })?;\n\n let body = Body::from(body);\n\n\n\n let p = path::contents_push_path(self.project, self.repo, base_revision);\n\n let req = self.client.new_request(Method::POST, p, Some(body))?;\n\n\n\n do_request(self.client, req).await\n", "file_path": "src/services/content.rs", "rank": 48, "score": 28.13869559226562 }, { "content": " path::removed_repo_path(self.project, repo_name),\n\n None,\n\n )?;\n\n\n\n let resp = self.client.request(req).await?;\n\n let _ = status_unwrap(resp).await?;\n\n\n\n Ok(())\n\n }\n\n\n\n async fn unremove_repo(&self, repo_name: &str) -> Result<Repository, Error> {\n\n let body: Vec<u8> = serde_json::to_vec(&json!([\n\n {\"op\":\"replace\", \"path\":\"/status\", \"value\":\"active\"}\n\n ]))?;\n\n let body = Body::from(body);\n\n let req = self.client.new_request(\n\n Method::PATCH,\n\n path::repo_path(self.project, repo_name),\n\n Some(body),\n\n )?;\n", "file_path": "src/services/repository.rs", "rank": 49, "score": 26.495698366333215 }, { "content": "\n\n Ok(result)\n\n }\n\n\n\n async fn remove_repo(&self, repo_name: &str) -> Result<(), Error> {\n\n let req = self.client.new_request(\n\n Method::DELETE,\n\n path::repo_path(self.project, repo_name),\n\n None,\n\n )?;\n\n\n\n let resp = self.client.request(req).await?;\n\n let _ = status_unwrap(resp).await?;\n\n\n\n Ok(())\n\n }\n\n\n\n async fn purge_repo(&self, repo_name: &str) -> Result<(), Error> {\n\n let req = self.client.new_request(\n\n Method::DELETE,\n", "file_path": "src/services/repository.rs", "rank": 50, "score": 25.08418618516266 }, { "content": " &self,\n\n revision: Revision,\n\n path_pattern: &str,\n\n ) -> Result<Vec<ListEntry>, Error> {\n\n let req = self.client.new_request(\n\n Method::GET,\n\n path::list_contents_path(self.project, self.repo, revision, path_pattern),\n\n None,\n\n )?;\n\n\n\n 
do_request(self.client, req).await\n\n }\n\n\n\n async fn get_file(&self, revision: Revision, query: &Query) -> Result<Entry, Error> {\n\n let p = path::content_path(self.project, self.repo, revision, query);\n\n let req = self.client.new_request(Method::GET, p, None)?;\n\n\n\n do_request(self.client, req).await\n\n }\n\n\n", "file_path": "src/services/content.rs", "rank": 51, "score": 23.822234124597305 }, { "content": "//! Watch-related APIs\n\nuse std::{pin::Pin, time::Duration};\n\n\n\nuse crate::{\n\n model::{Query, Revision, WatchFileResult, WatchRepoResult, Watchable},\n\n services::{path, status_unwrap},\n\n Client, Error, RepoClient,\n\n};\n\n\n\nuse futures::{Stream, StreamExt};\n\nuse reqwest::{Method, Request, StatusCode};\n\n\n\nconst DEFAULT_TIMEOUT: Duration = Duration::from_secs(60);\n\nconst DELAY_ON_SUCCESS: Duration = Duration::from_secs(1);\n\nconst JITTER_RATE: f32 = 0.2;\n\nconst MAX_BASE_TIME_MS: usize = 10_000; // 10sec = 10_000millis\n\n\n\nasync fn request_watch<D: Watchable>(client: &Client, req: Request) -> Result<Option<D>, Error> {\n\n let resp = client.request(req).await?;\n\n if resp.status() == StatusCode::NOT_MODIFIED {\n\n return Ok(None);\n\n }\n\n let ok_resp = status_unwrap(resp).await?;\n\n let result = ok_resp.json().await?;\n\n\n\n Ok(Some(result))\n\n}\n\n\n", "file_path": "src/services/watch.rs", "rank": 52, "score": 23.604051706373813 }, { "content": "//! 
Project-related APIs\n\nuse crate::{\n\n client::{Client, Error},\n\n model::Project,\n\n services::{path, status_unwrap},\n\n};\n\n\n\nuse async_trait::async_trait;\n\nuse reqwest::{Body, Method};\n\nuse serde::{Deserialize, Serialize};\n\nuse serde_json::json;\n\n\n\n/// Project-related APIs\n\n#[async_trait]\n", "file_path": "src/services/project.rs", "rank": 53, "score": 23.138631504476198 }, { "content": " async fn get_files(&self, revision: Revision, path_pattern: &str) -> Result<Vec<Entry>, Error> {\n\n let req = self.client.new_request(\n\n Method::GET,\n\n path::contents_path(self.project, self.repo, revision, path_pattern),\n\n None,\n\n )?;\n\n\n\n do_request(self.client, req).await\n\n }\n\n\n\n async fn get_history(\n\n &self,\n\n from_rev: Revision,\n\n to_rev: Revision,\n\n path: &str,\n\n max_commits: Option<u32>,\n\n ) -> Result<Vec<Commit>, Error> {\n\n let p = path::content_commits_path(\n\n self.project,\n\n self.repo,\n", "file_path": "src/services/content.rs", "rank": 54, "score": 23.00654506906318 }, { "content": "//! 
Repository-related APIs\n\nuse crate::{\n\n client::{Error, ProjectClient},\n\n model::Repository,\n\n services::{path, status_unwrap},\n\n};\n\n\n\nuse async_trait::async_trait;\n\nuse reqwest::{Body, Method};\n\nuse serde::{Deserialize, Serialize};\n\nuse serde_json::json;\n\n\n\n/// Repository-related APIs\n\n#[async_trait]\n", "file_path": "src/services/repository.rs", "rank": 55, "score": 22.979769411719356 }, { "content": "\n\n async fn remove_project(&self, name: &str) -> Result<(), Error> {\n\n let req = self.new_request(Method::DELETE, path::project_path(name), None)?;\n\n\n\n let resp = self.request(req).await?;\n\n let _ = status_unwrap(resp).await?;\n\n\n\n Ok(())\n\n }\n\n\n\n async fn purge_project(&self, name: &str) -> Result<(), Error> {\n\n let req = self.new_request(Method::DELETE, path::removed_project_path(name), None)?;\n\n\n\n let resp = self.request(req).await?;\n\n let _ = status_unwrap(resp).await?;\n\n\n\n Ok(())\n\n }\n\n\n\n async fn unremove_project(&self, name: &str) -> Result<Project, Error> {\n", "file_path": "src/services/project.rs", "rank": 56, "score": 22.867676383273864 }, { "content": "//! 
Content-related APIs\n\nuse crate::{\n\n model::{Change, Commit, CommitMessage, Entry, ListEntry, PushResult, Query, Revision},\n\n services::{do_request, path},\n\n Error, RepoClient,\n\n};\n\n\n\nuse async_trait::async_trait;\n\nuse reqwest::{Body, Method};\n\nuse serde::Serialize;\n\n\n\n#[derive(Debug, Serialize)]\n\n#[serde(rename_all = \"camelCase\")]\n", "file_path": "src/services/content.rs", "rank": 57, "score": 22.762137874829435 }, { "content": " let body: Vec<u8> = serde_json::to_vec(&json!([\n\n {\"op\":\"replace\", \"path\":\"/status\", \"value\":\"active\"}\n\n ]))?;\n\n let body = Body::from(body);\n\n let req = self.new_request(Method::PATCH, path::project_path(name), Some(body))?;\n\n\n\n let resp = self.request(req).await?;\n\n let ok_resp = status_unwrap(resp).await?;\n\n let result = ok_resp.json().await?;\n\n\n\n Ok(result)\n\n }\n\n\n\n async fn list_projects(&self) -> Result<Vec<Project>, Error> {\n\n let req = self.new_request(Method::GET, path::projects_path(), None)?;\n\n let resp = self.request(req).await?;\n\n let ok_resp = status_unwrap(resp).await?;\n\n\n\n if let Some(0) = ok_resp.content_length() {\n\n return Ok(Vec::new());\n", "file_path": "src/services/project.rs", "rank": 58, "score": 22.454261128125133 }, { "content": " }\n\n\n\n async fn get_diffs(\n\n &self,\n\n from_rev: Revision,\n\n to_rev: Revision,\n\n path_pattern: &str,\n\n ) -> Result<Vec<Change>, Error> {\n\n let p =\n\n path::contents_compare_path(self.project, self.repo, from_rev, to_rev, path_pattern);\n\n let req = self.client.new_request(Method::GET, p, None)?;\n\n\n\n do_request(self.client, req).await\n\n }\n\n\n\n async fn push(\n\n &self,\n\n base_revision: Revision,\n\n cm: CommitMessage,\n\n changes: Vec<Change>,\n", "file_path": "src/services/content.rs", "rank": 59, "score": 22.032337021770118 }, { "content": "#[derive(Debug, PartialEq, Eq)]\n\npub enum QueryType {\n\n Identity,\n\n IdentityJson,\n\n IdentityText,\n\n 
JsonPath(Vec<String>),\n\n}\n\n\n\n/// A Query on a file\n\n#[derive(Debug)]\n\npub struct Query {\n\n pub(crate) path: String,\n\n pub(crate) r#type: QueryType,\n\n}\n\n\n\nimpl Query {\n\n fn normalize_path(path: &str) -> String {\n\n if path.starts_with('/') {\n\n path.to_owned()\n\n } else {\n", "file_path": "src/model.rs", "rank": 60, "score": 21.67253180597664 }, { "content": " from_rev,\n\n to_rev,\n\n path,\n\n max_commits,\n\n );\n\n let req = self.client.new_request(Method::GET, p, None)?;\n\n\n\n do_request(self.client, req).await\n\n }\n\n\n\n async fn get_diff(\n\n &self,\n\n from_rev: Revision,\n\n to_rev: Revision,\n\n query: &Query,\n\n ) -> Result<Change, Error> {\n\n let p = path::content_compare_path(self.project, self.repo, from_rev, to_rev, query);\n\n let req = self.client.new_request(Method::GET, p, None)?;\n\n\n\n do_request(self.client, req).await\n", "file_path": "src/services/content.rs", "rank": 61, "score": 21.437908292866844 }, { "content": "\n\n let resp = self.client.request(req).await?;\n\n let ok_resp = status_unwrap(resp).await?;\n\n let result = ok_resp.json().await?;\n\n\n\n Ok(result)\n\n }\n\n\n\n async fn list_repos(&self) -> Result<Vec<Repository>, Error> {\n\n let req = self\n\n .client\n\n .new_request(Method::GET, path::repos_path(self.project), None)?;\n\n\n\n let resp = self.client.request(req).await?;\n\n let ok_resp = status_unwrap(resp).await?;\n\n let result = ok_resp.json().await?;\n\n\n\n Ok(result)\n\n }\n\n\n", "file_path": "src/services/repository.rs", "rank": 62, "score": 21.40069590087683 }, { "content": " async fn list_removed_repos(&self) -> Result<Vec<String>, Error> {\n\n #[derive(Deserialize)]\n\n struct RemovedRepo {\n\n name: String,\n\n }\n\n let req =\n\n self.client\n\n .new_request(Method::GET, path::removed_repos_path(self.project), None)?;\n\n\n\n let resp = self.client.request(req).await?;\n\n let ok_resp = status_unwrap(resp).await?;\n\n if ok_resp.status().as_u16() == 204 {\n\n return 
Ok(Vec::new());\n\n }\n\n let result: Vec<RemovedRepo> = ok_resp.json().await?;\n\n let result = result.into_iter().map(|r| r.name).collect();\n\n\n\n Ok(result)\n\n }\n\n}\n", "file_path": "src/services/repository.rs", "rank": 63, "score": 21.312042872583035 }, { "content": " let resp = ResponseTemplate::new(200).set_body_raw(\n\n r#\"{\n\n \"path\":\"/a.json\",\n\n \"type\":\"JSON\",\n\n \"revision\":2,\n\n \"url\": \"/api/v1/projects/foo/repos/bar/contents/a.json\",\n\n \"content\":\"b\"\n\n }\"#,\n\n \"application/json\",\n\n );\n\n Mock::given(method(\"GET\"))\n\n .and(path(\"/api/v1/projects/foo/repos/bar/contents/a.json\"))\n\n .and(query_param(\"jsonpath\", \"$.a\"))\n\n .and(header(\"Authorization\", \"Bearer anonymous\"))\n\n .respond_with(resp)\n\n .mount(&server)\n\n .await;\n\n\n\n let client = Client::new(&server.uri(), None).await.unwrap();\n\n let query = Query::of_json_path(\"/a.json\", vec![\"$.a\".to_string()]).unwrap();\n", "file_path": "src/services/content.rs", "rank": 64, "score": 20.57258086110515 }, { "content": " }\n\n let result = ok_resp.json().await?;\n\n\n\n Ok(result)\n\n }\n\n\n\n async fn list_removed_projects(&self) -> Result<Vec<String>, Error> {\n\n #[derive(Deserialize)]\n\n struct RemovedProject {\n\n name: String,\n\n }\n\n let req = self.new_request(Method::GET, path::removed_projects_path(), None)?;\n\n let resp = self.request(req).await?;\n\n let ok_resp = status_unwrap(resp).await?;\n\n\n\n let result: Vec<RemovedProject> = ok_resp.json().await?;\n\n let result = result.into_iter().map(|p| p.name).collect();\n\n\n\n Ok(result)\n\n }\n", "file_path": "src/services/project.rs", "rank": 65, "score": 19.781094665698344 }, { "content": " let body = Push {\n\n commit_message: CommitMessage::only_summary(\"Add a.json and b.txt\"),\n\n changes,\n\n };\n\n Mock::given(method(\"POST\"))\n\n .and(path(\"/api/v1/projects/foo/repos/bar/contents\"))\n\n .and(query_param(\"revision\", \"-1\"))\n\n .and(body_json(body))\n\n 
.and(header(\"Authorization\", \"Bearer anonymous\"))\n\n .respond_with(resp)\n\n .expect(1)\n\n .mount(&server)\n\n .await;\n\n\n\n let client = Client::new(&server.uri(), None).await.unwrap();\n\n let changes = vec![\n\n Change {\n\n path: \"/a.json\".to_string(),\n\n content: ChangeContent::UpsertJson(serde_json::json!({\"a\":\"b\"})),\n\n },\n", "file_path": "src/services/content.rs", "rank": 66, "score": 19.764179794399848 }, { "content": "##### Push\n\n\n\n```rust\n\nuse centraldogma::{Client, ContentService};\n\n\n\n#[tokio::main]\n\nfn main() {\n\n let client = Client::new(\"http://localhost:36462\", None).await.unwrap();\n\n let changes = vec![Change {\n\n path: \"/a.json\".to_string(),\n\n content: ChangeContent::UpsertJson(serde_json::json!({\"a\":\"b\"})),\n\n }];\n\n let result = client\n\n .repo(\"foo\", \"bar\")\n\n .push(\n\n Revision::HEAD,\n\n CommitMessage::only_summary(\"Add a.json\"),\n\n changes,\n\n )\n\n .await\n\n .unwrap();\n\n```\n\n\n\n##### Watch file change\n\n\n\n```rust\n\nuse centraldogma::{Client, WatchService};\n\n\n\n#[tokio::main]\n\nfn main() {\n\n let client = Client::new(\"http://localhost:36462\", None).await.unwrap();\n\n let stream = client\n\n .repo(\"foo\", \"bar\")\n\n .watch_file_stream(&Query::identity(\"/a.json\").unwrap())\n\n .unwrap();\n\n\n\n tokio::spawn(async move {\n\n while let Some(result) = stream.next().await {\n\n // your code ...\n\n }\n\n })\n\n```\n\n\n\n## Contributing\n\n\n\nSee [CONTRIBUTING.md](CONTRIBUTING.md).\n", "file_path": "README.md", "rank": 67, "score": 19.313429140596718 }, { "content": " content: ChangeContent::UpsertJson(serde_json::json!({\"a\":\"b\"})),\n\n }];\n\n let body = Push {\n\n commit_message: CommitMessage::only_summary(\"Add a.json\"),\n\n changes,\n\n };\n\n Mock::given(method(\"POST\"))\n\n .and(path(\"/api/v1/projects/foo/repos/bar/contents\"))\n\n .and(query_param(\"revision\", \"-1\"))\n\n .and(body_json(body))\n\n .and(header(\"Authorization\", \"Bearer 
anonymous\"))\n\n .respond_with(resp)\n\n .expect(1)\n\n .mount(&server)\n\n .await;\n\n\n\n let client = Client::new(&server.uri(), None).await.unwrap();\n\n let changes = vec![Change {\n\n path: \"/a.json\".to_string(),\n\n content: ChangeContent::UpsertJson(serde_json::json!({\"a\":\"b\"})),\n", "file_path": "src/services/content.rs", "rank": 68, "score": 19.27394333254126 }, { "content": " \"url\": \"/api/v1/projects/foo/repos/bar/contents/a.json\",\n\n \"content\":\"b\"\n\n }\"#,\n\n \"application/json\",\n\n );\n\n Mock::given(method(\"GET\"))\n\n .and(path(\"/api/v1/projects/foo/repos/bar/contents/a.json\"))\n\n .and(query_param(\"revision\", \"5\"))\n\n .and(query_param(\"jsonpath\", \"$.a\"))\n\n .and(header(\"Authorization\", \"Bearer anonymous\"))\n\n .respond_with(resp)\n\n .mount(&server)\n\n .await;\n\n\n\n let client = Client::new(&server.uri(), None).await.unwrap();\n\n let query = Query::of_json_path(\"/a.json\", vec![\"$.a\".to_string()]).unwrap();\n\n let entry = client\n\n .repo(\"foo\", \"bar\")\n\n .get_file(Revision::from(5), &query)\n\n .await\n", "file_path": "src/services/content.rs", "rank": 69, "score": 19.075455531747295 }, { "content": " }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use crate::{\n\n model::{Author, ChangeContent, EntryContent, EntryType, Revision},\n\n Client,\n\n };\n\n use wiremock::{\n\n matchers::{body_json, header, method, path, query_param},\n\n Mock, MockServer, ResponseTemplate,\n\n };\n\n\n\n #[tokio::test]\n\n async fn test_list_files() {\n\n let server = MockServer::start().await;\n\n let resp = ResponseTemplate::new(200).set_body_raw(\n\n r#\"[\n", "file_path": "src/services/content.rs", "rank": 70, "score": 18.589909775797313 }, { "content": "\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use crate::{\n\n model::{Author, Revision},\n\n Client,\n\n };\n\n use wiremock::{\n\n matchers::{body_json, header, method, path, query_param},\n\n Mock, MockServer, ResponseTemplate,\n\n 
};\n\n\n\n #[tokio::test]\n\n async fn test_list_repos() {\n\n let server = MockServer::start().await;\n\n let resp = ResponseTemplate::new(200).set_body_raw(\n\n r#\"[{\n\n \"name\":\"bar\",\n\n \"creator\":{\"name\":\"minux\", \"email\":\"[email protected]\"},\n", "file_path": "src/services/repository.rs", "rank": 71, "score": 18.455549725890695 }, { "content": "}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use wiremock::{\n\n matchers::{body_json, header, method, path, query_param},\n\n Mock, MockServer, ResponseTemplate,\n\n };\n\n\n\n #[tokio::test]\n\n async fn test_list_projects() {\n\n let server = MockServer::start().await;\n\n let resp = ResponseTemplate::new(200).set_body_raw(\n\n r#\"[{\n\n \"name\":\"foo\",\n\n \"creator\":{\"name\":\"minux\", \"email\":\"[email protected]\"},\n\n \"url\":\"/api/v1/projects/foo\"\n\n }, {\n\n \"name\":\"bar\",\n", "file_path": "src/services/project.rs", "rank": 72, "score": 18.002091864440313 }, { "content": "pub mod content;\n\nmod path;\n\npub mod project;\n\npub mod repository;\n\npub mod watch;\n\n\n\nuse reqwest::Response;\n\nuse serde::{de::DeserializeOwned, Deserialize, Serialize};\n\n\n\nuse crate::{Client, Error};\n\n\n\n#[derive(Debug, Serialize, Deserialize)]\n\n#[serde(rename_all = \"camelCase\")]\n", "file_path": "src/services/mod.rs", "rank": 73, "score": 17.071839611158193 }, { "content": " let resp = ResponseTemplate::new(200).set_body_raw(\n\n r#\"{\n\n \"path\":\"/a.json\",\n\n \"type\":\"APPLY_JSON_PATCH\",\n\n \"content\":[{\n\n \"op\":\"safeReplace\",\n\n \"path\":\"\",\n\n \"oldValue\":\"bar\",\n\n \"value\":\"baz\"\n\n }]\n\n }\"#,\n\n \"application/json\",\n\n );\n\n Mock::given(method(\"GET\"))\n\n .and(path(\"/api/v1/projects/foo/repos/bar/compare\"))\n\n .and(query_param(\"from\", \"3\"))\n\n .and(query_param(\"to\", \"4\"))\n\n .and(query_param(\"path\", \"/a.json\"))\n\n .and(query_param(\"jsonpath\", \"$.a\"))\n\n .and(header(\"Authorization\", \"Bearer anonymous\"))\n", 
"file_path": "src/services/content.rs", "rank": 74, "score": 16.696849234571076 }, { "content": "\n\n let repo_json = serde_json::json!({\"name\": \"bar\"});\n\n Mock::given(method(\"POST\"))\n\n .and(path(\"/api/v1/projects/foo/repos\"))\n\n .and(body_json(repo_json))\n\n .and(header(\"Authorization\", \"Bearer anonymous\"))\n\n .respond_with(resp)\n\n .mount(&server)\n\n .await;\n\n\n\n let client = Client::new(&server.uri(), None).await.unwrap();\n\n let repo = client.project(\"foo\").create_repo(\"bar\").await.unwrap();\n\n\n\n assert_eq!(repo.name, \"bar\");\n\n assert_eq!(\n\n repo.creator,\n\n Author {\n\n name: \"minux\".to_string(),\n\n email: \"[email protected]\".to_string()\n\n }\n", "file_path": "src/services/repository.rs", "rank": 75, "score": 16.660193864971543 }, { "content": " /// Revision of this entry.\n\n pub revision: Revision,\n\n /// Url of this entry.\n\n pub url: String,\n\n /// When this entry was last modified.\n\n pub modified_at: Option<String>,\n\n}\n\n\n\nimpl Entry {\n\n pub fn entry_type(&self) -> EntryType {\n\n match self.content {\n\n EntryContent::Json(_) => EntryType::Json,\n\n EntryContent::Text(_) => EntryType::Text,\n\n EntryContent::Directory => EntryType::Directory,\n\n }\n\n }\n\n}\n\n\n\n/// The type of a [`ListEntry`]\n\n#[derive(Debug, Serialize, Deserialize, PartialEq, Eq)]\n", "file_path": "src/model.rs", "rank": 76, "score": 16.545654575763642 }, { "content": " async fn test_create_project() {\n\n let server = MockServer::start().await;\n\n let project_json = serde_json::json!({\"name\": \"foo\"});\n\n let resp = ResponseTemplate::new(201).set_body_raw(\n\n r#\"{\n\n \"name\":\"foo\",\n\n \"creator\":{\"name\":\"minux\", \"email\":\"[email protected]\"}\n\n }\"#,\n\n \"application/json\",\n\n );\n\n Mock::given(method(\"POST\"))\n\n .and(path(\"/api/v1/projects\"))\n\n .and(header(\"Authorization\", \"Bearer anonymous\"))\n\n .and(body_json(project_json))\n\n .respond_with(resp)\n\n .expect(1)\n\n 
.mount(&server)\n\n .await;\n\n let client = Client::new(&server.uri(), None).await.unwrap();\n\n let project = client.create_project(\"foo\").await.unwrap();\n", "file_path": "src/services/project.rs", "rank": 77, "score": 16.23939309270799 }, { "content": " ///\n\n /// A path pattern is a variant of glob:\n\n /// * `\"/**\"` - find all files recursively\n\n /// * `\"*.json\"` - find all JSON files recursively\n\n /// * `\"/foo/*.json\"` - find all JSON files under the directory /foo\n\n /// * `\"/*/foo.txt\"` - find all files named foo.txt at the second depth level\n\n /// * `\"*.json,/bar/*.txt\"` - use comma to specify more than one pattern.\n\n /// A file will be matched if any pattern matches.\n\n async fn get_files(&self, revision: Revision, path_pattern: &str) -> Result<Vec<Entry>, Error>;\n\n\n\n /// Retrieves the history of the repository of the files matched by the given\n\n /// path pattern between two [`Revision`]s.\n\n /// Note that this method does not retrieve the diffs but only metadata about the changes.\n\n /// Use [get_diff](#tymethod.get_diff) or\n\n /// [get_diffs](#tymethod.get_diffs) to retrieve the diffs\n\n async fn get_history(\n\n &self,\n\n from_rev: Revision,\n\n to_rev: Revision,\n\n path: &str,\n", "file_path": "src/services/content.rs", "rank": 78, "score": 16.142672171077887 }, { "content": " \"creator\":{\"name\":\"minux\", \"email\":\"[email protected]\"},\n\n \"createdAt\":\"a\",\n\n \"url\":\"/api/v1/projects/foo/repos/bar\",\n\n \"headRevision\": 2}\"#;\n\n let resp = ResponseTemplate::new(200).set_body_raw(resp, \"application/json\");\n\n let unremove_json = serde_json::json!(\n\n [{\"op\": \"replace\", \"path\": \"/status\", \"value\": \"active\"}]\n\n );\n\n Mock::given(method(\"PATCH\"))\n\n .and(path(\"/api/v1/projects/foo/repos/bar\"))\n\n .and(body_json(unremove_json))\n\n .and(header(\"Authorization\", \"Bearer anonymous\"))\n\n .and(header(\"Content-Type\", \"application/json-patch+json\"))\n\n .respond_with(resp)\n\n 
.mount(&server)\n\n .await;\n\n\n\n let client = Client::new(&server.uri(), None).await.unwrap();\n\n let repo = client.project(\"foo\").unremove_repo(\"bar\").await;\n\n\n", "file_path": "src/services/repository.rs", "rank": 79, "score": 16.09758918632947 }, { "content": " #[tokio::test]\n\n async fn test_unremove_project() {\n\n let server = MockServer::start().await;\n\n let unremove_json =\n\n serde_json::json!([{\"op\": \"replace\", \"path\": \"/status\", \"value\": \"active\"}]);\n\n let resp = ResponseTemplate::new(201).set_body_raw(\n\n r#\"{\n\n \"name\":\"foo\",\n\n \"creator\":{\"name\":\"minux\", \"email\":\"[email protected]\"},\n\n \"url\":\"/api/v1/projects/foo\"\n\n }\"#,\n\n \"application/json\",\n\n );\n\n Mock::given(method(\"PATCH\"))\n\n .and(path(\"/api/v1/projects/foo\"))\n\n .and(header(\"Content-Type\", \"application/json-patch+json\"))\n\n .and(header(\"Authorization\", \"Bearer anonymous\"))\n\n .and(body_json(unremove_json))\n\n .respond_with(resp)\n\n .expect(1)\n", "file_path": "src/services/project.rs", "rank": 80, "score": 15.951501487388608 }, { "content": " assert_eq!(entry.path, \"/b.txt\");\n\n assert!(matches!(entry.content, EntryContent::Text(t) if t == content));\n\n }\n\n\n\n #[tokio::test]\n\n async fn test_get_file_json() {\n\n let server = MockServer::start().await;\n\n let resp = ResponseTemplate::new(200).set_body_raw(\n\n r#\"{\n\n \"path\":\"/a.json\",\n\n \"type\":\"JSON\",\n\n \"revision\":2,\n\n \"url\": \"/api/v1/projects/foo/repos/bar/contents/a.json\",\n\n \"content\":{\"a\":\"b\"}\n\n }\"#,\n\n \"application/json\",\n\n );\n\n Mock::given(method(\"GET\"))\n\n .and(path(\"/api/v1/projects/foo/repos/bar/contents/a.json\"))\n\n .and(header(\"Authorization\", \"Bearer anonymous\"))\n", "file_path": "src/services/content.rs", "rank": 81, "score": 15.772270637196728 }, { "content": " /// Returns a newly-created [`Query`] that applies a series of\n\n /// [JSON path 
expressions](https://github.com/json-path/JsonPath/blob/master/README.md)\n\n /// to the content.\n\n /// Returns `None` if path is empty or does not end with `.json`.\n\n /// Returns `None` if any of the path expression provided is empty.\n\n pub fn of_json_path(path: &str, exprs: Vec<String>) -> Option<Self> {\n\n if !path.to_lowercase().ends_with(\"json\") {\n\n return None;\n\n }\n\n if exprs.iter().any(|expr| expr.is_empty()) {\n\n return None;\n\n }\n\n Some(Query {\n\n path: Self::normalize_path(path),\n\n r#type: QueryType::JsonPath(exprs),\n\n })\n\n }\n\n}\n\n\n\n/// Typed content of a [`CommitMessage`]\n", "file_path": "src/model.rs", "rank": 82, "score": 15.704310404216525 }, { "content": " use super::*;\n\n use crate::model::{Entry, EntryContent};\n\n use wiremock::{\n\n matchers::{header, method, path},\n\n Mock, MockServer, Respond, ResponseTemplate,\n\n };\n\n\n\n struct MockResponse {\n\n first_time: AtomicBool,\n\n }\n\n\n\n impl Respond for MockResponse {\n\n fn respond(&self, _req: &wiremock::Request) -> ResponseTemplate {\n\n if self.first_time.swap(false, Ordering::SeqCst) {\n\n println!(\"Called 1\");\n\n ResponseTemplate::new(304).set_delay(Duration::from_millis(100))\n\n } else {\n\n println!(\"Called 2\");\n\n let resp = r#\"{\n\n \"revision\":3,\n", "file_path": "src/services/watch.rs", "rank": 83, "score": 15.646764563547627 }, { "content": "# centraldogma-rs\n\n\n\nOfficial Rust Client for [Central Dogma](https://line.github.io/centraldogma/).\n\n\n\nFull documentation is available at https://docs.rs/centraldogma\n\n\n\n## Getting started\n\n\n\n### Installing\n\n\n\nAdd `centraldogma` crate and version to Cargo.toml.\n\n\n\n```toml\n\ncentraldogma = \"0.1\"\n\n```\n\n\n\n#### Async support with tokio\n\nThe client uses [`reqwest`](https://crates.io/crates/reqwest) to make HTTP calls, which internally uses\n\nthe [`tokio`](https://crates.io/crates/tokio) runtime for async support. 
As such, you may require to take\n\na dependency on `tokio` in order to use the client.\n\n\n\n```toml\n\ntokio = { version = \"1.2.0\", features = [\"full\"] }\n\n```\n\n\n\n### Create a client\n\n\n\nCreate a new client to make API to CentralDogma using the `Client` struct.\n\n\n\n```rust\n\nuse centraldogma::Client;\n\n\n\n#[tokio::main]\n\nfn main() {\n\n // with token\n\n let client = Client::new(\"http://localhost:36462\", Some(\"token\")).await.unwrap();\n\n // without token\n\n let client = Client::new(\"http://localhost:36462\", None).await.unwrap();\n\n // your code ...\n\n}\n\n```\n\n\n\n### Making typed API calls\n\n\n\nTyped API calls are provided behind traits:\n\n\n\n* [`ProjectService`](https://docs.rs/centraldogma/0.1.0/centraldogma/trait.ProjectService.html)\n\n* [`RepoService`](https://docs.rs/centraldogma/0.1.0/centraldogma/trait.RepoService.html)\n\n* [`ContentService`](https://docs.rs/centraldogma/0.1.0/centraldogma/trait.ContentService.html)\n\n* [`WatchService`](https://docs.rs/centraldogma/0.1.0/centraldogma/trait.WatchService.html)\n\n\n\n#### Examples\n\n\n\n##### Get File\n\n\n\n```rust\n\nuse centraldogma::{Client, ContentService};\n\n\n\n#[tokio::main]\n\nfn main() {\n\n // without token\n\n let client = Client::new(\"http://localhost:36462\", None).await.unwrap();\n\n\n\n let file = client\n\n .repo(\"project\", \"repository\")\n\n .get_file(Revision::HEAD, Query::of_text(\"/a.yml\"))\n\n .await\n\n .unwrap();\n\n // your code ...\n\n}\n\n```\n\n\n", "file_path": "README.md", "rank": 84, "score": 15.635288497220952 }, { "content": " return None;\n\n }\n\n Some(Query {\n\n path: Self::normalize_path(path),\n\n r#type: QueryType::IdentityText,\n\n })\n\n }\n\n\n\n /// Returns a newly-created [`Query`] that retrieves the JSON content as it is.\n\n /// Returns `None` if path is empty\n\n pub fn of_json(path: &str) -> Option<Self> {\n\n if path.is_empty() {\n\n return None;\n\n }\n\n Some(Query {\n\n path: Self::normalize_path(path),\n\n 
r#type: QueryType::IdentityJson,\n\n })\n\n }\n\n\n", "file_path": "src/model.rs", "rank": 85, "score": 15.412902067702035 }, { "content": " \"path\":\"/b.txt\",\n\n \"type\":\"APPLY_TEXT_PATCH\",\n\n \"content\":\"--- /b.txt\\n+++ /b.txt\\n@@ -1,1 +1,1 @@\\n-foo\\n+bar\"\n\n }]\"#,\n\n \"application/json\",\n\n );\n\n Mock::given(method(\"GET\"))\n\n .and(path(\"/api/v1/projects/foo/repos/bar/compare\"))\n\n .and(query_param(\"from\", \"1\"))\n\n .and(query_param(\"to\", \"4\"))\n\n .and(query_param(\"pathPattern\", \"/**\"))\n\n .and(header(\"Authorization\", \"Bearer anonymous\"))\n\n .respond_with(resp)\n\n .mount(&server)\n\n .await;\n\n\n\n let client = Client::new(&server.uri(), None).await.unwrap();\n\n let changes = client\n\n .repo(\"foo\", \"bar\")\n\n .get_diffs(Revision::from(1), Revision::from(4), \"/**\")\n", "file_path": "src/services/content.rs", "rank": 86, "score": 15.204348851442868 }, { "content": " /// A file will be matched if any pattern matches.\n\n async fn get_diffs(\n\n &self,\n\n from_rev: Revision,\n\n to_rev: Revision,\n\n path_pattern: &str,\n\n ) -> Result<Vec<Change>, Error>;\n\n\n\n /// Pushes the specified [`Change`]s to the repository.\n\n async fn push(\n\n &self,\n\n base_revision: Revision,\n\n cm: CommitMessage,\n\n changes: Vec<Change>,\n\n ) -> Result<PushResult, Error>;\n\n}\n\n\n\n#[async_trait]\n\nimpl<'a> ContentService for RepoClient<'a> {\n\n async fn list_files(\n", "file_path": "src/services/content.rs", "rank": 87, "score": 15.105685265587084 }, { "content": " ),\n\n ];\n\n\n\n for (r, e) in repos.iter().zip(expected.iter()) {\n\n assert_eq!(r.name, e.0);\n\n assert_eq!(r.creator, e.1);\n\n assert_eq!(r.url.as_ref().unwrap(), &e.2);\n\n assert_eq!(r.head_revision, e.3);\n\n }\n\n }\n\n\n\n #[tokio::test]\n\n async fn test_list_removed_repos() {\n\n let server = MockServer::start().await;\n\n let resp = ResponseTemplate::new(200)\n\n .set_body_raw(r#\"[{\"name\":\"bar\"}, {\"name\":\"baz\"}]\"#, 
\"application/json\");\n\n Mock::given(method(\"GET\"))\n\n .and(path(\"/api/v1/projects/foo/repos\"))\n\n .and(query_param(\"status\", \"removed\"))\n\n .and(header(\"Authorization\", \"Bearer anonymous\"))\n", "file_path": "src/services/repository.rs", "rank": 88, "score": 15.059074554182857 }, { "content": "\n\nimpl CommitMessage {\n\n pub fn only_summary(summary: &str) -> Self {\n\n CommitMessage {\n\n summary: summary.to_owned(),\n\n detail: None,\n\n }\n\n }\n\n}\n\n\n\n/// Result of a [push](trait@crate::ContentService#tymethod.push) operation.\n\n#[derive(Debug, Serialize, Deserialize, PartialEq, Eq)]\n\n#[serde(rename_all = \"camelCase\")]\n\npub struct PushResult {\n\n /// Revision of this commit.\n\n pub revision: Revision,\n\n /// When this commit was pushed.\n\n pub pushed_at: Option<String>,\n\n}\n\n\n", "file_path": "src/model.rs", "rank": 89, "score": 14.844836204482785 }, { "content": " \"commitMessage\":{\"summary\":\"Add a.json\"}\n\n }, {\n\n \"revision\":2,\n\n \"author\":{\"name\":\"minux\", \"email\":\"[email protected]\"},\n\n \"commitMessage\":{\"summary\":\"Edit a.json\"}\n\n }]\"#,\n\n \"application/json\",\n\n );\n\n Mock::given(method(\"GET\"))\n\n .and(path(\"/api/v1/projects/foo/repos/bar/commits/-2\"))\n\n .and(query_param(\"to\", \"-1\"))\n\n .and(query_param(\"maxCommits\", \"2\"))\n\n .and(header(\"Authorization\", \"Bearer anonymous\"))\n\n .respond_with(resp)\n\n .mount(&server)\n\n .await;\n\n\n\n let client = Client::new(&server.uri(), None).await.unwrap();\n\n let commits = client\n\n .repo(\"foo\", \"bar\")\n", "file_path": "src/services/content.rs", "rank": 90, "score": 14.80142763655394 }, { "content": " .watch_file_stream(&Query::of_json(\"/a.json\").unwrap())\n\n .context(here!(\"Failed to get file watch stream\"))?;\n\n\n\n let new_commit_msg = CommitMessage {\n\n summary: \"change content\".to_string(),\n\n detail: None,\n\n };\n\n let new_change = vec![Change {\n\n path: \"/a.json\".to_string(),\n\n content: 
ChangeContent::UpsertJson(json!({\"a\": \"c\"})),\n\n }];\n\n let new_push = async move {\n\n tokio::time::sleep(Duration::from_millis(1)).await;\n\n r.push(Revision::HEAD, new_commit_msg, new_change).await\n\n };\n\n\n\n let sleep = tokio::time::sleep(Duration::from_millis(10000));\n\n futures::pin_mut!(sleep);\n\n\n\n let mut s = watch_stream.take_until(sleep);\n", "file_path": "tests/watch.rs", "rank": 91, "score": 14.775276122821392 }, { "content": "#[serde(rename_all = \"SCREAMING_SNAKE_CASE\")]\n\n#[serde(tag = \"type\", content = \"content\")]\n\npub enum EntryContent {\n\n /// Content as a JSON Value.\n\n Json(serde_json::Value),\n\n /// Content as a String.\n\n Text(String),\n\n /// This Entry is a directory.\n\n Directory,\n\n}\n\n\n\n/// A file or a directory in a repository.\n\n#[derive(Debug, Serialize, Deserialize, PartialEq, Eq)]\n\n#[serde(rename_all = \"camelCase\")]\n\npub struct Entry {\n\n /// Path of this entry.\n\n pub path: String,\n\n /// Content of this entry.\n\n #[serde(flatten)]\n\n pub content: EntryContent,\n", "file_path": "src/model.rs", "rank": 92, "score": 14.615805940759476 }, { "content": " }\n\n\n\n #[tokio::test]\n\n async fn test_get_file() {\n\n let server = MockServer::start().await;\n\n let resp = ResponseTemplate::new(200).set_body_raw(\n\n r#\"{\n\n \"path\":\"/b.txt\",\n\n \"type\":\"TEXT\",\n\n \"revision\":2,\n\n \"url\": \"/api/v1/projects/foo/repos/bar/contents/b.txt\",\n\n \"content\":\"hello world~!\"\n\n }\"#,\n\n \"application/json\",\n\n );\n\n Mock::given(method(\"GET\"))\n\n .and(path(\"/api/v1/projects/foo/repos/bar/contents/b.txt\"))\n\n .and(header(\"Authorization\", \"Bearer anonymous\"))\n\n .respond_with(resp)\n\n .mount(&server)\n", "file_path": "src/services/content.rs", "rank": 93, "score": 14.421212160450242 }, { "content": " state.failed_count = 0; // reset fail count\n\n Duration::from_secs(1)\n\n }\n\n Err(Error::HttpClient(e)) if e.is_timeout() => Duration::from_secs(1),\n\n Err(e) => {\n\n 
log::debug!(\"Request error: {}\", e);\n\n state.failed_count += 1;\n\n delay_time_for(state.failed_count)\n\n }\n\n };\n\n\n\n // Delay\n\n tokio::time::sleep(next_delay).await;\n\n }\n\n })\n\n}\n\n\n", "file_path": "src/services/watch.rs", "rank": 94, "score": 14.141899102989093 }, { "content": " \"entry\":{\n\n \"path\":\"/a.json\",\n\n \"type\":\"JSON\",\n\n \"content\": {\"a\":\"b\"},\n\n \"revision\":3,\n\n \"url\": \"/api/v1/projects/foo/repos/bar/contents/a.json\"\n\n }\n\n }\"#;\n\n ResponseTemplate::new(200)\n\n .set_delay(Duration::from_millis(100))\n\n .set_body_raw(resp, \"application/json\")\n\n }\n\n }\n\n }\n\n\n\n #[tokio::test]\n\n async fn test_watch_file() {\n\n let server = MockServer::start().await;\n\n let resp = MockResponse {\n\n first_time: AtomicBool::new(true),\n", "file_path": "src/services/watch.rs", "rank": 95, "score": 14.073532583916467 }, { "content": " async fn test_push_two_files() {\n\n let server = MockServer::start().await;\n\n let resp = ResponseTemplate::new(200).set_body_raw(\n\n r#\"{\n\n \"revision\":3,\n\n \"pushedAt\":\"2017-05-22T00:00:00Z\"\n\n }\"#,\n\n \"application/json\",\n\n );\n\n\n\n let changes = vec![\n\n Change {\n\n path: \"/a.json\".to_string(),\n\n content: ChangeContent::UpsertJson(serde_json::json!({\"a\":\"b\"})),\n\n },\n\n Change {\n\n path: \"/b.txt\".to_string(),\n\n content: ChangeContent::UpsertText(\"myContent\".to_string()),\n\n },\n\n ];\n", "file_path": "src/services/content.rs", "rank": 96, "score": 13.858234235916001 }, { "content": "mod client;\n\npub mod model;\n\nmod services;\n\n\n\npub use client::{Client, Error, ProjectClient, RepoClient};\n\npub use services::{\n\n content::ContentService, project::ProjectService, repository::RepoService, watch::WatchService,\n\n};\n", "file_path": "src/lib.rs", "rank": 97, "score": 13.71568105483561 }, { "content": " {\"path\":\"/a.json\", \"type\":\"JSON\"},\n\n {\"path\":\"/b.txt\", \"type\":\"TEXT\"}\n\n ]\"#,\n\n \"application/json\",\n\n 
);\n\n Mock::given(method(\"GET\"))\n\n .and(path(\"/api/v1/projects/foo/repos/bar/list/**\"))\n\n .and(header(\"Authorization\", \"Bearer anonymous\"))\n\n .respond_with(resp)\n\n .mount(&server)\n\n .await;\n\n\n\n let client = Client::new(&server.uri(), None).await.unwrap();\n\n let entries = client\n\n .repo(\"foo\", \"bar\")\n\n .list_files(Revision::HEAD, \"/**\")\n\n .await\n\n .unwrap();\n\n\n\n server.reset().await;\n", "file_path": "src/services/content.rs", "rank": 98, "score": 13.511463747674059 }, { "content": " };\n\n\n\n Mock::given(method(\"GET\"))\n\n .and(path(\"/api/v1/projects/foo/repos/bar/contents/a.json\"))\n\n .and(header(\"if-none-match\", \"-1\"))\n\n .and(header(\"prefer\", \"wait=60\"))\n\n .and(header(\"Authorization\", \"Bearer anonymous\"))\n\n .respond_with(resp)\n\n .expect(2)\n\n .mount(&server)\n\n .await;\n\n\n\n let client = Client::new(&server.uri(), None).await.unwrap();\n\n let stream = client\n\n .repo(\"foo\", \"bar\")\n\n .watch_file_stream(&Query::identity(\"/a.json\").unwrap())\n\n .unwrap()\n\n .take_until(tokio::time::sleep(Duration::from_secs(3)));\n\n tokio::pin!(stream);\n\n\n", "file_path": "src/services/watch.rs", "rank": 99, "score": 13.481951108236276 } ]
Rust
src/combinators.rs
hashedone/toy-interpreter
7e04e06bd0fe9349d7515fe0731591eabc6b428b
use crate::{Operator, Result, Token}; #[derive(Debug, PartialEq)] pub struct ParseProgress<'a, T> { pub tail: &'a str, pub token: Option<T>, } pub type ParseResult<'a, T> = Result<ParseProgress<'a, T>>; impl<'a, T> ParseProgress<'a, T> { fn none(tail: &'a str) -> ParseResult<'a, T> { Ok(ParseProgress { tail, token: None }) } fn some(tail: &'a str, token: T) -> ParseResult<'a, T> { Ok(ParseProgress { tail, token: Some(token), }) } } macro_rules! assume { ($e:expr, $tail:expr) => {{ let e = $e?; if e.token.is_some() { (e.tail, e.token.unwrap()) } else { return ParseProgress::none($tail); } }}; } fn number(src: &str) -> ParseResult<f32> { let first_not = src .find(|c| !"0123456789.".contains(c)) .unwrap_or_else(|| src.len()); if first_not == 0 { return ParseProgress::none(src); } let literal = &src[..first_not]; let tail = &src[first_not..]; if literal.chars().filter(|&c| c == '.').count() > 1 { Err(format!( "Invalid number: {}, only one decimal point allowed", literal )) } else { let number = literal .parse() .map_err(|err| format!("Invalid numer: {}, {}", literal, err))?; ParseProgress::some(tail, number) } } fn identifier(src: &str) -> ParseResult<&str> { if src.is_empty() { ParseProgress::none(src) } else if src.chars().next().unwrap().is_ascii_alphabetic() || src.starts_with('_') { let first_not = src .find(|c: char| -> bool { !(c == '_' || c.is_ascii_alphanumeric()) }) .unwrap_or_else(|| src.len()); let literal = &src[..first_not]; let tail = &src[first_not..]; ParseProgress::some(tail, literal) } else { ParseProgress::none(src) } } fn assignment(src: &str) -> ParseResult<&str> { let (tail, ident) = assume!(identifier(src), src); let tail = tail.trim_start(); if tail.starts_with('=') && !tail.starts_with("=>") { ParseProgress::some(&tail[1..], ident) } else { ParseProgress::none(src) } } pub fn next_token(src: &str) -> ParseResult<Token> { if src.is_empty() { return ParseProgress::none(""); } let assign = assignment(src)?; if let Some(tok) = assign.token { 
return ParseProgress::some(assign.tail, Token::Assign(tok.to_owned())); } let id = identifier(src)?; if let Some(tok) = id.token { return ParseProgress::some(id.tail, Token::Id(tok.to_owned())); } let num = number(src)?; if let Some(tok) = num.token { return ParseProgress::some(num.tail, Token::Number(tok)); } if src.starts_with("=>") { return ParseProgress::some(&src[2..], Token::Func); } let tok = match src { _ if src.starts_with('+') => Token::Operator(Operator::Add), _ if src.starts_with('-') => Token::Operator(Operator::Sub), _ if src.starts_with('*') => Token::Operator(Operator::Mul), _ if src.starts_with('/') => Token::Operator(Operator::Div), _ if src.starts_with('%') => Token::Operator(Operator::Mod), _ if src.starts_with('(') => Token::LBracket, _ if src.starts_with(')') => Token::RBracket, _ => return Err(format!("Invalid token: {}", src)), }; ParseProgress::some(&src[1..], tok) } #[cfg(test)] mod test { use super::*; #[test] fn test_number() { assert_eq!(ParseProgress::none(""), number("")); assert_eq!(ParseProgress::none("tail"), number("tail")); assert_eq!(ParseProgress::some("", 10.0f32), number("10")); assert_eq!(ParseProgress::some("", 10.4f32), number("10.4")); assert_eq!(ParseProgress::some("tail", 10.4f32), number("10.4tail")); number("10.4.5").unwrap_err(); } #[test] fn test_identifier() { assert_eq!(ParseProgress::none(""), identifier("")); assert_eq!(ParseProgress::none("10"), identifier("10")); assert_eq!(ParseProgress::some("", "a"), identifier("a")); assert_eq!(ParseProgress::some("", "ab"), identifier("ab")); assert_eq!(ParseProgress::some("", "_ab"), identifier("_ab")); assert_eq!(ParseProgress::some(".", "_ab"), identifier("_ab.")); assert_eq!(ParseProgress::some("", "__"), identifier("__")); assert_eq!(ParseProgress::some("", "_1"), identifier("_1")); } #[test] fn test_assignment() { assert_eq!(ParseProgress::none(""), assignment("")); assert_eq!(ParseProgress::none("x"), assignment("x")); assert_eq!(ParseProgress::some("", "x"), 
assignment("x =")); assert_eq!(ParseProgress::none("x =>"), assignment("x =>")); } #[test] fn test_next_token() { assert_eq!(ParseProgress::none(""), next_token("")); assert_eq!( ParseProgress::some("", Token::Operator(Operator::Add)), next_token("+") ); assert_eq!( ParseProgress::some("", Token::Operator(Operator::Sub)), next_token("-") ); assert_eq!( ParseProgress::some("", Token::Operator(Operator::Mul)), next_token("*") ); assert_eq!( ParseProgress::some("", Token::Operator(Operator::Div)), next_token("/") ); assert_eq!( ParseProgress::some("", Token::Operator(Operator::Mod)), next_token("%") ); assert_eq!(ParseProgress::some("", Token::LBracket), next_token("(")); assert_eq!(ParseProgress::some("", Token::RBracket), next_token(")")); assert_eq!( ParseProgress::some("", Token::Assign("x".to_owned())), next_token("x =") ); assert_eq!(ParseProgress::some("", Token::Func), next_token("=>")); assert_eq!( ParseProgress::some("x", Token::Operator(Operator::Mod)), next_token("%x") ); assert_eq!( ParseProgress::some(" =>", Token::Id("x".to_owned())), next_token("x =>") ); next_token("10.0.4").unwrap_err(); next_token("=").unwrap_err(); } }
use crate::{Operator, Result, Token}; #[derive(Debug, PartialEq)] pub struct ParseProgress<'a, T> { pub tail: &'a str, pub token: Option<T>, } pub type ParseResult<'a, T> = Result<ParseProgress<'a, T>>; impl<'a, T> ParseProgress<'a, T> { fn none(tail: &'a str) -> ParseResult<'a, T> { Ok(ParseProgress { tail, token: None }) } fn some(tail: &'a str, token: T) -> ParseResult<'a, T> { Ok(ParseProgress { tail, token: Some(token), }) } } macro_rules! assume { ($e:expr, $tail:expr) => {{ let e = $e?; if e.token.is_some() { (e.tail, e.token.unwrap()) } else { return ParseProgress::none($tail); } }}; } fn number(src: &str) -> ParseResult<f32> { let first_not = src .find(|c| !"0123456789.".contains(c)) .unwrap_or_else(|| src.len()); if first_not == 0 { return ParseProgress::none(src); } let literal = &src[..first_not]; let tail = &src[first_not..]; if literal.chars().filter(|&c| c == '.').count() > 1 { Err(format!( "Invalid number: {}, only one decimal point allowed", literal )) } else { let number = literal .parse() .map_err(|err| format!("Invalid numer: {}, {}", literal, err))?; ParseProgress::some(tail, number) } }
fn assignment(src: &str) -> ParseResult<&str> { let (tail, ident) = assume!(identifier(src), src); let tail = tail.trim_start(); if tail.starts_with('=') && !tail.starts_with("=>") { ParseProgress::some(&tail[1..], ident) } else { ParseProgress::none(src) } } pub fn next_token(src: &str) -> ParseResult<Token> { if src.is_empty() { return ParseProgress::none(""); } let assign = assignment(src)?; if let Some(tok) = assign.token { return ParseProgress::some(assign.tail, Token::Assign(tok.to_owned())); } let id = identifier(src)?; if let Some(tok) = id.token { return ParseProgress::some(id.tail, Token::Id(tok.to_owned())); } let num = number(src)?; if let Some(tok) = num.token { return ParseProgress::some(num.tail, Token::Number(tok)); } if src.starts_with("=>") { return ParseProgress::some(&src[2..], Token::Func); } let tok = match src { _ if src.starts_with('+') => Token::Operator(Operator::Add), _ if src.starts_with('-') => Token::Operator(Operator::Sub), _ if src.starts_with('*') => Token::Operator(Operator::Mul), _ if src.starts_with('/') => Token::Operator(Operator::Div), _ if src.starts_with('%') => Token::Operator(Operator::Mod), _ if src.starts_with('(') => Token::LBracket, _ if src.starts_with(')') => Token::RBracket, _ => return Err(format!("Invalid token: {}", src)), }; ParseProgress::some(&src[1..], tok) } #[cfg(test)] mod test { use super::*; #[test] fn test_number() { assert_eq!(ParseProgress::none(""), number("")); assert_eq!(ParseProgress::none("tail"), number("tail")); assert_eq!(ParseProgress::some("", 10.0f32), number("10")); assert_eq!(ParseProgress::some("", 10.4f32), number("10.4")); assert_eq!(ParseProgress::some("tail", 10.4f32), number("10.4tail")); number("10.4.5").unwrap_err(); } #[test] fn test_identifier() { assert_eq!(ParseProgress::none(""), identifier("")); assert_eq!(ParseProgress::none("10"), identifier("10")); assert_eq!(ParseProgress::some("", "a"), identifier("a")); assert_eq!(ParseProgress::some("", "ab"), identifier("ab")); 
assert_eq!(ParseProgress::some("", "_ab"), identifier("_ab")); assert_eq!(ParseProgress::some(".", "_ab"), identifier("_ab.")); assert_eq!(ParseProgress::some("", "__"), identifier("__")); assert_eq!(ParseProgress::some("", "_1"), identifier("_1")); } #[test] fn test_assignment() { assert_eq!(ParseProgress::none(""), assignment("")); assert_eq!(ParseProgress::none("x"), assignment("x")); assert_eq!(ParseProgress::some("", "x"), assignment("x =")); assert_eq!(ParseProgress::none("x =>"), assignment("x =>")); } #[test] fn test_next_token() { assert_eq!(ParseProgress::none(""), next_token("")); assert_eq!( ParseProgress::some("", Token::Operator(Operator::Add)), next_token("+") ); assert_eq!( ParseProgress::some("", Token::Operator(Operator::Sub)), next_token("-") ); assert_eq!( ParseProgress::some("", Token::Operator(Operator::Mul)), next_token("*") ); assert_eq!( ParseProgress::some("", Token::Operator(Operator::Div)), next_token("/") ); assert_eq!( ParseProgress::some("", Token::Operator(Operator::Mod)), next_token("%") ); assert_eq!(ParseProgress::some("", Token::LBracket), next_token("(")); assert_eq!(ParseProgress::some("", Token::RBracket), next_token(")")); assert_eq!( ParseProgress::some("", Token::Assign("x".to_owned())), next_token("x =") ); assert_eq!(ParseProgress::some("", Token::Func), next_token("=>")); assert_eq!( ParseProgress::some("x", Token::Operator(Operator::Mod)), next_token("%x") ); assert_eq!( ParseProgress::some(" =>", Token::Id("x".to_owned())), next_token("x =>") ); next_token("10.0.4").unwrap_err(); next_token("=").unwrap_err(); } }
fn identifier(src: &str) -> ParseResult<&str> { if src.is_empty() { ParseProgress::none(src) } else if src.chars().next().unwrap().is_ascii_alphabetic() || src.starts_with('_') { let first_not = src .find(|c: char| -> bool { !(c == '_' || c.is_ascii_alphanumeric()) }) .unwrap_or_else(|| src.len()); let literal = &src[..first_not]; let tail = &src[first_not..]; ParseProgress::some(tail, literal) } else { ParseProgress::none(src) } }
function_block-full_function
[ { "content": "pub fn tokenize<'a>(mut src: &'a str) -> impl Iterator<Item = Result<Token>> + 'a {\n\n iter::from_fn(move || match next_token(src) {\n\n Ok(progress) => {\n\n src = progress.tail.trim_start();\n\n progress.token.map(Ok)\n\n }\n\n Err(err) => {\n\n src = \"\";\n\n Some(Err(err))\n\n }\n\n })\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n\n\n use super::*;\n\n\n\n #[test]\n\n fn empty() {\n", "file_path": "src/lexer.rs", "rank": 1, "score": 114890.0143290045 }, { "content": "type Result<T> = std::result::Result<T, String>;\n\n\n\nuse context::Context;\n\nuse lexer::{Operator, Token};\n\n\n", "file_path": "src/main.rs", "rank": 5, "score": 70301.1604499733 }, { "content": "fn run(line: &str, context: &mut Context) -> Result<Option<f32>> {\n\n let tokens: Result<Vec<_>> = lexer::tokenize(line).collect();\n\n let tokens = tokens?.into_iter();\n\n Ok(context.parse(tokens)?.evaluate(context, &[]))\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 6, "score": 66430.85334384831 }, { "content": "#[derive(Debug)]\n\nstruct CallExpr {\n\n func: Rc<dyn AST>,\n\n args: Vec<Box<dyn AST>>,\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct Function {\n\n pub name: String,\n\n pub arity: usize,\n\n pub expr: Rc<dyn AST>,\n\n}\n\n\n\nimpl AST for Terminal {\n\n fn as_any(&self) -> &dyn Any {\n\n self\n\n }\n\n\n\n fn is_same(&self, other: &dyn AST) -> bool {\n\n other\n\n .as_any()\n", "file_path": "src/parser.rs", "rank": 7, "score": 40705.08242595435 }, { "content": "#[derive(Debug)]\n\nstruct OpExpr {\n\n op: Operator,\n\n left: Box<dyn AST>,\n\n right: Box<dyn AST>,\n\n}\n\n\n", "file_path": "src/parser.rs", "rank": 8, "score": 40705.08242595435 }, { "content": "fn main() {\n\n let mut context = Context::new();\n\n stdin()\n\n .lock()\n\n .lines()\n\n .filter_map(|line| line.ok()) // Actually ignoring iostream errors\n\n .for_each(|line| match run(&line, &mut context) {\n\n Ok(Some(val)) => println!(\"= {}\", val),\n\n Ok(None) => println!(\"()\"),\n\n Err(err) => 
println!(\"Error: {}\", err),\n\n });\n\n}\n", "file_path": "src/main.rs", "rank": 9, "score": 39117.31580252733 }, { "content": "pub trait AST: std::fmt::Debug {\n\n fn as_any(&self) -> &dyn Any;\n\n fn is_same(&self, other: &dyn AST) -> bool;\n\n fn arity(&self) -> usize {\n\n 0\n\n }\n\n\n\n /// Used to return value if known without any context\n\n fn value(&self) -> Option<f32>;\n\n\n\n fn evaluate(&self, context: &mut Context, args: &[f32]) -> Option<f32>;\n\n}\n\n\n", "file_path": "src/parser.rs", "rank": 10, "score": 31643.6329700817 }, { "content": " pub fn parse(&self, tokens: impl Iterator<Item = Token>) -> Result<Box<dyn AST>> {\n\n let tokens: Vec<_> = tokens.collect();\n\n\n\n if tokens.contains(&Token::Func) {\n\n Function::parse(&mut tokens.into_iter().peekable(), self)\n\n } else {\n\n CallExpr::parse(&mut tokens.into_iter().peekable(), self)\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n\n\n use super::*;\n\n\n\n fn tokenize<'a>(src: &'a str) -> Peekable<impl Iterator<Item = Token> + 'a> {\n\n use crate::lexer::tokenize;\n\n\n\n tokenize(src).map(|t| t.unwrap()).peekable()\n", "file_path": "src/parser.rs", "rank": 12, "score": 6064.397851774183 }, { "content": " assert_eq!(Ok(expected), tokenize(src).collect());\n\n }\n\n\n\n #[test]\n\n fn invalid() {\n\n tokenize(\"^\").collect::<Result<Vec<_>, _>>().unwrap_err();\n\n }\n\n\n\n #[test]\n\n fn func() {\n\n let src = \"add x y => x + y\";\n\n let expected = vec![\n\n Token::Id(\"add\".to_owned()),\n\n Token::Id(\"x\".to_owned()),\n\n Token::Id(\"y\".to_owned()),\n\n Token::Func,\n\n Token::Id(\"x\".to_owned()),\n\n Token::Operator(Operator::Add),\n\n Token::Id(\"y\".to_owned()),\n\n ];\n\n\n\n assert_eq!(Ok(expected), tokenize(src).collect());\n\n }\n\n\n\n}\n", "file_path": "src/lexer.rs", "rank": 15, "score": 6063.264935279363 }, { "content": "\n\n pub fn get_var(&self, var: &str) -> Option<f32> {\n\n match self.symbols.get(var)? 
{\n\n Symbol::Variable(v) => Some(*v),\n\n _ => None,\n\n }\n\n }\n\n\n\n pub fn get_arg(&self, var: &str) -> Option<usize> {\n\n match self.symbols.get(var)? {\n\n Symbol::Argument(idx) => Some(*idx),\n\n _ => None,\n\n }\n\n }\n\n\n\n pub fn get_arity(&self, var: &str) -> Option<usize> {\n\n match self.symbols.get(var)? {\n\n Symbol::Function(arity, _) => Some(*arity),\n\n _ => None,\n\n }\n", "file_path": "src/context.rs", "rank": 17, "score": 6061.6746758687605 }, { "content": "use crate::{Context, Operator, Result, Token};\n\nuse std::any::Any;\n\nuse std::iter::Peekable;\n\nuse std::rc::Rc;\n\n\n", "file_path": "src/parser.rs", "rank": 18, "score": 6061.302210074465 }, { "content": " match tokens.next() {\n\n Some(Token::Number(x)) => Ok(Box::new(Terminal::Value(x))),\n\n Some(Token::LBracket) => {\n\n tokens.next();\n\n let expr = OpExpr::parse(tokens, context)?;\n\n if let Some(Token::RBracket) = tokens.peek() {\n\n tokens.next();\n\n Ok(expr)\n\n } else {\n\n Err(format!(\"Invalid token {:?}, expected `)`\", tokens.next()))\n\n }\n\n }\n\n Some(Token::Assign(var)) => {\n\n if context.is_var(&var) {\n\n let expr = CallExpr::parse(tokens, context)?;\n\n Ok(Box::new(Terminal::Assign(var, expr)))\n\n } else {\n\n Err(format!(\n\n \"Assigning to symbol which is not variable: {}\",\n\n var\n", "file_path": "src/parser.rs", "rank": 19, "score": 6061.255466909926 }, { "content": " assert_eq!(None, tokenize(\"\").next());\n\n }\n\n\n\n #[test]\n\n fn all_tokens() {\n\n let src = \"x 10.3 + - * / % () x = =>\";\n\n let expected = vec![\n\n Token::Id(\"x\".to_owned()),\n\n Token::Number(10.3),\n\n Token::Operator(Operator::Add),\n\n Token::Operator(Operator::Sub),\n\n Token::Operator(Operator::Mul),\n\n Token::Operator(Operator::Div),\n\n Token::Operator(Operator::Mod),\n\n Token::LBracket,\n\n Token::RBracket,\n\n Token::Assign(\"x\".to_owned()),\n\n Token::Func,\n\n ];\n\n\n", "file_path": "src/lexer.rs", "rank": 20, "score": 6061.18482095057 }, { "content": "use 
crate::combinators::next_token;\n\nuse crate::Result;\n\nuse std::iter;\n\n\n\n#[derive(Debug, PartialEq, Clone, Copy)]\n\npub enum Operator {\n\n Add,\n\n Sub,\n\n Mul,\n\n Div,\n\n Mod,\n\n}\n\n\n\nimpl Operator {\n\n pub fn eval(self, left: f32, right: f32) -> f32 {\n\n match self {\n\n Operator::Add => left + right,\n\n Operator::Sub => left - right,\n\n Operator::Mul => left * right,\n\n Operator::Div => left / right,\n", "file_path": "src/lexer.rs", "rank": 21, "score": 6060.9230152001155 }, { "content": " Some(id)\n\n }\n\n _ => None,\n\n }\n\n }\n\n\n\n fn parse(\n\n tokens: &mut Peekable<impl Iterator<Item = Token>>,\n\n context: &Context,\n\n ) -> Result<Box<dyn AST>> {\n\n let name = Self::get_id(tokens).ok_or_else(|| format!(\n\n \"Expected function name, but got: {:?}\",\n\n tokens.peek()\n\n ))?;\n\n\n\n if !context.is_func(&name) {\n\n return Err(format!(\n\n \"Expected function name, but got not function id: {}\",\n\n name\n\n ));\n", "file_path": "src/parser.rs", "rank": 22, "score": 6060.718830753444 }, { "content": " tokens.next();\n\n Some(Operator::Mod)\n\n }\n\n _ => None,\n\n }\n\n }\n\n\n\n fn parse_multiplicative(\n\n tokens: &mut Peekable<impl Iterator<Item = Token>>,\n\n context: &Context,\n\n ) -> Result<Box<dyn AST>> {\n\n let mut result = Terminal::parse(tokens, context)?;\n\n\n\n while let Some(op) = Self::get_next_multiplicative(tokens) {\n\n let right = Terminal::parse(tokens, context)?;\n\n result = Box::new(OpExpr {\n\n op,\n\n left: result,\n\n right,\n\n });\n", "file_path": "src/parser.rs", "rank": 23, "score": 6060.62453405667 }, { "content": "\n\n if let Some(val) = result.value() {\n\n result = Box::new(Terminal::Value(val))\n\n }\n\n }\n\n\n\n Ok(result)\n\n }\n\n\n\n fn get_next_additive(tokens: &mut Peekable<impl Iterator<Item = Token>>) -> Option<Operator> {\n\n match tokens.peek() {\n\n Some(Token::Operator(Operator::Add)) => {\n\n tokens.next();\n\n Some(Operator::Add)\n\n }\n\n Some(Token::Operator(Operator::Sub)) => 
{\n\n tokens.next();\n\n Some(Operator::Sub)\n\n }\n\n _ => None,\n", "file_path": "src/parser.rs", "rank": 24, "score": 6060.5931792568545 }, { "content": " ))\n\n }\n\n }\n\n Some(Token::Id(var)) => {\n\n if let Some(var) = context.get_var(&var) {\n\n Ok(Box::new(Terminal::Value(var)))\n\n } else if let Some(var) = context.get_arg(&var) {\n\n Ok(Box::new(Terminal::Argument(var)))\n\n } else {\n\n Err(format!(\n\n \"Non variable symbol as terminal token occured: {}\",\n\n var\n\n ))\n\n }\n\n }\n\n Some(token) => Err(format!(\n\n \"Unexpected token while parsing terminal expression: {:?}\",\n\n token\n\n )),\n\n None => {\n", "file_path": "src/parser.rs", "rank": 25, "score": 6060.443418568588 }, { "content": " }\n\n\n\n pub fn get_func(&self, var: &str) -> Option<Rc<dyn AST>> {\n\n match self.symbols.get(var)? {\n\n Symbol::Function(_, expr) => Some(expr.clone()),\n\n _ => None,\n\n }\n\n }\n\n}\n", "file_path": "src/context.rs", "rank": 26, "score": 6060.432352042332 }, { "content": " } else {\n\n false\n\n }\n\n }\n\n\n\n fn value(&self) -> Option<f32> {\n\n None\n\n }\n\n\n\n fn evaluate(&self, context: &mut Context, _args: &[f32]) -> Option<f32> {\n\n context.update_func(self);\n\n None\n\n }\n\n}\n\n\n\nimpl Terminal {\n\n fn parse(\n\n tokens: &mut Peekable<impl Iterator<Item = Token>>,\n\n context: &Context,\n\n ) -> Result<Box<dyn AST>> {\n", "file_path": "src/parser.rs", "rank": 27, "score": 6059.924235223571 }, { "content": " let name = f.clone();\n\n tokens.next();\n\n Some(name)\n\n } else {\n\n None\n\n }\n\n } else {\n\n None\n\n }\n\n }\n\n\n\n fn parse(\n\n tokens: &mut Peekable<impl Iterator<Item = Token>>,\n\n context: &Context,\n\n ) -> Result<Box<dyn AST>> {\n\n if let Some(name) = Self::get_func(tokens, context) {\n\n let arity = context.get_arity(&name).unwrap_or(0);\n\n let func = context\n\n .get_func(&name)\n\n .ok_or_else(|| format!(\"No function named {}\", name))?;\n", "file_path": "src/parser.rs", "rank": 28, "score": 
6059.895767679652 }, { "content": " }\n\n }\n\n\n\n fn parse_additive(\n\n tokens: &mut Peekable<impl Iterator<Item = Token>>,\n\n context: &Context,\n\n ) -> Result<Box<dyn AST>> {\n\n let mut result = Self::parse_multiplicative(tokens, context)?;\n\n\n\n while let Some(op) = Self::get_next_additive(tokens) {\n\n let right = Self::parse_multiplicative(tokens, context)?;\n\n result = Box::new(OpExpr {\n\n op,\n\n left: result,\n\n right,\n\n });\n\n\n\n if let Some(val) = result.value() {\n\n result = Box::new(Terminal::Value(val))\n\n }\n", "file_path": "src/parser.rs", "rank": 29, "score": 6059.4222925640315 }, { "content": " Operator::Mod => ((left as i64) % (right as i64)) as f32,\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug, PartialEq)]\n\npub enum Token {\n\n Id(String),\n\n Number(f32),\n\n Operator(Operator),\n\n LBracket,\n\n RBracket,\n\n Assign(String), // Assignment is actually bitoken including variable which is assigned to\n\n Func, // =>\n\n}\n\n\n", "file_path": "src/lexer.rs", "rank": 31, "score": 6058.994686346924 }, { "content": " }\n\n\n\n Ok(result)\n\n }\n\n\n\n fn parse(\n\n tokens: &mut Peekable<impl Iterator<Item = Token>>,\n\n context: &Context,\n\n ) -> Result<Box<dyn AST>> {\n\n Self::parse_additive(tokens, context)\n\n }\n\n}\n\n\n\nimpl CallExpr {\n\n fn get_func(\n\n tokens: &mut Peekable<impl Iterator<Item = Token>>,\n\n context: &Context,\n\n ) -> Option<String> {\n\n if let Some(Token::Id(f)) = tokens.peek() {\n\n if context.is_func(f) {\n", "file_path": "src/parser.rs", "rank": 32, "score": 6058.938041352864 }, { "content": "\n\n pub fn update_func(&mut self, func: &Function) {\n\n self.symbols\n\n .entry(func.name.clone())\n\n .and_modify(|v|\n\n if let Symbol::Function(ref mut arity, ref mut expr) = v {\n\n *arity = func.arity;\n\n *expr = func.expr.clone();\n\n }\n\n )\n\n .or_insert_with(|| Symbol::Function(func.arity, func.expr.clone()));\n\n }\n\n\n\n pub fn is_var(&self, var: &str) -> bool {\n\n self.symbols.get(var).map_or(true, 
Symbol::is_var)\n\n }\n\n\n\n pub fn is_func(&self, var: &str) -> bool {\n\n self.symbols.get(var).map_or(true, Symbol::is_func)\n\n }\n", "file_path": "src/context.rs", "rank": 33, "score": 6058.694674469651 }, { "content": " Err(\"Unexpected end of tokens list while parsing terminal expression\".to_owned())\n\n }\n\n }\n\n }\n\n}\n\n\n\nimpl OpExpr {\n\n fn get_next_multiplicative(\n\n tokens: &mut Peekable<impl Iterator<Item = Token>>,\n\n ) -> Option<Operator> {\n\n match tokens.peek() {\n\n Some(Token::Operator(Operator::Mul)) => {\n\n tokens.next();\n\n Some(Operator::Mul)\n\n }\n\n Some(Token::Operator(Operator::Div)) => {\n\n tokens.next();\n\n Some(Operator::Div)\n\n }\n\n Some(Token::Operator(Operator::Mod)) => {\n", "file_path": "src/parser.rs", "rank": 35, "score": 6058.400494049218 }, { "content": " }\n\n\n\n #[test]\n\n fn test_terminal_number() {\n\n let number = Terminal::parse(&mut tokenize(\"10\"), &Context::new()).unwrap();\n\n let expected = Terminal::Value(10.0);\n\n assert!(expected.is_same(number.as_ref()));\n\n }\n\n\n\n #[test]\n\n fn test_terminal_assignment() {\n\n let assign = Terminal::parse(&mut tokenize(\"a = 10 + 2\"), &Context::new()).unwrap();\n\n let expected = Terminal::Assign(\"a\".to_string(), Box::new(Terminal::Value(12.0)));\n\n assert!(expected.is_same(assign.as_ref()));\n\n\n\n let assign = OpExpr::parse(&mut tokenize(\"2 + a = 10\"), &Context::new()).unwrap();\n\n let expected = OpExpr {\n\n op: Operator::Add,\n\n left: Box::new(Terminal::Value(2.0)),\n\n right: Box::new(Terminal::Assign(\n", "file_path": "src/parser.rs", "rank": 36, "score": 6058.227983937644 }, { "content": " }\n\n\n\n let mut args = vec![];\n\n while let Some(arg) = Self::get_id(tokens) {\n\n args.push(arg.clone());\n\n }\n\n\n\n if tokens.next() != Some(Token::Func) {\n\n return Err(\"Expected => token\".to_string());\n\n }\n\n\n\n let arity = args.len();\n\n let ctx = Context::function_ctx(args, context);\n\n let expr = CallExpr::parse(tokens, 
&ctx)?.into();\n\n\n\n Ok(Box::new(Function { name, arity, expr }))\n\n }\n\n}\n\n\n\nimpl Context {\n", "file_path": "src/parser.rs", "rank": 37, "score": 6058.143612400842 }, { "content": "use crate::parser::{Function, AST};\n\nuse std::collections::HashMap;\n\nuse std::rc::Rc;\n\n\n\n#[derive(Clone)]\n", "file_path": "src/context.rs", "rank": 38, "score": 6057.441080260375 }, { "content": "mod combinators;\n\nmod context;\n\nmod lexer;\n\nmod parser;\n\n\n\nuse std;\n\nuse std::io::{stdin, BufRead};\n\n\n", "file_path": "src/main.rs", "rank": 39, "score": 6057.150212412584 }, { "content": "\n\n let mut args = vec![];\n\n for _ in 0..arity {\n\n let arg = CallExpr::parse(tokens, context)?;\n\n args.push(arg);\n\n }\n\n\n\n Ok(Box::new(CallExpr { func, args }))\n\n } else {\n\n OpExpr::parse(tokens, context)\n\n }\n\n }\n\n}\n\n\n\nimpl Function {\n\n fn get_id(tokens: &mut Peekable<impl Iterator<Item = Token>>) -> Option<String> {\n\n match tokens.peek() {\n\n Some(Token::Id(id)) => {\n\n let id = id.clone();\n\n tokens.next();\n", "file_path": "src/parser.rs", "rank": 41, "score": 6056.944935364486 }, { "content": "}\n\n\n\npub struct Context {\n\n symbols: HashMap<String, Symbol>,\n\n}\n\n\n\nimpl Context {\n\n pub fn new() -> Self {\n\n Context {\n\n symbols: HashMap::new(),\n\n }\n\n }\n\n\n\n pub fn function_ctx(args: Vec<String>, parent: &Context) -> Self {\n\n let functions = parent\n\n .symbols\n\n .iter()\n\n .filter(|(_, item)| item.is_func())\n\n .map(|(name, item)| (name.clone(), item.clone()));\n\n\n", "file_path": "src/context.rs", "rank": 42, "score": 6056.8806490765655 }, { "content": "\n\n let expr = OpExpr::parse_additive(&mut tokenize(\"10 + 2\"), &Context::new()).unwrap();\n\n\n\n let expected = Terminal::Value(12.0);\n\n assert!(expected.is_same(expr.as_ref()));\n\n\n\n let expr = OpExpr::parse_additive(&mut tokenize(\"10 - 2\"), &Context::new()).unwrap();\n\n\n\n let expected = Terminal::Value(8.0);\n\n 
assert!(expected.is_same(expr.as_ref()));\n\n\n\n let expr = OpExpr::parse_additive(&mut tokenize(\"11 + 2 - 5\"), &Context::new()).unwrap();\n\n\n\n let expected = Terminal::Value(8.0f32);\n\n assert!(expected.is_same(expr.as_ref()));\n\n\n\n let expr =\n\n OpExpr::parse_additive(&mut tokenize(\"10 * 3 - 6 / 2\"), &Context::new()).unwrap();\n\n\n\n let expected = Terminal::Value(27.0);;\n\n assert!(expected.is_same(expr.as_ref()));\n\n }\n\n}\n", "file_path": "src/parser.rs", "rank": 43, "score": 6056.378003880356 }, { "content": " .downcast_ref::<Self>()\n\n .map_or(false, |o| match (self, o) {\n\n (Terminal::Value(x), Terminal::Value(y)) => (x - y).abs() < 0.001,\n\n (Terminal::Assign(v1, val1), Terminal::Assign(v2, val2)) => {\n\n v1 == v2 && val1.is_same(val2.as_ref())\n\n }\n\n _ => false,\n\n })\n\n }\n\n\n\n fn value(&self) -> Option<f32> {\n\n match self {\n\n Terminal::Value(v) => Some(*v),\n\n Terminal::Assign(_, _) => None,\n\n Terminal::Argument(_) => None,\n\n }\n\n }\n\n\n\n fn evaluate(&self, context: &mut Context, args: &[f32]) -> Option<f32> {\n\n match self {\n", "file_path": "src/parser.rs", "rank": 44, "score": 6056.332303129187 }, { "content": " \"a\".to_string(),\n\n Box::new(Terminal::Value(10.0)),\n\n )),\n\n };\n\n assert!(expected.is_same(assign.as_ref()));\n\n }\n\n\n\n #[test]\n\n fn text_op_expr_mul() {\n\n let expr = OpExpr::parse_multiplicative(&mut tokenize(\"10\"), &Context::new()).unwrap();\n\n let expected = Terminal::Value(10.0);\n\n assert!(expected.is_same(expr.as_ref()));\n\n\n\n let expr = OpExpr::parse_multiplicative(&mut tokenize(\"10 * 2\"), &Context::new()).unwrap();\n\n\n\n let expected = Terminal::Value(20.0);\n\n assert!(expected.is_same(expr.as_ref()));\n\n\n\n let expr = OpExpr::parse_multiplicative(&mut tokenize(\"10 / 2\"), &Context::new()).unwrap();\n\n\n", "file_path": "src/parser.rs", "rank": 45, "score": 6056.275088706224 }, { "content": " let expected = Terminal::Value(5.0);\n\n 
assert!(expected.is_same(expr.as_ref()));\n\n\n\n let expr = OpExpr::parse_multiplicative(&mut tokenize(\"10 % 2\"), &Context::new()).unwrap();\n\n\n\n let expected = Terminal::Value(0.0);\n\n assert!(expected.is_same(expr.as_ref()));\n\n\n\n let expr =\n\n OpExpr::parse_multiplicative(&mut tokenize(\"11 % 2 * 5 / 3\"), &Context::new()).unwrap();\n\n\n\n let expected = Terminal::Value(5.0f32 / 3.0f32);\n\n assert!(expected.is_same(expr.as_ref()));\n\n }\n\n\n\n #[test]\n\n fn text_op_expr_add() {\n\n let expr = OpExpr::parse_additive(&mut tokenize(\"10\"), &Context::new()).unwrap();\n\n let expected = Terminal::Value(10.0);\n\n assert!(expected.is_same(expr.as_ref()));\n", "file_path": "src/parser.rs", "rank": 46, "score": 6056.198271282566 }, { "content": "\n\n Some(self.op.eval(left, right))\n\n }\n\n}\n\n\n\nimpl AST for CallExpr {\n\n fn as_any(&self) -> &dyn Any {\n\n self\n\n }\n\n\n\n fn is_same(&self, other: &dyn AST) -> bool {\n\n other.as_any().downcast_ref::<Self>().is_some()\n\n }\n\n\n\n fn value(&self) -> Option<f32> {\n\n None\n\n }\n\n\n\n fn evaluate(&self, context: &mut Context, args: &[f32]) -> Option<f32> {\n\n let args: Option<Vec<_>> = self\n", "file_path": "src/parser.rs", "rank": 47, "score": 6056.0233791986875 }, { "content": " let args = args\n\n .into_iter()\n\n .enumerate()\n\n .map(|(idx, var)| (var, Symbol::Argument(idx)));\n\n\n\n let symbols = functions.chain(args).collect();\n\n\n\n Self { symbols }\n\n }\n\n\n\n pub fn update_var(&mut self, var: impl ToString, val: f32) {\n\n self.symbols\n\n .entry(var.to_string())\n\n .and_modify(|v|\n\n if let Symbol::Variable(ref mut v) = v {\n\n *v = val;\n\n }\n\n )\n\n .or_insert(Symbol::Variable(val));\n\n }\n", "file_path": "src/context.rs", "rank": 48, "score": 6055.952884236147 }, { "content": " && self.right.is_same(other.right.as_ref())\n\n } else {\n\n false\n\n }\n\n }\n\n\n\n fn value(&self) -> Option<f32> {\n\n let (left, right) = (self.left.value(), self.right.value());\n\n if let 
(Some(left), Some(right)) = (left, right) {\n\n Some(self.op.eval(left, right))\n\n } else {\n\n None\n\n }\n\n }\n\n\n\n fn evaluate(&self, context: &mut Context, args: &[f32]) -> Option<f32> {\n\n let (left, right) = (\n\n self.left.evaluate(context, args)?,\n\n self.right.evaluate(context, args)?,\n\n );\n", "file_path": "src/parser.rs", "rank": 49, "score": 6055.8276098567785 }, { "content": " Terminal::Value(v) => Some(*v),\n\n Terminal::Assign(var, val) => {\n\n let val = val.evaluate(context, args)?;\n\n context.update_var(var, val);\n\n Some(val)\n\n }\n\n Terminal::Argument(arg) => args.get(*arg).cloned(),\n\n }\n\n }\n\n}\n\n\n\nimpl AST for OpExpr {\n\n fn as_any(&self) -> &dyn Any {\n\n self\n\n }\n\n\n\n fn is_same(&self, other: &dyn AST) -> bool {\n\n if let Some(other) = other.as_any().downcast_ref::<Self>() {\n\n self.op == other.op\n\n && self.left.is_same(other.left.as_ref())\n", "file_path": "src/parser.rs", "rank": 50, "score": 6054.4801947328615 }, { "content": " .args\n\n .iter()\n\n .map(|arg| arg.evaluate(context, args))\n\n .collect();\n\n let args = args?;\n\n\n\n self.func.evaluate(context, &args)\n\n }\n\n}\n\n\n\nimpl AST for Function {\n\n fn as_any(&self) -> &dyn Any {\n\n self\n\n }\n\n\n\n fn is_same(&self, other: &dyn AST) -> bool {\n\n if let Some(other) = other.as_any().downcast_ref::<Self>() {\n\n self.name == other.name\n\n && self.arity == other.arity\n\n && self.expr.is_same(other.expr.as_ref())\n", "file_path": "src/parser.rs", "rank": 51, "score": 6054.4801947328615 }, { "content": "#[derive(Debug)]\n\nenum Terminal {\n\n Value(f32), // Literal or substituted variable value\n\n Assign(String, Box<dyn AST>),\n\n Argument(usize), // Function argument of given index\n\n}\n\n\n", "file_path": "src/parser.rs", "rank": 52, "score": 5651.061630726335 }, { "content": "#[derive(Clone)]\n\nenum Symbol {\n\n Variable(f32),\n\n Function(usize, Rc<dyn AST>),\n\n Argument(usize),\n\n}\n\n\n\nimpl Symbol {\n\n fn is_var(&self) -> bool 
{\n\n match self {\n\n Symbol::Variable(_) => true,\n\n _ => false,\n\n }\n\n }\n\n\n\n fn is_func(&self) -> bool {\n\n match self {\n\n Symbol::Function(_, _) => true,\n\n _ => false,\n\n }\n\n }\n", "file_path": "src/context.rs", "rank": 53, "score": 5651.061630726335 }, { "content": "I did couple changes (clarifications?) in this language comparing to kata, just to make the language more consistent.\n\n\n\nUsage: just `cargo run` and put expressions. No scripts implemented, no session saving. It is just a toy excercise, because why not.\n\n\n\n## Assignment\n\nIt is clear, that assignment itself is an expression and can be chained like:\n\n```\n\na = b = 3\n\n```\n\n\n\nAlso if `b` wasnt assigned before, this is pretty obvious:\n\n```\n\na = 3 + b = 4\n\n```\n\n\n\nbecause `3 + b` is not valid expression. However, if `b` was assigned before, `3 + b` becomes valid on itself, and due to operator precedense it should be calculated before considering before assignment, but it creates semanticaly invalid code, because result of `3 + b` is not valid left side of assignment. According to comments on Kata, this should throw an error, but I don't like this, so I made not-symetrical precedense of `=` - it has the lower precedense that everything on right side, but higher than everything on left side.\n\n\n\n## Function call from another function\n\n\n\nThere is not clear if function may be called from another function. 
Logically it should be, but there is a problem: functions may be overwritten, with change of their signature (or arity), which may cause functions which were calling it before possibly invalid.\n\n\n\nOn the other hand if functions would be treaten in same way as variables, its clear, that functions may not call other function, because they cannot be passed in arguments (functions aren't values in this language).\n\n\n\nFrom comments on Kata its clear, that there are no tests calling function from another functions, so secons approach would make things easy, but I don't like this approach - it is inconvinient. I decided to just inline all functions called from other functions.\n\n\n\n# Syntax\n\n\n\n## Expressions:\n\n```\n\n2 + 3\n\n= 5\n\n4 - 3\n\n= 1\n\n2 * 0.75\n\n= 1\n\n3 / 2\n\n= 1\n\n3 % 2\n\n= 1\n\n```\n\n\n\n## Variables\n\n```\n\na = 4\n\n= 4\n\na\n\n= 4\n\na + 1\n\n= 5\n\na\n\n= 4\n\na = a + 1\n\n= 5\n\na\n\n= 5\n\n```\n\n\n\n## Functions\n\n```\n\nadd a b => a + b\n\n()\n\nadd 1 2\n\n= 3\n\nadd add 1 2 3\n\n= 6\n\nadd3 a b c => add add a b c\n\n= ()\n\nadd3 1 2 3\n\n= 6\n\n```\n", "file_path": "README.md", "rank": 54, "score": 1.6277745242505657 } ]
Rust
kq/tests/accessor_multiple/mod.rs
jihchi/kq
58bb05a44e0ceca6b8237bda63c5403a74a80a0c
use assert_cmd::Command; use indoc::indoc; const INPUT: &str = include_str!("./website.kdl"); #[test] fn top_descendant_any_element() { Command::cargo_bin("kq") .unwrap() .arg("top() []") .write_stdin(indoc! {r#" name "CI" jobs { fmt_and_docs "Check fmt & build docs" build_and_test "Build & Test" { strategy { matrix { os "ubuntu-latest" "macOS-latest" "windows-latest" } } } } "#}) .assert() .success() .stdout(indoc! {r#" name "CI" jobs { fmt_and_docs "Check fmt & build docs" build_and_test "Build & Test" { strategy { matrix { os "ubuntu-latest" "macOS-latest" "windows-latest" } } } } "#}); } #[test] fn top_child_any_element() { Command::cargo_bin("kq") .unwrap() .arg("top() > []") .write_stdin(indoc! {r#" name "CI" jobs { fmt_and_docs "Check fmt & build docs" build_and_test "Build & Test" { strategy { matrix { os "ubuntu-latest" "macOS-latest" "windows-latest" } } } } "#}) .assert() .success() .stdout(indoc! {r#" name "CI" jobs { fmt_and_docs "Check fmt & build docs" build_and_test "Build & Test" { strategy { matrix { os "ubuntu-latest" "macOS-latest" "windows-latest" } } } } "#}); } #[test] fn descendant_child() { Command::cargo_bin("kq") .unwrap() .arg("html > body section > h2") .write_stdin(INPUT) .assert() .success() .stdout(indoc! {r#" h2 "Design and Discussion" h2 "Design Principles" "#}); } #[test] fn general_sibling() { Command::cargo_bin("kq") .unwrap() .arg("html > head meta ~ title") .write_stdin(INPUT) .assert() .success() .stdout(indoc! {r#" title "kdl - Kat's Document Language" "#}); } #[test] fn adjacent_sibling() { Command::cargo_bin("kq") .unwrap() .arg("html body h2 + ol") .write_stdin(INPUT) .assert() .success() .stdout(indoc! 
{r#" ol { li "Maintainability" li "Flexibility" li "Cognitive simplicity and Learnability" li "Ease of de\/serialization" li "Ease of implementation" } "#}); } #[test] fn general_adjacent_siblings() { Command::cargo_bin("kq") .unwrap() .arg("html > head meta ~ title + link") .write_stdin(INPUT) .assert() .success() .stdout(predicates::str::starts_with("link")) .stdout(predicates::str::contains(r#"href="\/styles\/global.css""#)) .stdout(predicates::str::contains(r#"rel="stylesheet""#)); } #[test] fn adjacent_general_siblings() { Command::cargo_bin("kq") .unwrap() .arg("html > head meta + meta ~ link") .write_stdin(INPUT) .assert() .success() .stdout(predicates::str::starts_with("link")) .stdout(predicates::str::contains(r#"href="\/styles\/global.css""#)) .stdout(predicates::str::contains(r#"rel="stylesheet""#)); } #[test] fn complex_single_level() { Command::cargo_bin("kq") .unwrap() .arg(r#"li[val() = "Flexibility"] + [val() = "Cognitive simplicity and Learnability"] ~ [val() = "Ease of implementation"]"#) .write_stdin(INPUT) .assert() .success() .stdout(indoc! {r#" li "Ease of implementation" "#}); } #[test] fn complex_nested() { Command::cargo_bin("kq") .unwrap() .arg(r#"header + section[prop(id) = "description"] ~ section[class = "kdl-section"] ol > li"#) .write_stdin(INPUT) .assert() .success() .stdout(indoc! {r#" li "Maintainability" li "Flexibility" li "Cognitive simplicity and Learnability" li "Ease of de\/serialization" li "Ease of implementation" "#}); }
use assert_cmd::Command; use indoc::indoc; const INPUT: &str = include_str!("./website.kdl"); #[test] fn top_descendant_any_element() { Command::cargo_bin("kq") .unwrap() .arg("top() []") .write_stdin(indoc! {r#" name "CI" jobs { fmt_and_docs "Check fmt & build docs" build_and_test "Build & Test" { strategy { matrix { os "ubuntu-latest" "macOS-latest" "windows-latest" } } } } "#}) .assert() .success() .stdout(indoc! {r#" name "CI" jobs { fmt_and_docs "Check fmt & build docs" build_and_test "Build & Test" { strategy { matrix { os "ubuntu-latest" "macOS-latest" "windows-latest" } } } } "#}); } #[test] fn top_child_any_element() { Command::cargo_bin("kq") .unwrap() .arg("top() > []") .write_stdin(indoc! {r#" name "CI" jobs { fmt_and_docs "Check fmt & build docs" build_and_test "Build & Test" { strategy { matrix { os "ubuntu-latest" "macOS-latest" "windows-latest" } } } } "#}) .assert() .success() .stdout(indoc! {r#" name "CI" jobs { fmt_and_docs "Check fmt & build docs" build_and_test "Build & Test" { strategy { matrix { os "ubuntu-latest" "macOS-latest" "windows-latest" } } } } "#}); } #[test] fn descendant_child() { Command::cargo_bin("kq") .unwrap() .arg("html > body section > h2") .write_stdin(INPUT) .assert() .success() .stdout(indoc! {r#" h2 "Design and Discussion" h2 "Design Principles" "#}); } #[test] fn general_sibling() { Command::cargo_bin("kq") .unwrap() .arg("html > head meta ~ title") .write_stdin(INPUT) .assert() .success() .stdout(indoc! {r#" title "kdl - Kat's Document Language" "#}); } #[test] fn adjacent_sibling() { Command::cargo_bin("kq") .unwrap() .arg("html body h2 + ol") .write_stdin(INPUT) .assert() .success() .stdout(indoc! 
{r#" ol { li "Maintainability" li "Flexibility" li "Cognitive simplicity and Learnability" li "Ease of de\/serialization" li "Ease of implementation" } "#}); } #[test] fn general_adjacent_siblings() { Command::cargo_bin("kq") .unwrap() .arg("html > head meta ~ title + link") .write_stdin(INPUT) .assert() .success() .stdout(predicates::str::starts_with("link")) .stdout(predicates::str::contains(r#"href="\/styles\/global.css""#)) .stdout(predicates::str::contains(r#"rel="stylesheet""#)); } #[test]
#[test] fn complex_single_level() { Command::cargo_bin("kq") .unwrap() .arg(r#"li[val() = "Flexibility"] + [val() = "Cognitive simplicity and Learnability"] ~ [val() = "Ease of implementation"]"#) .write_stdin(INPUT) .assert() .success() .stdout(indoc! {r#" li "Ease of implementation" "#}); } #[test] fn complex_nested() { Command::cargo_bin("kq") .unwrap() .arg(r#"header + section[prop(id) = "description"] ~ section[class = "kdl-section"] ol > li"#) .write_stdin(INPUT) .assert() .success() .stdout(indoc! {r#" li "Maintainability" li "Flexibility" li "Cognitive simplicity and Learnability" li "Ease of de\/serialization" li "Ease of implementation" "#}); }
fn adjacent_general_siblings() { Command::cargo_bin("kq") .unwrap() .arg("html > head meta + meta ~ link") .write_stdin(INPUT) .assert() .success() .stdout(predicates::str::starts_with("link")) .stdout(predicates::str::contains(r#"href="\/styles\/global.css""#)) .stdout(predicates::str::contains(r#"rel="stylesheet""#)); }
function_block-full_function
[ { "content": "pub fn query_document(input: &str, document: Vec<KdlNode>) -> Result<Vec<KdlNode>, String> {\n\n let input = input.trim();\n\n if input.is_empty() {\n\n Ok(document)\n\n } else {\n\n all_consuming(parser::selector)(input)\n\n .finish()\n\n .map(|(_input, selector)| query_by_selector(selector, document))\n\n .map_err(|error| error.to_string())\n\n }\n\n}\n\n\n", "file_path": "kq/src/lib.rs", "rank": 0, "score": 193314.20404780286 }, { "content": "/// https://github.com/kdl-org/kdl-rs/blob/v3.0.0/src/parser.rs#L384-L390\n\n/// `boolean := 'true' | 'false'`\n\n///\n\n// fn boolean(input: &str) -> IResult<&str, KdlValue, KdlParseError<&str>> {\n\nfn boolean(input: &str) -> IResult<&str, KdlValue> {\n\n alt((\n\n value(KdlValue::Boolean(true), tag(\"true\")),\n\n value(KdlValue::Boolean(false), tag(\"false\")),\n\n ))(input)\n\n}\n\n\n", "file_path": "kq/src/kdlrs.rs", "rank": 1, "score": 179682.94951895133 }, { "content": "/// https://github.com/kdl-org/kdl-rs/blob/v3.0.0/src/parser.rs#L280-L289\n\n/// `number := decimal | hex | octal | binary`\n\n///\n\n// fn number(input: &str) -> IResult<&str, KdlValue, KdlParseError<&str>> {\n\nfn number(input: &str) -> IResult<&str, KdlValue> {\n\n alt((\n\n map(hexadecimal, KdlValue::Int),\n\n map(octal, KdlValue::Int),\n\n map(binary, KdlValue::Int),\n\n map(float, KdlValue::Float),\n\n map(integer, KdlValue::Int),\n\n ))(input)\n\n}\n\n\n", "file_path": "kq/src/kdlrs.rs", "rank": 2, "score": 179682.66603846452 }, { "content": "fn filter_by_identifier(identifier: &str, document: &[KdlNode]) -> Vec<KdlNode> {\n\n document\n\n .iter()\n\n .filter(|node| node.name == *identifier)\n\n .cloned()\n\n .collect()\n\n}\n\n\n", "file_path": "kq/src/lib.rs", "rank": 3, "score": 159986.58429388647 }, { "content": "/// https://github.com/kdl-org/kdl-rs/blob/v3.0.0/src/parser.rs#L450-L471\n\n///\n\n// fn unicode_space(input: &str) -> IResult<&str, &str, KdlParseError<&str>> {\n\nfn unicode_space(input: &str) -> IResult<&str, 
&str> {\n\n alt((\n\n tag(\" \"),\n\n tag(\"\\t\"),\n\n tag(\"\\u{00A0}\"),\n\n tag(\"\\u{1680}\"),\n\n tag(\"\\u{2000}\"),\n\n tag(\"\\u{2001}\"),\n\n tag(\"\\u{2002}\"),\n\n tag(\"\\u{2003}\"),\n\n tag(\"\\u{2004}\"),\n\n tag(\"\\u{2005}\"),\n\n tag(\"\\u{2006}\"),\n\n tag(\"\\u{2007}\"),\n\n tag(\"\\u{2008}\"),\n\n tag(\"\\u{2009}\"),\n\n tag(\"\\u{200A}\"),\n\n tag(\"\\u{202F}\"),\n\n tag(\"\\u{205F}\"),\n\n tag(\"\\u{3000}\"),\n\n ))(input)\n\n}\n\n\n", "file_path": "kq/src/kdlrs.rs", "rank": 4, "score": 158500.82229473582 }, { "content": "/// https://github.com/kdl-org/kdl-rs/blob/v3.0.0/src/parser.rs#L118-L122\n\n/// `identifier_char := unicode - linespace - [\\/(){}<>;[]=,\"]\n\n///\n\n// fn identifier_char(input: &str) -> IResult<&str, &str, KdlParseError<&str>> {\n\nfn identifier_char(input: &str) -> IResult<&str, &str> {\n\n not(linespace)(input)?;\n\n recognize(none_of(r#\"\\/(){}<>;[]=,\"\"#))(input)\n\n}\n\n\n", "file_path": "kq/src/kdlrs.rs", "rank": 5, "score": 158500.20592983032 }, { "content": "/// https://github.com/kdl-org/kdl-rs/blob/v3.0.0/src/parser.rs#L413-L422\n\n/// `commented-block := '*/' | (multi-line-comment | '*' | '/' | [^*/]+) commented-block`\n\n///\n\n// fn commented_block(input: &str) -> IResult<&str, &str, KdlParseError<&str>> {\n\nfn commented_block(input: &str) -> IResult<&str, &str> {\n\n alt((\n\n tag(\"*/\"),\n\n terminated(\n\n alt((multi_line_comment, take_until1(\"*/\"), tag(\"*\"), tag(\"/\"))),\n\n commented_block,\n\n ),\n\n ))(input)\n\n}\n\n\n", "file_path": "kq/src/kdlrs.rs", "rank": 6, "score": 158499.80493689456 }, { "content": "/// https://github.com/kdl-org/kdl-rs/blob/v3.0.0/src/parser.rs#L124-L142\n\n/// `bare_identifier := ((identifier-char - digit - sign) identifier-char* | sign ((identifier-char - digit) identifier-char*)?) 
- keyword`\n\n///\n\n// fn bare_identifier(input: &str) -> IResult<&str, &str, KdlParseError<&str>>> {\n\nfn bare_identifier(input: &str) -> IResult<&str, &str> {\n\n // fn left(input: &str) -> IResult<&str, (), KdlParseError<&str>> {\n\n fn left(input: &str) -> IResult<&str, ()> {\n\n not(keyword)(input)?;\n\n not(one_of(\"0123456789\"))(input)?;\n\n not(one_of(\"+-\"))(input)?;\n\n let (input, _) = identifier_char(input)?;\n\n let (input, _) = many0(identifier_char)(input)?;\n\n Ok((input, ()))\n\n }\n\n // fn right(input: &str) -> IResult<&str, (), KdlParseError<&str>> {\n\n fn right(input: &str) -> IResult<&str, ()> {\n\n let (input, _) = one_of(\"+-\")(input)?;\n\n not(keyword)(input)?;\n\n not(one_of(\"0123456789\"))(input)?;\n\n let (input, _) = opt(many1(identifier_char))(input)?;\n\n Ok((input, ()))\n\n }\n\n recognize(alt((left, right)))(input)\n\n}\n\n\n", "file_path": "kq/src/kdlrs.rs", "rank": 7, "score": 158498.92737513 }, { "content": "/// https://github.com/kdl-org/kdl-rs/blob/v3.0.0/src/parser.rs#L267-L278\n\n/// `raw-string := 'r' raw-string-hash`\n\n/// `raw-string-hash := '#' raw-string-hash '#' | raw-string-quotes`\n\n/// `raw-string-quotes := '\"' .* '\"'`\n\n///\n\n// fn raw_string(input: &str) -> IResult<&str, &str, KdlParseError<&str>> {\n\nfn raw_string(input: &str) -> IResult<&str, &str> {\n\n let (input, _) = char('r')(input)?;\n\n let (input, hashes) = recognize(many0(char('#')))(input)?;\n\n let (input, _) = char('\"')(input)?;\n\n let close = format!(\"\\\"{}\", hashes);\n\n let (input, string) = take_until(&close[..])(input)?;\n\n let (input, _) = tag(&close[..])(input)?;\n\n Ok((input, string))\n\n}\n\n\n", "file_path": "kq/src/kdlrs.rs", "rank": 8, "score": 158498.7423961896 }, { "content": "/// https://github.com/kdl-org/kdl-rs/blob/v3.0.0/src/parser.rs#L407-L411\n\n/// `multi-line-comment := '/*' commented-block\n\n///\n\n// fn multi_line_comment(input: &str) -> IResult<&str, &str, KdlParseError<&str>> {\n\nfn 
multi_line_comment(input: &str) -> IResult<&str, &str> {\n\n let (input, _) = tag(\"/*\")(input)?;\n\n commented_block(input)\n\n}\n\n\n", "file_path": "kq/src/kdlrs.rs", "rank": 9, "score": 156660.2910931721 }, { "content": "/// https://github.com/kdl-org/kdl-rs/blob/v3.0.0/src/parser.rs#L118-L122\n\n/// `linespace := newline | ws | single-line-comment`\n\n///\n\n// fn linespace(input: &str) -> IResult<&str, (), KdlParseError<&str>> {\n\nfn linespace(input: &str) -> IResult<&str, ()> {\n\n value((), alt((newline, whitespace, single_line_comment)))(input)\n\n}\n\n\n", "file_path": "kq/src/kdlrs.rs", "rank": 10, "score": 156428.02053846896 }, { "content": "/// https://github.com/kdl-org/kdl-rs/blob/v3.0.0/src/parser.rs#L473-L487\n\n/// `newline := All line-break unicode white_space\n\n///\n\n// fn newline(input: &str) -> IResult<&str, (), KdlParseError<&str>> {\n\nfn newline(input: &str) -> IResult<&str, ()> {\n\n value(\n\n (),\n\n alt((\n\n tag(\"\\r\\n\"),\n\n tag(\"\\r\"),\n\n tag(\"\\n\"),\n\n tag(\"\\u{0085}\"),\n\n tag(\"\\u{000C}\"),\n\n tag(\"\\u{2028}\"),\n\n tag(\"\\u{2029}\"),\n\n )),\n\n )(input)\n\n}\n\n\n\n/// https://github.com/kdl-org/kdl-rs/blob/v3.0.0/src/parser.rs#L437-L448\n\n/// `ws := bom | unicode-space | multi-line-comment`\n\n///\n\n// fn whitespace(input: &str) -> IResult<&str, (), KdlParseError<&str>> {\n\npub(crate) fn whitespace(input: &str) -> IResult<&str, ()> {\n", "file_path": "kq/src/kdlrs.rs", "rank": 11, "score": 156428.02053846896 }, { "content": "/// https://github.com/kdl-org/kdl-rs/blob/v3.0.0/src/parser.rs#L400-L405\n\n/// `single-line-comment := '//' ('\\r' [^\\n] | [^\\r\\n])* (newline | eof)`\n\n///\n\n// fn single_line_comment(input: &str) -> IResult<&str, (), KdlParseError<&str>> {\n\nfn single_line_comment(input: &str) -> IResult<&str, ()> {\n\n let (input, _) = tag(\"//\")(input)?;\n\n let (input, _) = many_till(value((), anychar), alt((newline, value((), eof))))(input)?;\n\n Ok((input, ()))\n\n}\n\n\n", "file_path": 
"kq/src/kdlrs.rs", "rank": 12, "score": 152250.13723489695 }, { "content": "// fn unicode(input: &str) -> IResult<&str, char, KdlParseError<&str>> {\n\nfn unicode(input: &str) -> IResult<&str, char> {\n\n map_opt(\n\n map_res(\n\n take_while_m_n(1, 6, |c: char| c.is_ascii_hexdigit()),\n\n |hex| u32::from_str_radix(hex, 16),\n\n ),\n\n std::char::from_u32,\n\n )(input)\n\n}\n\n\n\n/// https://github.com/kdl-org/kdl-rs/blob/v3.0.0/src/parser.rs#L180-L190\n\n/// `value := type-annotation? (string | raw_string | number | boolean | 'null'`)\n\n///\n\n// fn node_value(input: &str) -> IResult<&str, KdlValue, KdlParseError<&str>> {\n\npub(crate) fn node_value(input: &str) -> IResult<&str, KdlValue> {\n\n // let (input, _ty) = opt(type_annotation)(input)?;\n\n alt((\n\n map(string, KdlValue::String),\n\n map(raw_string, |s| KdlValue::String(s.into())),\n\n number,\n\n boolean,\n\n value(KdlValue::Null, tag(\"null\")),\n\n ))(input)\n\n}\n\n\n", "file_path": "kq/src/kdlrs.rs", "rank": 13, "score": 150607.74676305294 }, { "content": "/// https://github.com/kdl-org/kdl-rs/blob/v3.0.0/src/parser.rs#L331-L343\n\n///\n\n// fn sign(input: &str) -> IResult<&str, i64, KdlParseError<&str>> {\n\nfn sign(input: &str) -> IResult<&str, i64> {\n\n let (input, sign) = opt(alt((char('+'), char('-'))))(input)?;\n\n let mult = if let Some(sign) = sign {\n\n if sign == '+' {\n\n 1\n\n } else {\n\n -1\n\n }\n\n } else {\n\n 1\n\n };\n\n Ok((input, mult))\n\n}\n\n\n", "file_path": "kq/src/kdlrs.rs", "rank": 14, "score": 150605.5887644871 }, { "content": "/// https://github.com/kdl-org/kdl-rs/blob/v3.0.0/src/parser.rs#L372-L382\n\n///\n\n// fn binary(input: &str) -> IResult<&str, i64, KdlParseError<&str>> {\n\nfn binary(input: &str) -> IResult<&str, i64> {\n\n let (input, sign) = sign(input)?;\n\n map_res(\n\n preceded(\n\n alt((tag(\"0b\"), tag(\"0B\"))),\n\n recognize(many1(terminated(one_of(\"01\"), many0(char('_'))))),\n\n ),\n\n move |out: &str| i64::from_str_radix(&str::replace(out, \"_\", 
\"\"), 2).map(|x| x * sign),\n\n )(input)\n\n}\n\n\n", "file_path": "kq/src/kdlrs.rs", "rank": 15, "score": 150605.58876448707 }, { "content": "/// https://github.com/kdl-org/kdl-rs/blob/v3.0.0/src/parser.rs#L213-L223\n\n/// `string := '\"' character* '\"'`\n\n///\n\n// fn string(input: &str) -> IResult<&str, String, KdlParseError<&str>> {\n\nfn string(input: &str) -> IResult<&str, String> {\n\n delimited(\n\n char('\"'),\n\n fold_many0(character, String::new, |mut acc, ch| {\n\n acc.push(ch);\n\n acc\n\n }),\n\n char('\"'),\n\n )(input)\n\n}\n\n\n", "file_path": "kq/src/kdlrs.rs", "rank": 16, "score": 150605.25416969898 }, { "content": "/// https://github.com/kdl-org/kdl-rs/blob/v3.0.0/src/parser.rs#L144-L146\n\n/// `string := '\"' character* '\"'`\n\n///\n\n// fn keyword(input: &str) -> IResult<&str, String, KdlParseError<&str>> {\n\nfn keyword(input: &str) -> IResult<&str, String> {\n\n map(alt((tag(\"true\"), tag(\"false\"), tag(\"null\"))), String::from)(input)\n\n}\n\n\n", "file_path": "kq/src/kdlrs.rs", "rank": 17, "score": 150605.25416969898 }, { "content": "/// https://github.com/kdl-org/kdl-rs/blob/v3.0.0/src/parser.rs#L225-L228\n\n/// `character := '\\' escape | [^\\\"]`\n\n///\n\n// fn character(input: &str) -> IResult<&str, char, KdlParseError<&str>> {\n\nfn character(input: &str) -> IResult<&str, char> {\n\n alt((preceded(char('\\\\'), escape), none_of(\"\\\\\\\"\")))(input)\n\n}\n\n\n", "file_path": "kq/src/kdlrs.rs", "rank": 18, "score": 150605.25416969895 }, { "content": "/// https://github.com/kdl-org/kdl-rs/blob/v3.0.0/src/parser.rs#L360-L370\n\n///\n\n/// `octal := sign? 
'0o' [0-7] [0-7_]*`\n\n// fn octal(input: &str) -> IResult<&str, i64, KdlParseError<&str>> {\n\nfn octal(input: &str) -> IResult<&str, i64> {\n\n let (input, sign) = sign(input)?;\n\n map_res(\n\n preceded(\n\n alt((tag(\"0o\"), tag(\"0O\"))),\n\n recognize(many1(terminated(one_of(\"01234567\"), many0(char('_'))))),\n\n ),\n\n move |out: &str| i64::from_str_radix(&str::replace(out, \"_\", \"\"), 8).map(|x| x * sign),\n\n )(input)\n\n}\n\n\n", "file_path": "kq/src/kdlrs.rs", "rank": 19, "score": 150605.25416969898 }, { "content": "/// https://github.com/kdl-org/kdl-rs/blob/v3.0.0/src/parser.rs#L249-L255\n\n/// `escape := [\"\\\\/bfnrt] | 'u{' hex-digit{1, 6} '}'`\n\n///\n\n// fn escape(input: &str) -> IResult<&str, char, KdlParseError<&str>> {\n\nfn escape(input: &str) -> IResult<&str, char> {\n\n alt((\n\n delimited(tag(\"u{\"), unicode, char('}')),\n\n map_opt(anychar, escape_chars),\n\n ))(input)\n\n}\n\n\n", "file_path": "kq/src/kdlrs.rs", "rank": 20, "score": 150604.94810953527 }, { "content": "/// https://github.com/kdl-org/kdl-rs/blob/v3.0.0/src/parser.rs#L345-L358\n\n/// `hex := sign? '0x' [0-9a-fA-F] [0-9a-fA-F_]*`\n\n///\n\n// fn hexadecimal(input: &str) -> IResult<&str, i64, KdlParseError<&str>> {\n\nfn hexadecimal(input: &str) -> IResult<&str, i64> {\n\n let (input, sign) = sign(input)?;\n\n map_res(\n\n preceded(\n\n alt((tag(\"0x\"), tag(\"0X\"))),\n\n recognize(many1(terminated(\n\n one_of(\"0123456789abcdefABCDEF\"),\n\n many0(char('_')),\n\n ))),\n\n ),\n\n move |out: &str| i64::from_str_radix(&str::replace(out, \"_\", \"\"), 16).map(|x| x * sign),\n\n )(input)\n\n}\n\n\n", "file_path": "kq/src/kdlrs.rs", "rank": 21, "score": 150604.94810953524 }, { "content": "/// https://github.com/kdl-org/kdl-rs/blob/v3.0.0/src/parser.rs#L291-L311\n\n/// ```text\n\n/// decimal := integer ('.' [0-9]+)? exponent?\n\n/// exponent := ('e' | 'E') integer\n\n/// integer := sign? 
[1-9] [0-9_]*\n\n/// sign := '+' | '-'\n\n/// ```\n\n///\n\n// fn float(input: &str) -> IResult<&str, f64, KdlParseError<&str>> {\n\nfn float(input: &str) -> IResult<&str, f64> {\n\n map_res(\n\n alt((\n\n recognize(tuple((\n\n integer,\n\n opt(preceded(char('.'), integer)),\n\n one_of(\"eE\"),\n\n opt(one_of(\"+-\")),\n\n integer,\n\n ))),\n\n recognize(tuple((integer, char('.'), integer))),\n\n )),\n\n |x| str::replace(x, \"_\", \"\").parse::<f64>(),\n\n )(input)\n\n}\n\n\n", "file_path": "kq/src/kdlrs.rs", "rank": 22, "score": 150604.16806906593 }, { "content": "/// https://github.com/kdl-org/kdl-rs/blob/v3.0.0/src/parser.rs#L313-L329\n\n/// ```text\n\n/// decimal := integer ('.' [0-9]+)? exponent?\n\n/// exponent := ('e' | 'E') integer\n\n/// integer := sign? [1-9] [0-9_]*\n\n/// sign := '+' | '-'\n\n/// ```\n\n///\n\n// fn integer(input: &str) -> IResult<&str, i64, KdlParseError<&str>> {\n\nfn integer(input: &str) -> IResult<&str, i64> {\n\n let (input, sign) = sign(input)?;\n\n map_res(\n\n recognize(many1(terminated(one_of(\"0123456789\"), many0(char('_'))))),\n\n move |out: &str| {\n\n str::replace(out, \"_\", \"\")\n\n .parse::<i64>()\n\n .map(move |x| x * sign)\n\n },\n\n )(input)\n\n}\n", "file_path": "kq/src/kdlrs.rs", "rank": 23, "score": 150604.16806906593 }, { "content": "/// `matcher := '[' entity (ws+ operator ws+ kdl-value)? 
']'`\n\nfn matcher(input: &str) -> IResult<&str, Matcher> {\n\n let (input, _) = tag(\"[\")(input)?;\n\n let (input, left_hand_side) = entity(input)?;\n\n let (input, expression) = opt(tuple((\n\n delimited(many1(kdlrs::whitespace), operator, many1(kdlrs::whitespace)),\n\n kdlrs::node_value,\n\n )))(input)?;\n\n let (input, _) = tag(\"]\")(input)?;\n\n\n\n let output = match expression {\n\n Some((operator, right_hand_side)) => {\n\n Matcher::Expression(left_hand_side, operator, right_hand_side)\n\n }\n\n None => Matcher::Direct(left_hand_side),\n\n };\n\n\n\n Ok((input, output))\n\n}\n\n\n", "file_path": "kq/src/parser.rs", "rank": 24, "score": 150600.43198780195 }, { "content": "/// ```text\n\n/// entity :=\n\n/// 'name()' |\n\n/// 'tag()' |\n\n/// 'props()' |\n\n/// 'values()' |\n\n/// 'val(' digit* ')' |\n\n/// 'prop(' identifier ')' |\n\n/// identifier '()'?\n\n/// ```\n\nfn entity(input: &str) -> IResult<&str, Entity> {\n\n alt((\n\n value(Entity::NodeName, tag(\"name()\")),\n\n value(Entity::TypeTag, tag(\"tag()\")),\n\n value(Entity::Props, tag(\"props()\")),\n\n value(Entity::Values, tag(\"values()\")),\n\n map(delimited(tag(\"val(\"), digit0, tag(\")\")), |input: &str| {\n\n Entity::Val(input.parse::<usize>().unwrap_or(0))\n\n }),\n\n map(\n\n delimited(tag(\"prop(\"), kdlrs::identifier, tag(\")\")),\n\n Entity::PropName,\n\n ),\n\n map(kdlrs::identifier, Entity::PropName),\n\n ))(input)\n\n}\n\n\n", "file_path": "kq/src/parser.rs", "rank": 25, "score": 150599.97017497578 }, { "content": "/// `operator := '=' | '!=' | '>' | '>=' | '<' | '<=' | '^=' | '$=' | '*='`\n\nfn operator(input: &str) -> IResult<&str, Operator> {\n\n alt((\n\n value(Operator::Contains, tag(\"*=\")),\n\n value(Operator::EndsWith, tag(\"$=\")),\n\n value(Operator::GreaterThanOrEqualTo, tag(\">=\")),\n\n value(Operator::LessThanOrEqualTo, tag(\"<=\")),\n\n value(Operator::NotEqual, tag(\"!=\")),\n\n value(Operator::StartsWith, tag(\"^=\")),\n\n value(Operator::Equal, tag(\"=\")),\n\n 
value(Operator::GreaterThan, tag(\">\")),\n\n value(Operator::LessThan, tag(\"<\")),\n\n ))(input)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_selector() {\n", "file_path": "kq/src/parser.rs", "rank": 26, "score": 150597.86500200073 }, { "content": "/// ```text\n\n/// accessor :=\n\n/// 'top()' |\n\n/// '[]' |\n\n/// '(' identifier? ')' |\n\n/// identifier? matcher |\n\n/// identifier\n\n/// ```\n\nfn accessor(input: &str) -> IResult<&str, Accessor> {\n\n alt((\n\n value(Accessor::Top, tag(\"top()\")),\n\n value(Accessor::AnyElement, tag(\"[]\")),\n\n map(\n\n delimited(tag(\"(\"), opt(kdlrs::identifier), tag(\")\")),\n\n Accessor::AnyElementWithTypeTag,\n\n ),\n\n map(\n\n tuple((opt(kdlrs::identifier), matcher)),\n\n |(identifier, matcher)| Accessor::Closed(identifier, matcher),\n\n ),\n\n map(kdlrs::identifier, Accessor::Sole),\n\n ))(input)\n\n}\n\n\n", "file_path": "kq/src/parser.rs", "rank": 27, "score": 150597.86500200076 }, { "content": "fn combinator(input: &str) -> IResult<&str, ParsedCombinator> {\n\n alt((\n\n delimited(\n\n many1(kdlrs::whitespace),\n\n alt((\n\n value(ParsedCombinator::Child, tag(\">\")),\n\n value(ParsedCombinator::AdjacentSibling, tag(\"+\")),\n\n value(ParsedCombinator::GeneralSibling, tag(\"~\")),\n\n )),\n\n many1(kdlrs::whitespace),\n\n ),\n\n value(ParsedCombinator::Descendant, many1(kdlrs::whitespace)),\n\n ))(input)\n\n}\n\n\n", "file_path": "kq/src/parser.rs", "rank": 28, "score": 148556.83265278145 }, { "content": "fn traverse<F>(predicate: F, document: &[KdlNode]) -> Vec<KdlNode>\n\nwhere\n\n F: Fn(&KdlNode) -> bool,\n\n{\n\n let mut result: Vec<KdlNode> = vec![];\n\n let mut queue: VecDeque<&KdlNode> = document.iter().collect();\n\n\n\n while let Some(node) = queue.pop_front() {\n\n if predicate(node) {\n\n result.push(node.clone());\n\n }\n\n queue.extend(node.children.iter());\n\n }\n\n\n\n result\n\n}\n", "file_path": "kq/src/lib.rs", "rank": 29, "score": 110218.65633700238 }, 
{ "content": "fn query_by_selector(selector: Vec<Combinator>, document: Vec<KdlNode>) -> Vec<KdlNode> {\n\n selector\n\n .iter()\n\n .fold(\n\n (&Accessor::Top, document),\n\n |(previous, document), combinator| match combinator {\n\n Combinator::Child(accessor, siblings) => {\n\n let is_previous_sibling_top = match previous {\n\n Accessor::AnyElement\n\n | Accessor::AnyElementWithTypeTag(_)\n\n | Accessor::Closed(_, _)\n\n | Accessor::Sole(_) => false,\n\n Accessor::Top => true,\n\n };\n\n let document = query_by_child_combinator(\n\n is_previous_sibling_top,\n\n accessor,\n\n siblings,\n\n document,\n\n );\n", "file_path": "kq/src/lib.rs", "rank": 30, "score": 105498.26060131687 }, { "content": "#[test]\n\nfn identifier_and_implicit_property_name() {\n\n Command::cargo_bin(\"kq\")\n\n .unwrap()\n\n .arg(\"dependencies[platform]\")\n\n .write_stdin(INPUT)\n\n .assert()\n\n .success()\n\n .stdout(indoc! {r#\"\n\n dependencies platform=\"windows\" {\n\n winapi \"1.0.0\" path=\".\\/crates\\/my-winapi-fork\"\n\n }\n\n \"#});\n\n}\n\n\n", "file_path": "kq/tests/example_test.rs", "rank": 31, "score": 93051.78959476443 }, { "content": "#[test]\n\nfn identifier_and_explicit_property_name() {\n\n Command::cargo_bin(\"kq\")\n\n .unwrap()\n\n .arg(\"dependencies[prop(platform)]\")\n\n .write_stdin(INPUT)\n\n .assert()\n\n .success()\n\n .stdout(indoc! {r#\"\n\n dependencies platform=\"windows\" {\n\n winapi \"1.0.0\" path=\".\\/crates\\/my-winapi-fork\"\n\n }\n\n \"#});\n\n}\n\n\n", "file_path": "kq/tests/example_test.rs", "rank": 32, "score": 93051.78959476443 }, { "content": "#[test]\n\nfn parentheses() {\n\n Command::cargo_bin(\"kq\")\n\n .unwrap()\n\n .arg(\"[name()]\")\n\n .write_stdin(indoc! 
{r#\"\n\n step uses=\"actions/checkout@v1\"\n\n step \"Install Rust\" uses=\"actions-rs/toolchain@v1\" {\n\n profile \"minimal\"\n\n }\n\n step \"Clippy\" run=\"cargo clippy --all -- -D warnings\"\n\n step \"Run tests\" run=\"cargo test --all --verbose\"\n\n \"#})\n\n .assert()\n\n .success()\n\n .stdout(predicates::str::is_empty());\n\n}\n\n\n", "file_path": "kq/tests/accessor_single/closed_node_name.rs", "rank": 34, "score": 85999.04954683821 }, { "content": "#[test]\n\nfn less_than() {\n\n Command::cargo_bin(\"kq\")\n\n .unwrap()\n\n .arg(r#\"[name() < \"profile\"]\"#)\n\n .write_stdin(indoc! {r#\"\n\n step uses=\"actions/checkout@v1\"\n\n step \"Install Rust\" uses=\"actions-rs/toolchain@v1\" {\n\n profile \"minimal\"\n\n }\n\n step \"Clippy\" run=\"cargo clippy --all -- -D warnings\"\n\n step \"Run tests\" run=\"cargo test --all --verbose\"\n\n \"#})\n\n .assert()\n\n .success()\n\n .stdout(predicates::str::is_empty());\n\n}\n\n\n", "file_path": "kq/tests/accessor_single/closed_node_name.rs", "rank": 35, "score": 85999.04954683821 }, { "content": "#[test]\n\nfn greater_than() {\n\n Command::cargo_bin(\"kq\")\n\n .unwrap()\n\n .arg(r#\"[name() > \"profile\"]\"#)\n\n .write_stdin(indoc! {r#\"\n\n step uses=\"actions/checkout@v1\"\n\n step \"Install Rust\" uses=\"actions-rs/toolchain@v1\" {\n\n profile \"minimal\"\n\n }\n\n step \"Clippy\" run=\"cargo clippy --all -- -D warnings\"\n\n step \"Run tests\" run=\"cargo test --all --verbose\"\n\n \"#})\n\n .assert()\n\n .success()\n\n .stdout(predicates::str::is_empty());\n\n}\n\n\n", "file_path": "kq/tests/accessor_single/closed_node_name.rs", "rank": 36, "score": 85999.04954683821 }, { "content": "#[test]\n\nfn equal() {\n\n Command::cargo_bin(\"kq\")\n\n .unwrap()\n\n .arg(r#\"[name() = \"profile\"]\"#)\n\n .write_stdin(indoc! 
{r#\"\n\n step uses=\"actions/checkout@v1\"\n\n step \"Install Rust\" uses=\"actions-rs/toolchain@v1\" {\n\n profile \"minimal\"\n\n }\n\n step \"Clippy\" run=\"cargo clippy --all -- -D warnings\"\n\n step \"Run tests\" run=\"cargo test --all --verbose\"\n\n \"#})\n\n .assert()\n\n .success()\n\n .stdout(indoc! {r#\"\n\n profile \"minimal\"\n\n \"#});\n\n}\n\n\n", "file_path": "kq/tests/accessor_single/closed_node_name.rs", "rank": 37, "score": 85999.04954683821 }, { "content": "#[test]\n\nfn contains() {\n\n Command::cargo_bin(\"kq\")\n\n .unwrap()\n\n .arg(r#\"[name() *= \"rofil\"]\"#)\n\n .write_stdin(indoc! {r#\"\n\n step uses=\"actions/checkout@v1\"\n\n step \"Install Rust\" uses=\"actions-rs/toolchain@v1\" {\n\n profile \"minimal\"\n\n }\n\n step \"Clippy\" run=\"cargo clippy --all -- -D warnings\"\n\n step \"Run tests\" run=\"cargo test --all --verbose\"\n\n \"#})\n\n .assert()\n\n .success()\n\n .stdout(indoc! {r#\"\n\n profile \"minimal\"\n\n \"#});\n\n}\n\n\n", "file_path": "kq/tests/accessor_single/closed_node_name.rs", "rank": 38, "score": 85999.04954683821 }, { "content": "#[test]\n\nfn starts_with() {\n\n Command::cargo_bin(\"kq\")\n\n .unwrap()\n\n .arg(r#\"[name() ^= \"pro\"]\"#)\n\n .write_stdin(indoc! {r#\"\n\n step uses=\"actions/checkout@v1\"\n\n step \"Install Rust\" uses=\"actions-rs/toolchain@v1\" {\n\n profile \"minimal\"\n\n }\n\n step \"Clippy\" run=\"cargo clippy --all -- -D warnings\"\n\n step \"Run tests\" run=\"cargo test --all --verbose\"\n\n \"#})\n\n .assert()\n\n .success()\n\n .stdout(indoc! {r#\"\n\n profile \"minimal\"\n\n \"#});\n\n}\n\n\n", "file_path": "kq/tests/accessor_single/closed_node_name.rs", "rank": 39, "score": 85999.04954683821 }, { "content": "#[test]\n\nfn not_equal() {\n\n Command::cargo_bin(\"kq\")\n\n .unwrap()\n\n .arg(r#\"[name() != \"step\"]\"#)\n\n .write_stdin(indoc! 
{r#\"\n\n step uses=\"actions/checkout@v1\"\n\n step \"Install Rust\" uses=\"actions-rs/toolchain@v1\" {\n\n profile \"minimal\"\n\n }\n\n step \"Clippy\" run=\"cargo clippy --all -- -D warnings\"\n\n step \"Run tests\" run=\"cargo test --all --verbose\"\n\n \"#})\n\n .assert()\n\n .success()\n\n .stdout(indoc! {r#\"\n\n profile \"minimal\"\n\n \"#});\n\n}\n\n\n", "file_path": "kq/tests/accessor_single/closed_node_name.rs", "rank": 40, "score": 85999.04954683821 }, { "content": "#[test]\n\nfn ends_with() {\n\n Command::cargo_bin(\"kq\")\n\n .unwrap()\n\n .arg(r#\"[name() $= \"file\"]\"#)\n\n .write_stdin(indoc! {r#\"\n\n step uses=\"actions/checkout@v1\"\n\n step \"Install Rust\" uses=\"actions-rs/toolchain@v1\" {\n\n profile \"minimal\"\n\n }\n\n step \"Clippy\" run=\"cargo clippy --all -- -D warnings\"\n\n step \"Run tests\" run=\"cargo test --all --verbose\"\n\n \"#})\n\n .assert()\n\n .success()\n\n .stdout(indoc! {r#\"\n\n profile \"minimal\"\n\n \"#});\n\n}\n\n\n", "file_path": "kq/tests/accessor_single/closed_node_name.rs", "rank": 41, "score": 85999.04954683821 }, { "content": "#[test]\n\nfn equal() {\n\n Command::cargo_bin(\"kq\")\n\n .unwrap()\n\n .arg(r#\"profile[name() = \"profile\"]\"#)\n\n .write_stdin(indoc! {r#\"\n\n step uses=\"actions/checkout@v1\"\n\n step \"Install Rust\" uses=\"actions-rs/toolchain@v1\" {\n\n profile \"minimal\"\n\n }\n\n step \"Clippy\" run=\"cargo clippy --all -- -D warnings\"\n\n step \"Run tests\" run=\"cargo test --all --verbose\"\n\n \"#})\n\n .assert()\n\n .success()\n\n .stdout(indoc! {r#\"\n\n profile \"minimal\"\n\n \"#});\n\n}\n\n\n", "file_path": "kq/tests/accessor_single/identifier_closed_node_name.rs", "rank": 42, "score": 83938.9645746327 }, { "content": "#[test]\n\nfn present() {\n\n Command::cargo_bin(\"kq\")\n\n .unwrap()\n\n .arg(\"[prop(uses)]\")\n\n .write_stdin(indoc! 
{r#\"\n\n step uses=\"actions/checkout@v1\"\n\n step run=\"cargo test --all --verbose\"\n\n step uses=\"actions-rs/toolchain@v1\"\n\n \"#})\n\n .assert()\n\n .success()\n\n .stdout(indoc! {r#\"\n\n step uses=\"actions\\/checkout@v1\"\n\n step uses=\"actions-rs\\/toolchain@v1\"\n\n \"#});\n\n}\n\n\n", "file_path": "kq/tests/accessor_single/closed_prop_name_explicit.rs", "rank": 43, "score": 83938.9645746327 }, { "content": "#[test]\n\nfn not_equal() {\n\n Command::cargo_bin(\"kq\")\n\n .unwrap()\n\n .arg(r#\"[prop(uses) != \"actions/checkout@v1\"]\"#)\n\n .write_stdin(indoc! {r#\"\n\n step uses=\"actions/checkout@v1\"\n\n step run=\"cargo test --all --verbose\"\n\n step uses=\"actions-rs/toolchain@v1\"\n\n \"#})\n\n .assert()\n\n .success()\n\n .stdout(indoc! {r#\"\n\n step uses=\"actions-rs\\/toolchain@v1\"\n\n \"#});\n\n}\n\n\n", "file_path": "kq/tests/accessor_single/closed_prop_name_explicit.rs", "rank": 44, "score": 83938.96457463272 }, { "content": "#[test]\n\nfn starts_with() {\n\n Command::cargo_bin(\"kq\")\n\n .unwrap()\n\n .arg(r#\"[uses ^= \"actions\"]\"#)\n\n .write_stdin(indoc! {r#\"\n\n step uses=\"actions/checkout@v1\"\n\n step run=\"cargo test --all --verbose\"\n\n step uses=\"actions-rs/toolchain@v1\"\n\n \"#})\n\n .assert()\n\n .success()\n\n .stdout(indoc! {r#\"\n\n step uses=\"actions\\/checkout@v1\"\n\n step uses=\"actions-rs\\/toolchain@v1\"\n\n \"#});\n\n}\n\n\n", "file_path": "kq/tests/accessor_single/closed_prop_name_implicit.rs", "rank": 45, "score": 83938.96457463272 }, { "content": "#[test]\n\nfn equal() {\n\n Command::cargo_bin(\"kq\")\n\n .unwrap()\n\n .arg(r#\"[prop(uses) = \"actions/checkout@v1\"]\"#)\n\n .write_stdin(indoc! {r#\"\n\n step uses=\"actions/checkout@v1\"\n\n step run=\"cargo test --all --verbose\"\n\n step uses=\"actions-rs/toolchain@v1\"\n\n \"#})\n\n .assert()\n\n .success()\n\n .stdout(indoc! 
{r#\"\n\n step uses=\"actions\\/checkout@v1\"\n\n \"#});\n\n}\n\n\n", "file_path": "kq/tests/accessor_single/closed_prop_name_explicit.rs", "rank": 46, "score": 83938.9645746327 }, { "content": "#[test]\n\nfn greater_than() {\n\n Command::cargo_bin(\"kq\")\n\n .unwrap()\n\n .arg(r#\"profile[name() > \"profile\"]\"#)\n\n .write_stdin(indoc! {r#\"\n\n step uses=\"actions/checkout@v1\"\n\n step \"Install Rust\" uses=\"actions-rs/toolchain@v1\" {\n\n profile \"minimal\"\n\n }\n\n step \"Clippy\" run=\"cargo clippy --all -- -D warnings\"\n\n step \"Run tests\" run=\"cargo test --all --verbose\"\n\n \"#})\n\n .assert()\n\n .success()\n\n .stdout(predicates::str::is_empty());\n\n}\n\n\n", "file_path": "kq/tests/accessor_single/identifier_closed_node_name.rs", "rank": 47, "score": 83938.9645746327 }, { "content": "#[test]\n\nfn less_than_or_equal_to() {\n\n Command::cargo_bin(\"kq\")\n\n .unwrap()\n\n .arg(r#\"[name() <= \"profile\"]\"#)\n\n .write_stdin(indoc! {r#\"\n\n step uses=\"actions/checkout@v1\"\n\n step \"Install Rust\" uses=\"actions-rs/toolchain@v1\" {\n\n profile \"minimal\"\n\n }\n\n step \"Clippy\" run=\"cargo clippy --all -- -D warnings\"\n\n step \"Run tests\" run=\"cargo test --all --verbose\"\n\n \"#})\n\n .assert()\n\n .success()\n\n .stdout(predicates::str::is_empty());\n\n}\n", "file_path": "kq/tests/accessor_single/closed_node_name.rs", "rank": 48, "score": 83938.96457463272 }, { "content": "#[test]\n\nfn ends_with() {\n\n Command::cargo_bin(\"kq\")\n\n .unwrap()\n\n .arg(r#\"[uses $= \"@v1\"]\"#)\n\n .write_stdin(indoc! {r#\"\n\n step uses=\"actions/checkout@v1\"\n\n step run=\"cargo test --all --verbose\"\n\n step uses=\"actions-rs/toolchain@v1\"\n\n \"#})\n\n .assert()\n\n .success()\n\n .stdout(indoc! 
{r#\"\n\n step uses=\"actions\\/checkout@v1\"\n\n step uses=\"actions-rs\\/toolchain@v1\"\n\n \"#});\n\n}\n\n\n", "file_path": "kq/tests/accessor_single/closed_prop_name_implicit.rs", "rank": 49, "score": 83938.9645746327 }, { "content": "#[test]\n\nfn ends_with() {\n\n Command::cargo_bin(\"kq\")\n\n .unwrap()\n\n .arg(r#\"profile[name() $= \"file\"]\"#)\n\n .write_stdin(indoc! {r#\"\n\n step uses=\"actions/checkout@v1\"\n\n step \"Install Rust\" uses=\"actions-rs/toolchain@v1\" {\n\n profile \"minimal\"\n\n }\n\n step \"Clippy\" run=\"cargo clippy --all -- -D warnings\"\n\n step \"Run tests\" run=\"cargo test --all --verbose\"\n\n \"#})\n\n .assert()\n\n .success()\n\n .stdout(indoc! {r#\"\n\n profile \"minimal\"\n\n \"#});\n\n}\n\n\n", "file_path": "kq/tests/accessor_single/identifier_closed_node_name.rs", "rank": 50, "score": 83938.9645746327 }, { "content": "#[test]\n\nfn greater_than() {\n\n Command::cargo_bin(\"kq\")\n\n .unwrap()\n\n .arg(r#\"[uses > \"actions/checkout@v1\"]\"#)\n\n .write_stdin(indoc! {r#\"\n\n step uses=\"actions/checkout@v1\"\n\n step run=\"cargo test --all --verbose\"\n\n step uses=\"actions-rs/toolchain@v1\"\n\n \"#})\n\n .assert()\n\n .success()\n\n .stdout(predicates::str::is_empty());\n\n}\n\n\n", "file_path": "kq/tests/accessor_single/closed_prop_name_implicit.rs", "rank": 51, "score": 83938.9645746327 }, { "content": "#[test]\n\nfn ends_with() {\n\n Command::cargo_bin(\"kq\")\n\n .unwrap()\n\n .arg(r#\"[prop(uses) $= \"@v1\"]\"#)\n\n .write_stdin(indoc! {r#\"\n\n step uses=\"actions/checkout@v1\"\n\n step run=\"cargo test --all --verbose\"\n\n step uses=\"actions-rs/toolchain@v1\"\n\n \"#})\n\n .assert()\n\n .success()\n\n .stdout(indoc! 
{r#\"\n\n step uses=\"actions\\/checkout@v1\"\n\n step uses=\"actions-rs\\/toolchain@v1\"\n\n \"#});\n\n}\n\n\n", "file_path": "kq/tests/accessor_single/closed_prop_name_explicit.rs", "rank": 52, "score": 83938.96457463272 }, { "content": "#[test]\n\nfn greater_than_or_equal_to() {\n\n Command::cargo_bin(\"kq\")\n\n .unwrap()\n\n .arg(r#\"[name() >= \"profile\"]\"#)\n\n .write_stdin(indoc! {r#\"\n\n step uses=\"actions/checkout@v1\"\n\n step \"Install Rust\" uses=\"actions-rs/toolchain@v1\" {\n\n profile \"minimal\"\n\n }\n\n step \"Clippy\" run=\"cargo clippy --all -- -D warnings\"\n\n step \"Run tests\" run=\"cargo test --all --verbose\"\n\n \"#})\n\n .assert()\n\n .success()\n\n .stdout(predicates::str::is_empty());\n\n}\n\n\n", "file_path": "kq/tests/accessor_single/closed_node_name.rs", "rank": 53, "score": 83938.96457463272 }, { "content": "#[test]\n\nfn not_equal() {\n\n Command::cargo_bin(\"kq\")\n\n .unwrap()\n\n .arg(r#\"[uses != \"actions/checkout@v1\"]\"#)\n\n .write_stdin(indoc! {r#\"\n\n step uses=\"actions/checkout@v1\"\n\n step run=\"cargo test --all --verbose\"\n\n step uses=\"actions-rs/toolchain@v1\"\n\n \"#})\n\n .assert()\n\n .success()\n\n .stdout(indoc! {r#\"\n\n step uses=\"actions-rs\\/toolchain@v1\"\n\n \"#});\n\n}\n\n\n", "file_path": "kq/tests/accessor_single/closed_prop_name_implicit.rs", "rank": 54, "score": 83938.9645746327 }, { "content": "#[test]\n\nfn contains() {\n\n Command::cargo_bin(\"kq\")\n\n .unwrap()\n\n .arg(r#\"[uses *= \"s/\"]\"#)\n\n .write_stdin(indoc! {r#\"\n\n step uses=\"actions/checkout@v1\"\n\n step run=\"cargo test --all --verbose\"\n\n step uses=\"actions-rs/toolchain@v1\"\n\n \"#})\n\n .assert()\n\n .success()\n\n .stdout(indoc! 
{r#\"\n\n step uses=\"actions\\/checkout@v1\"\n\n step uses=\"actions-rs\\/toolchain@v1\"\n\n \"#});\n\n}\n\n\n", "file_path": "kq/tests/accessor_single/closed_prop_name_implicit.rs", "rank": 55, "score": 83938.96457463272 }, { "content": "#[test]\n\nfn greater_than() {\n\n Command::cargo_bin(\"kq\")\n\n .unwrap()\n\n .arg(r#\"[prop(uses) > \"actions/checkout@v1\"]\"#)\n\n .write_stdin(indoc! {r#\"\n\n step uses=\"actions/checkout@v1\"\n\n step run=\"cargo test --all --verbose\"\n\n step uses=\"actions-rs/toolchain@v1\"\n\n \"#})\n\n .assert()\n\n .success()\n\n .stdout(predicates::str::is_empty());\n\n}\n\n\n", "file_path": "kq/tests/accessor_single/closed_prop_name_explicit.rs", "rank": 56, "score": 83938.96457463272 }, { "content": "#[test]\n\nfn less_than() {\n\n Command::cargo_bin(\"kq\")\n\n .unwrap()\n\n .arg(r#\"[prop(uses) < \"actions/checkout@v1\"]\"#)\n\n .write_stdin(indoc! {r#\"\n\n step uses=\"actions/checkout@v1\"\n\n step run=\"cargo test --all --verbose\"\n\n step uses=\"actions-rs/toolchain@v1\"\n\n \"#})\n\n .assert()\n\n .success()\n\n .stdout(predicates::str::is_empty());\n\n}\n\n\n", "file_path": "kq/tests/accessor_single/closed_prop_name_explicit.rs", "rank": 57, "score": 83938.96457463272 }, { "content": "#[test]\n\nfn starts_with() {\n\n Command::cargo_bin(\"kq\")\n\n .unwrap()\n\n .arg(r#\"profile[name() ^= \"pro\"]\"#)\n\n .write_stdin(indoc! {r#\"\n\n step uses=\"actions/checkout@v1\"\n\n step \"Install Rust\" uses=\"actions-rs/toolchain@v1\" {\n\n profile \"minimal\"\n\n }\n\n step \"Clippy\" run=\"cargo clippy --all -- -D warnings\"\n\n step \"Run tests\" run=\"cargo test --all --verbose\"\n\n \"#})\n\n .assert()\n\n .success()\n\n .stdout(indoc! 
{r#\"\n\n profile \"minimal\"\n\n \"#});\n\n}\n\n\n", "file_path": "kq/tests/accessor_single/identifier_closed_node_name.rs", "rank": 58, "score": 83938.9645746327 }, { "content": "#[test]\n\nfn present() {\n\n Command::cargo_bin(\"kq\")\n\n .unwrap()\n\n .arg(\"[uses]\")\n\n .write_stdin(indoc! {r#\"\n\n step uses=\"actions/checkout@v1\"\n\n step run=\"cargo test --all --verbose\"\n\n \"#})\n\n .assert()\n\n .success()\n\n .stdout(indoc! {r#\"\n\n step uses=\"actions\\/checkout@v1\"\n\n \"#});\n\n}\n\n\n", "file_path": "kq/tests/accessor_single/closed_prop_name_implicit.rs", "rank": 59, "score": 83938.96457463272 }, { "content": "#[test]\n\nfn starts_with() {\n\n Command::cargo_bin(\"kq\")\n\n .unwrap()\n\n .arg(r#\"[prop(uses) ^= \"actions\"]\"#)\n\n .write_stdin(indoc! {r#\"\n\n step uses=\"actions/checkout@v1\"\n\n step run=\"cargo test --all --verbose\"\n\n step uses=\"actions-rs/toolchain@v1\"\n\n \"#})\n\n .assert()\n\n .success()\n\n .stdout(indoc! {r#\"\n\n step uses=\"actions\\/checkout@v1\"\n\n step uses=\"actions-rs\\/toolchain@v1\"\n\n \"#});\n\n}\n\n\n", "file_path": "kq/tests/accessor_single/closed_prop_name_explicit.rs", "rank": 60, "score": 83938.9645746327 }, { "content": "#[test]\n\nfn less_than() {\n\n Command::cargo_bin(\"kq\")\n\n .unwrap()\n\n .arg(r#\"[uses < \"actions/checkout@v1\"]\"#)\n\n .write_stdin(indoc! {r#\"\n\n step uses=\"actions/checkout@v1\"\n\n step run=\"cargo test --all --verbose\"\n\n step uses=\"actions-rs/toolchain@v1\"\n\n \"#})\n\n .assert()\n\n .success()\n\n .stdout(predicates::str::is_empty());\n\n}\n\n\n", "file_path": "kq/tests/accessor_single/closed_prop_name_implicit.rs", "rank": 61, "score": 83938.9645746327 }, { "content": "#[test]\n\nfn contains() {\n\n Command::cargo_bin(\"kq\")\n\n .unwrap()\n\n .arg(r#\"[prop(uses) *= \"toolchain\"]\"#)\n\n .write_stdin(indoc! 
{r#\"\n\n step uses=\"actions/checkout@v1\"\n\n step run=\"cargo test --all --verbose\"\n\n step uses=\"actions-rs/toolchain@v1\"\n\n \"#})\n\n .assert()\n\n .success()\n\n .stdout(indoc! {r#\"\n\n step uses=\"actions-rs\\/toolchain@v1\"\n\n \"#});\n\n}\n\n\n", "file_path": "kq/tests/accessor_single/closed_prop_name_explicit.rs", "rank": 62, "score": 83938.9645746327 }, { "content": "#[test]\n\nfn contains() {\n\n Command::cargo_bin(\"kq\")\n\n .unwrap()\n\n .arg(r#\"profile[name() *= \"rofil\"]\"#)\n\n .write_stdin(indoc! {r#\"\n\n step uses=\"actions/checkout@v1\"\n\n step \"Install Rust\" uses=\"actions-rs/toolchain@v1\" {\n\n profile \"minimal\"\n\n }\n\n step \"Clippy\" run=\"cargo clippy --all -- -D warnings\"\n\n step \"Run tests\" run=\"cargo test --all --verbose\"\n\n \"#})\n\n .assert()\n\n .success()\n\n .stdout(indoc! {r#\"\n\n profile \"minimal\"\n\n \"#});\n\n}\n\n\n", "file_path": "kq/tests/accessor_single/identifier_closed_node_name.rs", "rank": 63, "score": 83938.9645746327 }, { "content": "#[test]\n\nfn equal() {\n\n Command::cargo_bin(\"kq\")\n\n .unwrap()\n\n .arg(r#\"[uses = \"actions/checkout@v1\"]\"#)\n\n .write_stdin(indoc! {r#\"\n\n step uses=\"actions/checkout@v1\"\n\n step run=\"cargo test --all --verbose\"\n\n step uses=\"actions-rs/toolchain@v1\"\n\n \"#})\n\n .assert()\n\n .success()\n\n .stdout(indoc! {r#\"\n\n step uses=\"actions\\/checkout@v1\"\n\n \"#});\n\n}\n\n\n", "file_path": "kq/tests/accessor_single/closed_prop_name_implicit.rs", "rank": 64, "score": 83938.96457463272 }, { "content": "#[test]\n\nfn not_equal() {\n\n Command::cargo_bin(\"kq\")\n\n .unwrap()\n\n .arg(r#\"profile[name() != \"step\"]\"#)\n\n .write_stdin(indoc! 
{r#\"\n\n step uses=\"actions/checkout@v1\"\n\n step \"Install Rust\" uses=\"actions-rs/toolchain@v1\" {\n\n profile \"minimal\"\n\n }\n\n step \"Clippy\" run=\"cargo clippy --all -- -D warnings\"\n\n step \"Run tests\" run=\"cargo test --all --verbose\"\n\n \"#})\n\n .assert()\n\n .success()\n\n .stdout(indoc! {r#\"\n\n profile \"minimal\"\n\n \"#});\n\n}\n\n\n", "file_path": "kq/tests/accessor_single/identifier_closed_node_name.rs", "rank": 65, "score": 83938.9645746327 }, { "content": "#[test]\n\nfn less_than() {\n\n Command::cargo_bin(\"kq\")\n\n .unwrap()\n\n .arg(r#\"profile[name() < \"profile\"]\"#)\n\n .write_stdin(indoc! {r#\"\n\n step uses=\"actions/checkout@v1\"\n\n step \"Install Rust\" uses=\"actions-rs/toolchain@v1\" {\n\n profile \"minimal\"\n\n }\n\n step \"Clippy\" run=\"cargo clippy --all -- -D warnings\"\n\n step \"Run tests\" run=\"cargo test --all --verbose\"\n\n \"#})\n\n .assert()\n\n .success()\n\n .stdout(predicates::str::is_empty());\n\n}\n\n\n", "file_path": "kq/tests/accessor_single/identifier_closed_node_name.rs", "rank": 66, "score": 83938.9645746327 }, { "content": "#[test]\n\nfn parentheses() {\n\n Command::cargo_bin(\"kq\")\n\n .unwrap()\n\n .arg(\"step[name()]\")\n\n .write_stdin(indoc! {r#\"\n\n step uses=\"actions/checkout@v1\"\n\n step \"Install Rust\" uses=\"actions-rs/toolchain@v1\" {\n\n profile \"minimal\"\n\n }\n\n step \"Clippy\" run=\"cargo clippy --all -- -D warnings\"\n\n step \"Run tests\" run=\"cargo test --all --verbose\"\n\n \"#})\n\n .assert()\n\n .success()\n\n .stdout(predicates::str::is_empty());\n\n}\n\n\n", "file_path": "kq/tests/accessor_single/identifier_closed_node_name.rs", "rank": 67, "score": 83938.96457463272 }, { "content": "#[test]\n\nfn absent() {\n\n Command::cargo_bin(\"kq\")\n\n .unwrap()\n\n .arg(\"[prop(does_not_exist)]\")\n\n .write_stdin(indoc! 
{r#\"\n\n step uses=\"actions/checkout@v1\"\n\n step run=\"cargo test --all --verbose\"\n\n step uses=\"actions-rs/toolchain@v1\"\n\n \"#})\n\n .assert()\n\n .success()\n\n .stdout(predicates::str::is_empty());\n\n}\n\n\n", "file_path": "kq/tests/accessor_single/closed_prop_name_explicit.rs", "rank": 68, "score": 83938.96457463272 }, { "content": "#[test]\n\nfn absent() {\n\n Command::cargo_bin(\"kq\")\n\n .unwrap()\n\n .arg(\"[does_not_exist]\")\n\n .write_stdin(indoc! {r#\"\n\n step uses=\"actions/checkout@v1\"\n\n step run=\"cargo test --all --verbose\"\n\n \"#})\n\n .assert()\n\n .success()\n\n .stdout(predicates::str::is_empty());\n\n}\n\n\n", "file_path": "kq/tests/accessor_single/closed_prop_name_implicit.rs", "rank": 69, "score": 83938.9645746327 }, { "content": "#[test]\n\nfn absent_equal() {\n\n Command::cargo_bin(\"kq\")\n\n .unwrap()\n\n .arg(r#\"step[name() = \"profile\"]\"#)\n\n .write_stdin(indoc! {r#\"\n\n step uses=\"actions/checkout@v1\"\n\n step \"Install Rust\" uses=\"actions-rs/toolchain@v1\" {\n\n profile \"minimal\"\n\n }\n\n step \"Clippy\" run=\"cargo clippy --all -- -D warnings\"\n\n step \"Run tests\" run=\"cargo test --all --verbose\"\n\n \"#})\n\n .assert()\n\n .success()\n\n .stdout(predicates::str::is_empty());\n\n}\n\n\n", "file_path": "kq/tests/accessor_single/identifier_closed_node_name.rs", "rank": 70, "score": 81982.96222946899 }, { "content": "#[test]\n\nfn less_than() {\n\n Command::cargo_bin(\"kq\")\n\n .unwrap()\n\n .arg(r#\"step[uses < \"actions/checkout@v1\"]\"#)\n\n .write_stdin(indoc! 
{r#\"\n\n step uses=\"actions/checkout@v1\"\n\n step run=\"cargo test --all --verbose\"\n\n step uses=\"actions-rs/toolchain@v1\"\n\n \"#})\n\n .assert()\n\n .success()\n\n .stdout(predicates::str::is_empty());\n\n}\n\n\n", "file_path": "kq/tests/accessor_single/identifier_closed_prop_name_implicit.rs", "rank": 71, "score": 81982.96222946899 }, { "content": "#[test]\n\nfn greater_than_or_equal_to() {\n\n Command::cargo_bin(\"kq\")\n\n .unwrap()\n\n .arg(r#\"[uses >= \"actions/checkout@v1\"]\"#)\n\n .write_stdin(indoc! {r#\"\n\n step uses=\"actions/checkout@v1\"\n\n step run=\"cargo test --all --verbose\"\n\n step uses=\"actions-rs/toolchain@v1\"\n\n \"#})\n\n .assert()\n\n .success()\n\n .stdout(predicates::str::is_empty());\n\n}\n\n\n", "file_path": "kq/tests/accessor_single/closed_prop_name_implicit.rs", "rank": 72, "score": 81982.96222946899 }, { "content": "#[test]\n\nfn greater_than() {\n\n Command::cargo_bin(\"kq\")\n\n .unwrap()\n\n .arg(r#\"step[uses > \"actions/checkout@v1\"]\"#)\n\n .write_stdin(indoc! {r#\"\n\n step uses=\"actions/checkout@v1\"\n\n step run=\"cargo test --all --verbose\"\n\n step uses=\"actions-rs/toolchain@v1\"\n\n \"#})\n\n .assert()\n\n .success()\n\n .stdout(predicates::str::is_empty());\n\n}\n\n\n", "file_path": "kq/tests/accessor_single/identifier_closed_prop_name_implicit.rs", "rank": 73, "score": 81982.96222946899 }, { "content": "#[test]\n\nfn not_equal() {\n\n Command::cargo_bin(\"kq\")\n\n .unwrap()\n\n .arg(r#\"step[uses != \"actions/checkout@v1\"]\"#)\n\n .write_stdin(indoc! {r#\"\n\n step uses=\"actions/checkout@v1\"\n\n step run=\"cargo test --all --verbose\"\n\n step uses=\"actions-rs/toolchain@v1\"\n\n \"#})\n\n .assert()\n\n .success()\n\n .stdout(indoc! 
{r#\"\n\n step uses=\"actions-rs\\/toolchain@v1\"\n\n \"#});\n\n}\n\n\n", "file_path": "kq/tests/accessor_single/identifier_closed_prop_name_implicit.rs", "rank": 74, "score": 81982.96222946899 }, { "content": "#[test]\n\nfn absent_greater_than() {\n\n Command::cargo_bin(\"kq\")\n\n .unwrap()\n\n .arg(r#\"does_not_matter[name() > \"profile\"]\"#)\n\n .write_stdin(indoc! {r#\"\n\n step uses=\"actions/checkout@v1\"\n\n step \"Install Rust\" uses=\"actions-rs/toolchain@v1\" {\n\n profile \"minimal\"\n\n }\n\n step \"Clippy\" run=\"cargo clippy --all -- -D warnings\"\n\n step \"Run tests\" run=\"cargo test --all --verbose\"\n\n \"#})\n\n .assert()\n\n .success()\n\n .stdout(predicates::str::is_empty());\n\n}\n\n\n", "file_path": "kq/tests/accessor_single/identifier_closed_node_name.rs", "rank": 75, "score": 81982.96222946899 }, { "content": "#[test]\n\nfn ends_with() {\n\n Command::cargo_bin(\"kq\")\n\n .unwrap()\n\n .arg(r#\"step[prop(uses) $= \"@v1\"]\"#)\n\n .write_stdin(indoc! {r#\"\n\n step uses=\"actions/checkout@v1\"\n\n step run=\"cargo test --all --verbose\"\n\n step uses=\"actions-rs/toolchain@v1\"\n\n \"#})\n\n .assert()\n\n .success()\n\n .stdout(indoc! {r#\"\n\n step uses=\"actions\\/checkout@v1\"\n\n step uses=\"actions-rs\\/toolchain@v1\"\n\n \"#});\n\n}\n\n\n", "file_path": "kq/tests/accessor_single/identifier_closed_prop_name_explicit.rs", "rank": 76, "score": 81982.96222946899 }, { "content": "#[test]\n\nfn absent_ends_with() {\n\n Command::cargo_bin(\"kq\")\n\n .unwrap()\n\n .arg(r#\"step[name() $= \"file\"]\"#)\n\n .write_stdin(indoc! 
{r#\"\n\n step uses=\"actions/checkout@v1\"\n\n step \"Install Rust\" uses=\"actions-rs/toolchain@v1\" {\n\n profile \"minimal\"\n\n }\n\n step \"Clippy\" run=\"cargo clippy --all -- -D warnings\"\n\n step \"Run tests\" run=\"cargo test --all --verbose\"\n\n \"#})\n\n .assert()\n\n .success()\n\n .stdout(predicates::str::is_empty());\n\n}\n\n\n", "file_path": "kq/tests/accessor_single/identifier_closed_node_name.rs", "rank": 77, "score": 81982.96222946899 }, { "content": "#[test]\n\nfn greater_than_or_equal_to() {\n\n Command::cargo_bin(\"kq\")\n\n .unwrap()\n\n .arg(r#\"profile[name() >= \"profile\"]\"#)\n\n .write_stdin(indoc! {r#\"\n\n step uses=\"actions/checkout@v1\"\n\n step \"Install Rust\" uses=\"actions-rs/toolchain@v1\" {\n\n profile \"minimal\"\n\n }\n\n step \"Clippy\" run=\"cargo clippy --all -- -D warnings\"\n\n step \"Run tests\" run=\"cargo test --all --verbose\"\n\n \"#})\n\n .assert()\n\n .success()\n\n .stdout(predicates::str::is_empty());\n\n}\n\n\n", "file_path": "kq/tests/accessor_single/identifier_closed_node_name.rs", "rank": 78, "score": 81982.96222946899 }, { "content": "#[test]\n\nfn less_than() {\n\n Command::cargo_bin(\"kq\")\n\n .unwrap()\n\n .arg(r#\"step[prop(uses) < \"actions/checkout@v1\"]\"#)\n\n .write_stdin(indoc! {r#\"\n\n step uses=\"actions/checkout@v1\"\n\n step run=\"cargo test --all --verbose\"\n\n step uses=\"actions-rs/toolchain@v1\"\n\n \"#})\n\n .assert()\n\n .success()\n\n .stdout(predicates::str::is_empty());\n\n}\n\n\n", "file_path": "kq/tests/accessor_single/identifier_closed_prop_name_explicit.rs", "rank": 79, "score": 81982.96222946899 }, { "content": "#[test]\n\nfn absent_less_than() {\n\n Command::cargo_bin(\"kq\")\n\n .unwrap()\n\n .arg(r#\"does_not_matter[name() < \"profile\"]\"#)\n\n .write_stdin(indoc! 
{r#\"\n\n step uses=\"actions/checkout@v1\"\n\n step \"Install Rust\" uses=\"actions-rs/toolchain@v1\" {\n\n profile \"minimal\"\n\n }\n\n step \"Clippy\" run=\"cargo clippy --all -- -D warnings\"\n\n step \"Run tests\" run=\"cargo test --all --verbose\"\n\n \"#})\n\n .assert()\n\n .success()\n\n .stdout(predicates::str::is_empty());\n\n}\n\n\n", "file_path": "kq/tests/accessor_single/identifier_closed_node_name.rs", "rank": 80, "score": 81982.96222946899 }, { "content": "#[test]\n\nfn not_equal() {\n\n Command::cargo_bin(\"kq\")\n\n .unwrap()\n\n .arg(r#\"step[prop(uses) != \"actions/checkout@v1\"]\"#)\n\n .write_stdin(indoc! {r#\"\n\n step uses=\"actions/checkout@v1\"\n\n step run=\"cargo test --all --verbose\"\n\n step uses=\"actions-rs/toolchain@v1\"\n\n \"#})\n\n .assert()\n\n .success()\n\n .stdout(indoc! {r#\"\n\n step uses=\"actions-rs\\/toolchain@v1\"\n\n \"#});\n\n}\n\n\n", "file_path": "kq/tests/accessor_single/identifier_closed_prop_name_explicit.rs", "rank": 81, "score": 81982.96222946899 }, { "content": "#[test]\n\nfn starts_with() {\n\n Command::cargo_bin(\"kq\")\n\n .unwrap()\n\n .arg(r#\"step[uses ^= \"actions\"]\"#)\n\n .write_stdin(indoc! {r#\"\n\n step uses=\"actions/checkout@v1\"\n\n step run=\"cargo test --all --verbose\"\n\n step uses=\"actions-rs/toolchain@v1\"\n\n \"#})\n\n .assert()\n\n .success()\n\n .stdout(indoc! {r#\"\n\n step uses=\"actions\\/checkout@v1\"\n\n step uses=\"actions-rs\\/toolchain@v1\"\n\n \"#});\n\n}\n\n\n", "file_path": "kq/tests/accessor_single/identifier_closed_prop_name_implicit.rs", "rank": 82, "score": 81982.96222946899 }, { "content": "#[test]\n\nfn less_than_or_equal_to() {\n\n Command::cargo_bin(\"kq\")\n\n .unwrap()\n\n .arg(r#\"profile[name() <= \"profile\"]\"#)\n\n .write_stdin(indoc! 
{r#\"\n\n step uses=\"actions/checkout@v1\"\n\n step \"Install Rust\" uses=\"actions-rs/toolchain@v1\" {\n\n profile \"minimal\"\n\n }\n\n step \"Clippy\" run=\"cargo clippy --all -- -D warnings\"\n\n step \"Run tests\" run=\"cargo test --all --verbose\"\n\n \"#})\n\n .assert()\n\n .success()\n\n .stdout(predicates::str::is_empty());\n\n}\n\n\n", "file_path": "kq/tests/accessor_single/identifier_closed_node_name.rs", "rank": 83, "score": 81982.96222946899 }, { "content": "#[test]\n\nfn contains() {\n\n Command::cargo_bin(\"kq\")\n\n .unwrap()\n\n .arg(r#\"step[prop(uses) *= \"toolchain\"]\"#)\n\n .write_stdin(indoc! {r#\"\n\n step uses=\"actions/checkout@v1\"\n\n step run=\"cargo test --all --verbose\"\n\n step uses=\"actions-rs/toolchain@v1\"\n\n \"#})\n\n .assert()\n\n .success()\n\n .stdout(indoc! {r#\"\n\n step uses=\"actions-rs\\/toolchain@v1\"\n\n \"#});\n\n}\n\n\n", "file_path": "kq/tests/accessor_single/identifier_closed_prop_name_explicit.rs", "rank": 84, "score": 81982.96222946899 }, { "content": "#[test]\n\nfn less_than_or_equal_to() {\n\n Command::cargo_bin(\"kq\")\n\n .unwrap()\n\n .arg(r#\"[prop(uses) <= \"actions/checkout@v1\"]\"#)\n\n .write_stdin(indoc! {r#\"\n\n step uses=\"actions/checkout@v1\"\n\n step run=\"cargo test --all --verbose\"\n\n step uses=\"actions-rs/toolchain@v1\"\n\n \"#})\n\n .assert()\n\n .success()\n\n .stdout(predicates::str::is_empty());\n\n}\n", "file_path": "kq/tests/accessor_single/closed_prop_name_explicit.rs", "rank": 85, "score": 81982.96222946899 }, { "content": "#[test]\n\nfn absent_contains() {\n\n Command::cargo_bin(\"kq\")\n\n .unwrap()\n\n .arg(r#\"step[name() *= \"rofil\"]\"#)\n\n .write_stdin(indoc! 
{r#\"\n\n step uses=\"actions/checkout@v1\"\n\n step \"Install Rust\" uses=\"actions-rs/toolchain@v1\" {\n\n profile \"minimal\"\n\n }\n\n step \"Clippy\" run=\"cargo clippy --all -- -D warnings\"\n\n step \"Run tests\" run=\"cargo test --all --verbose\"\n\n \"#})\n\n .assert()\n\n .success()\n\n .stdout(predicates::str::is_empty());\n\n}\n\n\n", "file_path": "kq/tests/accessor_single/identifier_closed_node_name.rs", "rank": 86, "score": 81982.96222946899 }, { "content": "#[test]\n\nfn equal() {\n\n Command::cargo_bin(\"kq\")\n\n .unwrap()\n\n .arg(r#\"step[prop(uses) = \"actions/checkout@v1\"]\"#)\n\n .write_stdin(indoc! {r#\"\n\n step uses=\"actions/checkout@v1\"\n\n step run=\"cargo test --all --verbose\"\n\n step uses=\"actions-rs/toolchain@v1\"\n\n \"#})\n\n .assert()\n\n .success()\n\n .stdout(indoc! {r#\"\n\n step uses=\"actions\\/checkout@v1\"\n\n \"#});\n\n}\n\n\n", "file_path": "kq/tests/accessor_single/identifier_closed_prop_name_explicit.rs", "rank": 87, "score": 81982.96222946899 }, { "content": "#[test]\n\nfn starts_with() {\n\n Command::cargo_bin(\"kq\")\n\n .unwrap()\n\n .arg(r#\"step[prop(uses) ^= \"actions\"]\"#)\n\n .write_stdin(indoc! {r#\"\n\n step uses=\"actions/checkout@v1\"\n\n step run=\"cargo test --all --verbose\"\n\n step uses=\"actions-rs/toolchain@v1\"\n\n \"#})\n\n .assert()\n\n .success()\n\n .stdout(indoc! {r#\"\n\n step uses=\"actions\\/checkout@v1\"\n\n step uses=\"actions-rs\\/toolchain@v1\"\n\n \"#});\n\n}\n\n\n", "file_path": "kq/tests/accessor_single/identifier_closed_prop_name_explicit.rs", "rank": 88, "score": 81982.96222946899 }, { "content": "#[test]\n\nfn greater_than_or_equal_to() {\n\n Command::cargo_bin(\"kq\")\n\n .unwrap()\n\n .arg(r#\"[prop(uses) >= \"actions/checkout@v1\"]\"#)\n\n .write_stdin(indoc! 
{r#\"\n\n step uses=\"actions/checkout@v1\"\n\n step run=\"cargo test --all --verbose\"\n\n step uses=\"actions-rs/toolchain@v1\"\n\n \"#})\n\n .assert()\n\n .success()\n\n .stdout(predicates::str::is_empty());\n\n}\n\n\n", "file_path": "kq/tests/accessor_single/closed_prop_name_explicit.rs", "rank": 89, "score": 81982.96222946899 }, { "content": "#[test]\n\nfn absent_starts_with() {\n\n Command::cargo_bin(\"kq\")\n\n .unwrap()\n\n .arg(r#\"step[name() ^= \"pro\"]\"#)\n\n .write_stdin(indoc! {r#\"\n\n step uses=\"actions/checkout@v1\"\n\n step \"Install Rust\" uses=\"actions-rs/toolchain@v1\" {\n\n profile \"minimal\"\n\n }\n\n step \"Clippy\" run=\"cargo clippy --all -- -D warnings\"\n\n step \"Run tests\" run=\"cargo test --all --verbose\"\n\n \"#})\n\n .assert()\n\n .success()\n\n .stdout(predicates::str::is_empty());\n\n}\n\n\n", "file_path": "kq/tests/accessor_single/identifier_closed_node_name.rs", "rank": 90, "score": 81982.96222946899 }, { "content": "#[test]\n\nfn greater_than() {\n\n Command::cargo_bin(\"kq\")\n\n .unwrap()\n\n .arg(r#\"step[prop(uses) > \"actions/checkout@v1\"]\"#)\n\n .write_stdin(indoc! {r#\"\n\n step uses=\"actions/checkout@v1\"\n\n step run=\"cargo test --all --verbose\"\n\n step uses=\"actions-rs/toolchain@v1\"\n\n \"#})\n\n .assert()\n\n .success()\n\n .stdout(predicates::str::is_empty());\n\n}\n\n\n", "file_path": "kq/tests/accessor_single/identifier_closed_prop_name_explicit.rs", "rank": 91, "score": 81982.96222946899 }, { "content": "#[test]\n\nfn equal() {\n\n Command::cargo_bin(\"kq\")\n\n .unwrap()\n\n .arg(r#\"step[uses = \"actions/checkout@v1\"]\"#)\n\n .write_stdin(indoc! {r#\"\n\n step uses=\"actions/checkout@v1\"\n\n step run=\"cargo test --all --verbose\"\n\n step uses=\"actions-rs/toolchain@v1\"\n\n \"#})\n\n .assert()\n\n .success()\n\n .stdout(indoc! 
{r#\"\n\n step uses=\"actions\\/checkout@v1\"\n\n \"#});\n\n}\n\n\n", "file_path": "kq/tests/accessor_single/identifier_closed_prop_name_implicit.rs", "rank": 92, "score": 81982.96222946899 }, { "content": "#[test]\n\nfn contains() {\n\n Command::cargo_bin(\"kq\")\n\n .unwrap()\n\n .arg(r#\"step[uses *= \"s/\"]\"#)\n\n .write_stdin(indoc! {r#\"\n\n step uses=\"actions/checkout@v1\"\n\n step run=\"cargo test --all --verbose\"\n\n step uses=\"actions-rs/toolchain@v1\"\n\n \"#})\n\n .assert()\n\n .success()\n\n .stdout(indoc! {r#\"\n\n step uses=\"actions\\/checkout@v1\"\n\n step uses=\"actions-rs\\/toolchain@v1\"\n\n \"#});\n\n}\n\n\n", "file_path": "kq/tests/accessor_single/identifier_closed_prop_name_implicit.rs", "rank": 93, "score": 81982.96222946899 }, { "content": "#[test]\n\nfn less_than_or_equal_to() {\n\n Command::cargo_bin(\"kq\")\n\n .unwrap()\n\n .arg(r#\"[uses <= \"actions/checkout@v1\"]\"#)\n\n .write_stdin(indoc! {r#\"\n\n step uses=\"actions/checkout@v1\"\n\n step run=\"cargo test --all --verbose\"\n\n step uses=\"actions-rs/toolchain@v1\"\n\n \"#})\n\n .assert()\n\n .success()\n\n .stdout(predicates::str::is_empty());\n\n}\n", "file_path": "kq/tests/accessor_single/closed_prop_name_implicit.rs", "rank": 94, "score": 81982.96222946899 }, { "content": "#[test]\n\nfn ends_with() {\n\n Command::cargo_bin(\"kq\")\n\n .unwrap()\n\n .arg(r#\"step[uses $= \"@v1\"]\"#)\n\n .write_stdin(indoc! {r#\"\n\n step uses=\"actions/checkout@v1\"\n\n step run=\"cargo test --all --verbose\"\n\n step uses=\"actions-rs/toolchain@v1\"\n\n \"#})\n\n .assert()\n\n .success()\n\n .stdout(indoc! 
{r#\"\n\n step uses=\"actions\\/checkout@v1\"\n\n step uses=\"actions-rs\\/toolchain@v1\"\n\n \"#});\n\n}\n\n\n", "file_path": "kq/tests/accessor_single/identifier_closed_prop_name_implicit.rs", "rank": 95, "score": 81982.96222946899 }, { "content": "#[test]\n\nfn absent_not_equal() {\n\n Command::cargo_bin(\"kq\")\n\n .unwrap()\n\n .arg(r#\"nonsense[name() != \"step\"]\"#)\n\n .write_stdin(indoc! {r#\"\n\n step uses=\"actions/checkout@v1\"\n\n step \"Install Rust\" uses=\"actions-rs/toolchain@v1\" {\n\n profile \"minimal\"\n\n }\n\n step \"Clippy\" run=\"cargo clippy --all -- -D warnings\"\n\n step \"Run tests\" run=\"cargo test --all --verbose\"\n\n \"#})\n\n .assert()\n\n .success()\n\n .stdout(predicates::str::is_empty());\n\n}\n\n\n", "file_path": "kq/tests/accessor_single/identifier_closed_node_name.rs", "rank": 96, "score": 81982.96222946899 }, { "content": "/// https://github.com/kdl-org/kdl-rs/blob/v3.0.0/src/parser.rs#L237-L247\n\n/// a map and its inverse of escape-sequence<->char\n\n///\n\n/// (instead of building a map by phf, use a function with pattern matching)\n\nfn escape_chars(input: char) -> Option<char> {\n\n match input {\n\n '\"' => Some('\"'),\n\n '\\\\' => Some('\\\\'),\n\n '/' => Some('/'),\n\n 'b' => Some('\\u{08}'),\n\n 'f' => Some('\\u{0C}'),\n\n 'n' => Some('\\n'),\n\n 'r' => Some('\\r'),\n\n 't' => Some('\\t'),\n\n _ => None,\n\n }\n\n}\n\n\n", "file_path": "kq/src/kdlrs.rs", "rank": 97, "score": 81197.90159667653 }, { "content": "#[test]\n\nfn absent_greater_than_or_equal_to() {\n\n Command::cargo_bin(\"kq\")\n\n .unwrap()\n\n .arg(r#\"does_not_matter[name() >= \"profile\"]\"#)\n\n .write_stdin(indoc! 
{r#\"\n\n step uses=\"actions/checkout@v1\"\n\n step \"Install Rust\" uses=\"actions-rs/toolchain@v1\" {\n\n profile \"minimal\"\n\n }\n\n step \"Clippy\" run=\"cargo clippy --all -- -D warnings\"\n\n step \"Run tests\" run=\"cargo test --all --verbose\"\n\n \"#})\n\n .assert()\n\n .success()\n\n .stdout(predicates::str::is_empty());\n\n}\n\n\n", "file_path": "kq/tests/accessor_single/identifier_closed_node_name.rs", "rank": 98, "score": 80123.34894208101 }, { "content": "#[test]\n\nfn absent_less_than_or_equal_to() {\n\n Command::cargo_bin(\"kq\")\n\n .unwrap()\n\n .arg(r#\"does_not_matter[name() <= \"profile\"]\"#)\n\n .write_stdin(indoc! {r#\"\n\n step uses=\"actions/checkout@v1\"\n\n step \"Install Rust\" uses=\"actions-rs/toolchain@v1\" {\n\n profile \"minimal\"\n\n }\n\n step \"Clippy\" run=\"cargo clippy --all -- -D warnings\"\n\n step \"Run tests\" run=\"cargo test --all --verbose\"\n\n \"#})\n\n .assert()\n\n .success()\n\n .stdout(predicates::str::is_empty());\n\n}\n", "file_path": "kq/tests/accessor_single/identifier_closed_node_name.rs", "rank": 99, "score": 80123.34894208101 } ]
Rust
src/texture/pixel_format.rs
jsmith628/gl-struct
57b29458d477194bb87f170b1cfe01fb3a0f725f
use super::*; glenum! { pub enum InternalFormatFloat { RED, RG, RGB, RGBA, COMPRESSED_RED, COMPRESSED_RG, COMPRESSED_RGB, COMPRESSED_RGBA, COMPRESSED_SRGB, COMPRESSED_SRGB_ALPHA, R8,R8_SNORM, R16,R16_SNORM, RG8,RG8_SNORM, RG16,RG16_SNORM, R3_G3_B2, RGB4, RGB5, RGB565, RGB8,RGB8_SNORM, RGB10, RGB12, RGB16,RGB16_SNORM, RGBA2, RGBA4, RGB5_A1, RGBA8,RGBA8_SNORM, RGB10_A2, RGBA12,RGBA16,RGBA16_SNORM, SRGB8,SRGB8_ALPHA8, R16F, RG16F, RGB16F, RGBA16F, R32F, RG32F, RGB32F, RGBA32F, R11F_G11F_B10F, RGB9_E5, COMPRESSED_RED_RGTC1, COMPRESSED_SIGNED_RED_RGTC1, COMPRESSED_RG_RGTC2, COMPRESSED_SIGNED_RG_RGTC2, COMPRESSED_RGBA_BPTC_UNORM, COMPRESSED_SRGB_ALPHA_BPTC_UNORM, COMPRESSED_RGB_BPTC_SIGNED_FLOAT, COMPRESSED_RGB_BPTC_UNSIGNED_FLOAT, COMPRESSED_RGB8_ETC2, COMPRESSED_SRGB8_ETC2, COMPRESSED_RGB8_PUNCHTHROUGH_ALPHA1_ETC2, COMPRESSED_SRGB8_PUNCHTHROUGH_ALPHA1_ETC2, COMPRESSED_RGBA8_ETC2_EAC, COMPRESSED_SRGB8_ALPHA8_ETC2_EAC, COMPRESSED_R11_EAC, COMPRESSED_SIGNED_R11_EAC, COMPRESSED_RG11_EAC, COMPRESSED_SIGNED_RG11_EAC } pub enum InternalFormatInt { R8I, R16I, R32I, RG8I, RG16I, RG32I, RGB8I, RGB16I, RGB32I, RGBA8I, RGBA16I, RGBA32I } pub enum InternalFormatUInt { R8UI, R16UI, R32UI, RG8UI, RG16UI, RG32UI, RGB8UI, RGB16UI, RGB32UI, RGBA8UI, RGBA16UI, RGBA32UI, RGB10_A2UI } pub enum InternalFormatDepth { DEPTH_COMPONENT, DEPTH_COMPONENT16, DEPTH_COMPONENT24, DEPTH_COMPONENT32, DEPTH_COMPONENT32F } pub enum InternalFormatStencil { STENCIL_INDEX, STENCIL_INDEX1, STENCIL_INDEX4, STENCIL_INDEX8, STENCIL_INDEX16 } pub enum InternalFormatDepthStencil { DEPTH_STENCIL, DEPTH24_STENCIL8, DEPTH32F_STENCIL8 } } pub unsafe trait InternalFormat: GLEnum { type TypeFormat: PixelFormatType; } unsafe impl InternalFormat for InternalFormatFloat { type TypeFormat = FloatFormatType; } unsafe impl InternalFormat for InternalFormatUInt { type TypeFormat = IntFormatType; } unsafe impl InternalFormat for InternalFormatInt { type TypeFormat = IntFormatType; } unsafe impl InternalFormat for 
InternalFormatDepth { type TypeFormat = DepthFormatType; } unsafe impl InternalFormat for InternalFormatStencil { type TypeFormat = StencilFormatType; } unsafe impl InternalFormat for InternalFormatDepthStencil { type TypeFormat = DepthStencilFormatType; } glenum! { pub enum FormatDepth { DEPTH_COMPONENT } pub enum FormatStencil { STENCIL_INDEX } pub enum FormatDepthStencil { DEPTH_COMPONENT, STENCIL_INDEX, DEPTH_STENCIL } pub enum FormatFloat { RED, GREEN, BLUE, RG, RGB, BGR, RGBA, BGRA } pub enum FormatInt { RED_INTEGER, GREEN_INTEGER, BLUE_INTEGER, RG_INTEGER, RGB_INTEGER, BGR_INTEGER, RGBA_INTEGER, BGRA_INTEGER } } impl From<FormatInt> for FormatFloat { #[inline] fn from(fmt: FormatInt) -> Self { match fmt { FormatInt::RED_INTEGER => Self::RED, FormatInt::GREEN_INTEGER => Self::GREEN, FormatInt::BLUE_INTEGER => Self::BLUE, FormatInt::RG_INTEGER => Self::RG, FormatInt::RGB_INTEGER => Self::RGB, FormatInt::BGR_INTEGER => Self::BGR, FormatInt::RGBA_INTEGER => Self::RGBA, FormatInt::BGRA_INTEGER => Self::BGRA } } } impl From<FormatFloat> for FormatInt { #[inline] fn from(fmt: FormatFloat) -> Self { match fmt { FormatFloat::RED => Self::RED_INTEGER, FormatFloat::GREEN => Self::GREEN_INTEGER, FormatFloat::BLUE => Self::BLUE_INTEGER, FormatFloat::RG => Self::RG_INTEGER, FormatFloat::RGB => Self::RGB_INTEGER, FormatFloat::BGR => Self::BGR_INTEGER, FormatFloat::RGBA => Self::RGBA_INTEGER, FormatFloat::BGRA => Self::BGRA_INTEGER } } } impl From<FormatDepth> for FormatDepthStencil { #[inline] fn from(_fmt: FormatDepth) -> Self {Self::DEPTH_COMPONENT} } impl From<FormatStencil> for FormatDepthStencil { #[inline] fn from(_fmt: FormatStencil) -> Self {Self::STENCIL_INDEX} } pub unsafe trait PixelFormat: GLEnum { fn components(self) -> usize; } unsafe impl PixelFormat for FormatDepth { #[inline] fn components(self) -> usize {1} } unsafe impl PixelFormat for FormatStencil { #[inline] fn components(self) -> usize {1} } unsafe impl PixelFormat for FormatDepthStencil { #[inline] 
fn components(self) -> usize { if self == FormatDepthStencil::DEPTH_STENCIL {2} else {1} } } unsafe impl PixelFormat for FormatFloat { #[inline] fn components(self) -> usize { match self { Self::RED | Self::GREEN | Self::BLUE => 1, Self::RG => 2, Self::RGB | Self::BGR => 3, Self::RGBA | Self::BGRA => 4, } } } unsafe impl PixelFormat for FormatInt { #[inline] fn components(self) -> usize { match self { Self::RED_INTEGER | Self::GREEN_INTEGER | Self::BLUE_INTEGER => 1, Self::RG_INTEGER => 2, Self::RGB_INTEGER | Self::BGR_INTEGER => 3, Self::RGBA_INTEGER | Self::BGRA_INTEGER => 4, } } } glenum! { pub enum PixelType { UNSIGNED_BYTE, BYTE, UNSIGNED_SHORT, SHORT, UNSIGNED_INT, INT, HALF_FLOAT, FLOAT, UNSIGNED_BYTE_3_3_2, UNSIGNED_BYTE_2_3_3_REV, UNSIGNED_SHORT_5_6_5, UNSIGNED_SHORT_5_6_5_REV, UNSIGNED_SHORT_4_4_4_4, UNSIGNED_SHORT_4_4_4_4_REV, UNSIGNED_SHORT_5_5_5_1, UNSIGNED_SHORT_1_5_5_5_REV, UNSIGNED_INT_8_8_8_8, UNSIGNED_INT_8_8_8_8_REV, UNSIGNED_INT_10_10_10_2, UNSIGNED_INT_2_10_10_10_REV, UNSIGNED_INT_10F_11F_11F_REV, UNSIGNED_INT_5_9_9_9_REV, UNSIGNED_INT_24_8, FLOAT_32_UNSIGNED_INT_24_8_REV } pub enum SpecialFloatType { UNSIGNED_INT_10F_11F_11F_REV, UNSIGNED_INT_5_9_9_9_REV } pub enum SpecialDepthStencilType { FLOAT_32_UNSIGNED_INT_24_8_REV } } impl From<FloatType> for PixelType { #[inline] fn from(f:FloatType) -> Self {(f as GLenum).try_into().unwrap()} } impl From<IntType> for PixelType { #[inline] fn from(f:IntType) -> Self {(f as GLenum).try_into().unwrap()} } pub trait PixelFormatType: Copy+Clone+PartialEq+Eq+Hash+Debug { type Format: PixelFormat; fn size(self) -> usize; unsafe fn format_type(self) -> (Self::Format, PixelType); } #[derive(Copy,Clone,PartialEq,Eq,Hash,Debug)] pub enum IntFormatType { Integer(FormatInt, IntType), UShort4_4_4_4, UShort4_4_4_4Rev, UShort5_5_5_1, UShort1_5_5_5Rev, UInt8_8_8_8, UInt8_8_8_8Rev, UInt10_10_10_2, UInt10_10_10_2Rev } display_from_debug!(IntFormatType); impl PixelFormatType for IntFormatType { type Format = FormatInt; 
#[inline] fn size(self) -> usize { match self { Self::Integer(format, ty) => format.components() * ty.size(), Self::UShort4_4_4_4 | Self::UShort4_4_4_4Rev | Self::UShort5_5_5_1 | Self::UShort1_5_5_5Rev => 2, Self::UInt8_8_8_8 | Self::UInt8_8_8_8Rev | Self::UInt10_10_10_2 | Self::UInt10_10_10_2Rev => 4 } } #[inline] unsafe fn format_type(self) -> (Self::Format, PixelType) { match self { Self::Integer(format, ty) => (format, ty.into()), _ => ( FormatInt::RGBA_INTEGER, match self { Self::UShort4_4_4_4 => PixelType::UNSIGNED_SHORT_4_4_4_4, Self::UShort4_4_4_4Rev => PixelType::UNSIGNED_SHORT_4_4_4_4_REV, Self::UShort5_5_5_1 => PixelType::UNSIGNED_SHORT_5_5_5_1, Self::UShort1_5_5_5Rev => PixelType::UNSIGNED_SHORT_1_5_5_5_REV, Self::UInt8_8_8_8 => PixelType::UNSIGNED_INT_8_8_8_8, Self::UInt8_8_8_8Rev => PixelType::UNSIGNED_INT_8_8_8_8_REV, Self::UInt10_10_10_2 => PixelType::UNSIGNED_INT_10_10_10_2, Self::UInt10_10_10_2Rev => PixelType::UNSIGNED_INT_2_10_10_10_REV, _ => panic!("Unknown type: {}", self) } ) } } } #[derive(Copy,Clone,PartialEq,Eq,Hash,Debug)] pub enum FloatFormatType { Float(FormatFloat, FloatType), Fixed(IntFormatType), UByte3_3_2, UByte2_3_3Rev, UShort5_6_5, UShort5_6_5Rev } display_from_debug!(FloatFormatType); impl PixelFormatType for FloatFormatType { type Format = FormatFloat; #[inline] fn size(self) -> usize { match self { Self::Float(format, ty) => format.components() * ty.size_of(), Self::Fixed(int) => int.size(), Self::UByte3_3_2 | Self::UByte2_3_3Rev => 1, Self::UShort5_6_5 | Self::UShort5_6_5Rev => 2 } } #[inline] unsafe fn format_type(self) -> (Self::Format, PixelType) { match self { Self::Float(format, ty) => (format, ty.into()), Self::Fixed(int) => {let ft = int.format_type(); (ft.0.into(), ft.1)}, Self::UByte3_3_2 => (FormatFloat::RGB, PixelType::UNSIGNED_BYTE_3_3_2), Self::UByte2_3_3Rev => (FormatFloat::RGB, PixelType::UNSIGNED_BYTE_2_3_3_REV), Self::UShort5_6_5 => (FormatFloat::RGB, PixelType::UNSIGNED_SHORT_5_6_5), Self::UShort5_6_5Rev => 
(FormatFloat::RGB, PixelType::UNSIGNED_SHORT_5_6_5_REV) } } } #[derive(Copy,Clone,PartialEq,Eq,Hash,Debug)] pub enum DepthFormatType { Fixed(IntType), Float(FloatType) } display_from_debug!(DepthFormatType); impl PixelFormatType for DepthFormatType { type Format = FormatDepth; #[inline] fn size(self) -> usize { match self { Self::Fixed(ty) => ty.size_of(), Self::Float(ty) => ty.size_of() } } #[inline] unsafe fn format_type(self) -> (Self::Format, PixelType) { match self { Self::Fixed(ty) => (FormatDepth::DEPTH_COMPONENT, ty.into()), Self::Float(ty) => (FormatDepth::DEPTH_COMPONENT, ty.into()) } } } #[derive(Copy,Clone,PartialEq,Eq,Hash,Debug)] pub struct StencilFormatType(pub IntType); display_from_debug!(StencilFormatType); impl PixelFormatType for StencilFormatType { type Format = FormatStencil; #[inline] fn size(self) -> usize { self.0.size_of() } #[inline] unsafe fn format_type(self) -> (FormatStencil, PixelType) { (FormatStencil::STENCIL_INDEX, self.0.into()) } } #[derive(Copy,Clone,PartialEq,Eq,Hash,Debug)] pub enum DepthStencilFormatType { DepthComponent(DepthFormatType), StencilIndex(StencilFormatType), UInt24_8 } impl PixelFormatType for DepthStencilFormatType { type Format = FormatDepthStencil; #[inline] fn size(self) -> usize { match self { Self::DepthComponent(ty) => ty.size(), Self::StencilIndex(ty) => ty.size(), Self::UInt24_8 => 4 } } #[inline] unsafe fn format_type(self) -> (Self::Format, PixelType) { match self { Self::DepthComponent(ty) => (FormatDepthStencil::DEPTH_COMPONENT, ty.format_type().1), Self::StencilIndex(ty) => (FormatDepthStencil::STENCIL_INDEX, ty.format_type().1), Self::UInt24_8 => (FormatDepthStencil::DEPTH_STENCIL, PixelType::UNSIGNED_INT_24_8), } } } display_from_debug!(DepthStencilFormatType);
use super::*; glenum! { pub enum InternalFormatFloat { RED, RG, RGB, RGBA, COMPRESSED_RED, COMPRESSED_RG, COMPRESSED_RGB, COMPRESSED_RGBA, COMPRESSED_SRGB, COMPRESSED_SRGB_ALPHA, R8,R8_SNORM, R16,R16_SNORM, RG8,RG8_SNORM, RG16,RG16_SNORM, R3_G3_B2, RGB4, RGB5, RGB565, RGB8,RGB8_SNORM, RGB10, RGB12, RGB16,RGB16_SNORM, RGBA2, RGBA4, RGB5_A1, RGBA8,RGBA8_SNORM, RGB10_A2, RGBA12,RGBA16,RGBA16_SNORM, SRGB8,SRGB8_ALPHA8, R16F, RG16F, RGB16F, RGBA16F, R32F, RG32F, RGB32F, RGBA32F, R11F_G11F_B10F, RGB9_E5, COMPRESSED_RED_RGTC1, COMPRESSED_SIGNED_RED_RGTC1, COMPRESSED_RG_RGTC2, COMPRESSED_SIGNED_RG_RGTC2, COMPRESSED_RGBA_BPTC_UNORM, COMPRESSED_SRGB_ALPHA_BPTC_UNORM, COMPRESSED_RGB_BPTC_SIGNED_FLOAT, COMPRESSED_RGB_BPTC_UNSIGNED_FLOAT, COMPRESSED_RGB8_ETC2, COMPRESSED_SRGB8_ETC2, COMPRESSED_RGB8_PUNCHTHROUGH_ALPHA1_ETC2, COMPRESSED_SRGB8_PUNCHTHROUGH_ALPHA1_ETC2, COMPRESSED_RGBA8_ETC2_EAC, COMPRESSED_SRGB8_ALPHA8_ETC2_EAC, COMPRESSED_R11_EAC, COMPRESSED_SIGNED_R11_EAC, COMPRESSED_RG11_EAC, COMPRESSED_SIGNED_RG11_EAC } pub enum InternalFormatInt { R8I, R16I, R32I, RG8I, RG16I, RG32I, RGB8I, RGB16I, RGB32I, RGBA8I, RGBA16I, RGBA32I } pub enum InternalFormatUInt { R8UI, R16UI, R32UI, RG8UI, RG16UI, RG32UI, RGB8UI, RGB16UI, RGB32UI, RGBA8UI, RGBA16UI, RGBA32UI, RGB10_A2UI } pub enum InternalFormatDepth { DEPTH_COMPONENT, DEPTH_COMPONENT16, DEPTH_COMPONENT24, DEPTH_COMPONENT32, DEPTH_COMPONENT32F } pub enum InternalFormatStencil { STENCIL_INDEX, STENCIL_INDEX1, STENCIL_INDEX4, STENCIL_INDEX8, STENCIL_INDEX16 } pub enum InternalFormatDepthStencil { DEPTH_STENCIL, DEPTH24_STENCIL8, DEPTH32F_STENCIL8 } } pub unsafe trait InternalFormat: GLEnum { type TypeFormat: PixelFormatType; } unsafe impl InternalFormat for InternalFormatFloat { type TypeFormat = FloatFormatType; } unsafe impl InternalFormat for InternalFormatUInt { type TypeFormat = IntFormatType; } unsafe impl InternalFormat for InternalFormatInt { type TypeFormat = IntFormatType; } unsafe impl InternalFormat for 
InternalFormatDepth { type TypeFormat = DepthFormatType; } unsafe impl InternalFormat for InternalFormatStencil { type TypeFormat = StencilFormatType; } unsafe impl InternalFormat for InternalFormatDepthStencil { type TypeFormat = DepthStencilFormatType; } glenum! { pub enum FormatDepth { DEPTH_COMPONENT } pub enum FormatStencil { STENCIL_INDEX } pub enum FormatDepthStencil { DEPTH_COMPONENT, STENCIL_INDEX, DEPTH_STENCIL } pub enum FormatFloat { RED, GREEN, BLUE, RG, RGB, BGR, RGBA, BGRA } pub enum FormatInt { RED_INTEGER, GREEN_INTEGER, BLUE_INTEGER, RG_INTEGER, RGB_INTEGER, BGR_INTEGER, RGBA_INTEGER, BGRA_INTEGER } } impl From<FormatInt> for FormatFloat { #[inline] fn from(fmt: FormatInt) -> Self { match fmt { FormatInt::RED_INTEGER => Self::RED, FormatInt::GREEN_INTEGER => Self::GREEN, FormatInt::BLUE_INTEGER => Self::BLUE, FormatInt::RG_INTEGER => Self::RG, FormatInt::RGB_INTEGER => Self::RGB, FormatInt::BGR_INTEGER => Self::BGR, FormatInt::RGBA_INTEGER => Self::RGBA, FormatInt::BGRA_INTEGER => Self::BGRA } } } impl From<FormatFloat> for FormatInt { #[inline] fn from(fmt: FormatFloat) -> Self { match fmt { FormatFloat::RED => Self::RED_INTEGER, FormatFloat::GREEN => Self::GREEN_INTEGER, FormatFloat::BLUE => Self::BLUE_INTEGER, FormatFloat::RG => Self::RG_INTEGER, FormatFloat::RGB => Self::RGB_INTEGER, FormatFloat::BGR => Self::BGR_INTEGER, FormatFloat::RGBA => Self::RGBA_INTEGER, FormatFloat::BGRA => Self::BGRA_INTEGER } } } impl From<FormatDepth> for FormatDepthStencil { #[inline] fn from(_fmt: FormatDepth) -> Self {Self::DEPTH_COMPONENT} } impl From<FormatStencil> for FormatDepthStencil { #[inline] fn from(_fmt: FormatStencil) -> Self {Self::STENCIL_INDEX} } pub unsafe trait PixelFormat: GLEnum { fn components(self) -> usize; } unsafe impl PixelFormat for FormatDepth { #[inline] fn components(self) -> usize {1} } unsafe impl PixelFormat for FormatStencil { #[inline] fn components(self) -> usize {1} } unsafe impl PixelFormat for FormatDepthStencil { #[inline] 
fn components(self) -> usize { if self == FormatDepthStencil::DEPTH_STENCIL {2} else {1} } } unsafe impl PixelFormat for FormatFloat { #[inline] fn components(self) -> usize { match self { Self::RED | Self::GREEN | Self::BLUE => 1, Self::RG => 2, Self::RGB | Self::BGR => 3, Self::RGBA | Self::BGRA => 4, } } } unsafe impl PixelFormat for FormatInt { #[inline] fn components(self) -> usize { match self { Self::RED_INTEGER | Self::GREEN_INTEGER | Self::BLUE_INTEGER => 1, Self::RG_INTEGER => 2, Self::RGB_INTEGER | Self::BGR_INTEGER => 3, Self::RGBA_INTEGER | Self::BGRA_INTEGER => 4, } } } glenum! { pub enum PixelType { UNSIGNED_BYTE, BYTE, UNSIGNED_SHORT, SHORT, UNSIGNED_INT, INT, HALF_FLOAT, FLOAT, UNSIGNED_BYTE_3_3_2, UNSIGNED_BYTE_2_3_3_REV, UNSIGNED_SHORT_5_6_5, UNSIGNED_SHORT_5_6_5_REV, UNSIGNED_SHORT_4_4_4_4, UNSIGNED_SHORT_4_4_4_4_REV, UNSIGNED_SHORT_5_5_5_1, UNSIGNED_SHORT_1_5_5_5_REV, UNSIGNED_INT_8_8_8_8, UNSIGNED_INT_8_8_8_8_REV, UNSIGNED_INT_10_10_10_2, UNSIGNED_INT_2_10_10_10_REV, UNSIGNED_INT_10F_11F_11F_REV, UNSIGNED_INT_5_9_9_9_REV, UNSIGNED_INT_24_8, FLOAT_32_UNSIGNED_INT_24_8_REV } pub enum SpecialFloatType { UNSIGNED_INT_10F_11F_11F_REV, UNSIGNED_INT_5_9_9_9_REV } pub enum SpecialDepthStencilType { FLOAT_32_UNSIGNED_INT_24_8_REV } } impl From<FloatType> for PixelType { #[inline] fn from(f:FloatType) -> Self {(f as GLenum).try_into().unwrap()} } impl From<IntType> for PixelType { #[inline] fn from(f:IntType) -> Self {(f as GLenum).try_into().unwrap()} } pub trait PixelFormatType: Copy+Clone+PartialEq+Eq+Hash+Debug { type Format: PixelFormat; fn size(self) -> usize; unsafe fn format_type(self) -> (Self::Format, PixelType); } #[derive(Copy,Clone,PartialEq,Eq,Hash,Debug)] pub enum IntFormatType { Integer(FormatInt, IntType), UShort4_4_4_4, UShort4_4_4_4Rev, UShort5_5_5_1, UShort1_5_5_5Rev, UInt8_8_8_8, UInt8_8_8_8Rev, UInt10_10_10_2, UInt10_10_10_2Rev } display_from_debug!(IntFormatType); impl PixelFormatType for IntFormatType { type Format = FormatInt; 
#[inline] fn size(self) -> usize {
} #[inline] unsafe fn format_type(self) -> (Self::Format, PixelType) { match self { Self::Integer(format, ty) => (format, ty.into()), _ => ( FormatInt::RGBA_INTEGER, match self { Self::UShort4_4_4_4 => PixelType::UNSIGNED_SHORT_4_4_4_4, Self::UShort4_4_4_4Rev => PixelType::UNSIGNED_SHORT_4_4_4_4_REV, Self::UShort5_5_5_1 => PixelType::UNSIGNED_SHORT_5_5_5_1, Self::UShort1_5_5_5Rev => PixelType::UNSIGNED_SHORT_1_5_5_5_REV, Self::UInt8_8_8_8 => PixelType::UNSIGNED_INT_8_8_8_8, Self::UInt8_8_8_8Rev => PixelType::UNSIGNED_INT_8_8_8_8_REV, Self::UInt10_10_10_2 => PixelType::UNSIGNED_INT_10_10_10_2, Self::UInt10_10_10_2Rev => PixelType::UNSIGNED_INT_2_10_10_10_REV, _ => panic!("Unknown type: {}", self) } ) } } } #[derive(Copy,Clone,PartialEq,Eq,Hash,Debug)] pub enum FloatFormatType { Float(FormatFloat, FloatType), Fixed(IntFormatType), UByte3_3_2, UByte2_3_3Rev, UShort5_6_5, UShort5_6_5Rev } display_from_debug!(FloatFormatType); impl PixelFormatType for FloatFormatType { type Format = FormatFloat; #[inline] fn size(self) -> usize { match self { Self::Float(format, ty) => format.components() * ty.size_of(), Self::Fixed(int) => int.size(), Self::UByte3_3_2 | Self::UByte2_3_3Rev => 1, Self::UShort5_6_5 | Self::UShort5_6_5Rev => 2 } } #[inline] unsafe fn format_type(self) -> (Self::Format, PixelType) { match self { Self::Float(format, ty) => (format, ty.into()), Self::Fixed(int) => {let ft = int.format_type(); (ft.0.into(), ft.1)}, Self::UByte3_3_2 => (FormatFloat::RGB, PixelType::UNSIGNED_BYTE_3_3_2), Self::UByte2_3_3Rev => (FormatFloat::RGB, PixelType::UNSIGNED_BYTE_2_3_3_REV), Self::UShort5_6_5 => (FormatFloat::RGB, PixelType::UNSIGNED_SHORT_5_6_5), Self::UShort5_6_5Rev => (FormatFloat::RGB, PixelType::UNSIGNED_SHORT_5_6_5_REV) } } } #[derive(Copy,Clone,PartialEq,Eq,Hash,Debug)] pub enum DepthFormatType { Fixed(IntType), Float(FloatType) } display_from_debug!(DepthFormatType); impl PixelFormatType for DepthFormatType { type Format = FormatDepth; #[inline] fn size(self) -> 
usize { match self { Self::Fixed(ty) => ty.size_of(), Self::Float(ty) => ty.size_of() } } #[inline] unsafe fn format_type(self) -> (Self::Format, PixelType) { match self { Self::Fixed(ty) => (FormatDepth::DEPTH_COMPONENT, ty.into()), Self::Float(ty) => (FormatDepth::DEPTH_COMPONENT, ty.into()) } } } #[derive(Copy,Clone,PartialEq,Eq,Hash,Debug)] pub struct StencilFormatType(pub IntType); display_from_debug!(StencilFormatType); impl PixelFormatType for StencilFormatType { type Format = FormatStencil; #[inline] fn size(self) -> usize { self.0.size_of() } #[inline] unsafe fn format_type(self) -> (FormatStencil, PixelType) { (FormatStencil::STENCIL_INDEX, self.0.into()) } } #[derive(Copy,Clone,PartialEq,Eq,Hash,Debug)] pub enum DepthStencilFormatType { DepthComponent(DepthFormatType), StencilIndex(StencilFormatType), UInt24_8 } impl PixelFormatType for DepthStencilFormatType { type Format = FormatDepthStencil; #[inline] fn size(self) -> usize { match self { Self::DepthComponent(ty) => ty.size(), Self::StencilIndex(ty) => ty.size(), Self::UInt24_8 => 4 } } #[inline] unsafe fn format_type(self) -> (Self::Format, PixelType) { match self { Self::DepthComponent(ty) => (FormatDepthStencil::DEPTH_COMPONENT, ty.format_type().1), Self::StencilIndex(ty) => (FormatDepthStencil::STENCIL_INDEX, ty.format_type().1), Self::UInt24_8 => (FormatDepthStencil::DEPTH_STENCIL, PixelType::UNSIGNED_INT_24_8), } } } display_from_debug!(DepthStencilFormatType);
match self { Self::Integer(format, ty) => format.components() * ty.size(), Self::UShort4_4_4_4 | Self::UShort4_4_4_4Rev | Self::UShort5_5_5_1 | Self::UShort1_5_5_5Rev => 2, Self::UInt8_8_8_8 | Self::UInt8_8_8_8Rev | Self::UInt10_10_10_2 | Self::UInt10_10_10_2Rev => 4 }
if_condition
[ { "content": "pub trait AttributeValue<T:GLSLType>: GPUCopy { fn format(&self) -> T::AttributeFormat; }\n\nimpl<A:AttributeData<T>, T:GLSLType> AttributeValue<T> for A {\n\n #[inline] fn format(&self) -> T::AttributeFormat {A::format()}\n\n}\n\n\n", "file_path": "src/glsl/mod.rs", "rank": 0, "score": 133558.68184289365 }, { "content": " trait SpecificDrop { unsafe fn specific_drop(&mut self); }\n\n impl<T:?Sized, A:BufferAccess> SpecificDrop for Buffer<T, A> {\n\n #[inline] default unsafe fn specific_drop(&mut self) { drop_in_place(&mut *self._map::<ReadWrite>(0)) }\n\n }\n\n impl<T:GPUCopy+?Sized, A:BufferAccess> SpecificDrop for Buffer<T, A> { #[inline] unsafe fn specific_drop(&mut self) {} }\n\n\n\n unsafe {\n\n self.specific_drop();\n\n gl::DeleteBuffers(1, &self.id);\n\n }\n\n }\n\n }\n\n}\n\n\n\n//\n\n//We only want to allow copying of types that are themselves Copy\n\n//or of arrays of Copy objects\n\n//\n\n\n\nimpl<T:GPUCopy + ?Sized, A:BufferAccess> Buffer<T,A> {\n", "file_path": "src/buffer.rs", "rank": 1, "score": 104646.32545430538 }, { "content": "/// A trait for type-level bools\n\npub trait Boolean {\n\n type Not: Boolean<Not=Self>;\n\n const VALUE: bool;\n\n}\n\n\n\n/// A type representing a `true` value\n\npub struct True;\n\n\n\n/// A type representing a `false` value\n\npub struct False;\n\n\n\nimpl Boolean for True {\n\n type Not = False;\n\n const VALUE: bool = true;\n\n}\n\n\n\nimpl Boolean for False {\n\n type Not = True;\n\n const VALUE: bool = false;\n\n}\n", "file_path": "src/lib.rs", "rank": 3, "score": 75409.3198779983 }, { "content": "pub trait Surface: {\n\n fn is_active(&self) -> bool;\n\n fn make_current(&mut self) -> &mut Context;\n\n}\n\n\n\npub struct GLProvider { _private: () }\n\npub struct GL2 { _private: () }\n\npub struct GL3 { _private: () }\n\npub struct GL4 { _private: () }\n\n\n\nimpl GLProvider {\n\n\n\n // #[inline] pub(crate) fn unchecked() -> &'static Self { &GLProvider { _private: () } }\n\n\n\n pub fn 
get_current() -> Result<GLProvider, ()> {\n\n //if glFinish isn't loaded, we can pretty safely assume nothing has\n\n if gl::Finish::is_loaded() {\n\n Ok(GLProvider{ _private: () })\n\n } else {\n\n Err(())\n", "file_path": "src/lib.rs", "rank": 4, "score": 75401.6030744973 }, { "content": "pub trait BufferAccess {\n\n\n\n type Read: Boolean;\n\n type Write: Boolean;\n\n\n\n type FlipReadBit: BufferAccess<Read=<Self::Read as Boolean>::Not, Write=Self::Write>;\n\n type FlipWriteBit: BufferAccess<Read=Self::Read, Write=<Self::Write as Boolean>::Not>;\n\n type NoReadBit: BufferAccess<Read=False, Write=Self::Write>;\n\n type NoWriteBit: BufferAccess<Read=Self::Read, Write=False>;\n\n\n\n #[inline]\n\n fn storage_flags(_hint: BufferUsage) -> GLbitfield {\n\n let f1 = if <Self::Read as Boolean>::VALUE { gl::MAP_READ_BIT } else {0};\n\n let f2 = if <Self::Write as Boolean>::VALUE { gl::MAP_WRITE_BIT } else {0};\n\n f1 | f2\n\n }\n\n\n\n #[inline] fn mapping_flags(hint: BufferUsage) -> GLbitfield { Self::storage_flags(hint) }\n\n #[inline] fn buffer_usage(hint: BufferUsage) -> BufferUsage { hint }\n\n}\n\n\n", "file_path": "src/buffer.rs", "rank": 5, "score": 72161.14293940582 }, { "content": "pub trait AttributeData<T:GLSLType>: Sized + Copy {\n\n fn format() -> T::AttributeFormat;\n\n}\n\n\n", "file_path": "src/glsl/mod.rs", "rank": 6, "score": 71175.66736922228 }, { "content": "fn align(offset: usize, alignment: usize) -> usize {\n\n let error = offset % alignment;\n\n if error != 0 {\n\n offset - error + alignment\n\n } else {\n\n offset\n\n }\n\n}\n\n\n\nmacro_rules! 
impl_tuple_splitting {\n\n\n\n ({$($T:ident:$t:ident)*} $Last:ident:$l:ident) => {\n\n impl_tuple_splitting!({$($T)*} $Last split_tuple BSlice &);\n\n impl_tuple_splitting!({$($T)*} $Last split_tuple_mut BSliceMut &mut );\n\n };\n\n\n\n ({$($T:ident)*} $Last:ident $fun:ident $slice:ident $($r:tt)* ) => {\n\n\n\n impl<$($T:Sized,)* $Last:?Sized, BA:BufferAccess> Buffer<($($T,)* $Last), BA> {\n\n\n", "file_path": "src/buffer.rs", "rank": 7, "score": 66554.31828113475 }, { "content": "pub trait GLEnum: Sized + Copy + Eq + Hash + Debug + Display + Into<GLenum> + TryFrom<GLenum, Error=GLError> {}\n\n\n\n#[derive(Clone, PartialEq, Eq, Hash)]\n\npub enum GLError {\n\n ShaderCompilation(GLenum, ShaderType, String),\n\n ProgramLinking(GLenum, String),\n\n ProgramValidation(GLenum, String),\n\n InvalidEnum(GLenum, String),\n\n InvalidOperation(String),\n\n InvalidBits(GLbitfield, String),\n\n BufferCopySizeError(usize, usize),\n\n FunctionNotLoaded(&'static str)\n\n}\n\n\n\ndisplay_from_debug!(GLError);\n\nimpl Debug for GLError {\n\n\n\n fn fmt(&self, f: &mut Formatter) -> fmt::Result {\n\n match self {\n\n GLError::ShaderCompilation(id, ty, log) => write!(f, \"{} #{} compilation error: {}\", ty, id, log),\n", "file_path": "src/lib.rs", "rank": 8, "score": 65243.60665011655 }, { "content": "pub trait WriteAccess: BufferAccess {}\n\n\n\nimpl<A:BufferAccess<Read=True>> ReadAccess for A {}\n\nimpl<A:BufferAccess<Write=True>> WriteAccess for A {}\n\n\n\npub struct CopyOnly;\n\npub struct Read;\n\npub struct Write;\n\npub struct ReadWrite;\n\n\n\n\n\nimpl BufferAccess for CopyOnly {\n\n type Read = False; type Write = False;\n\n type FlipReadBit = Read; type FlipWriteBit = Write;\n\n type NoReadBit = Self; type NoWriteBit = Self;\n\n}\n\nimpl BufferAccess for Read {\n\n type Read = True; type Write = False;\n\n type FlipReadBit = CopyOnly; type FlipWriteBit = ReadWrite;\n\n type NoReadBit = CopyOnly; type NoWriteBit = Self;\n", "file_path": "src/buffer.rs", "rank": 9, "score": 
64705.47569744699 }, { "content": "pub trait ReadAccess: BufferAccess {}\n", "file_path": "src/buffer.rs", "rank": 10, "score": 64705.47569744699 }, { "content": "pub trait GLSLSubroutine: Copy + Eq {\n\n fn function_name(&self) -> &'static ::std::ffi::CStr;\n\n}\n", "file_path": "src/glsl/mod.rs", "rank": 11, "score": 60707.01772675511 }, { "content": "pub trait GLSLData<T:GLSLType>: From<T> + Into<T> + AttributeData<T> {}\n\nimpl<T:GLSLType, G> GLSLData<T> for G where G: From<T> + Into<T> + AttributeData<T> {}\n\n\n\npub unsafe trait AttribFormat: Sized + Clone + Copy + PartialEq + Eq + Hash + Debug {\n\n fn size(self) -> usize;\n\n fn attrib_count(self) -> usize {1}\n\n unsafe fn bind_attribute(self, attr_id: GLuint, stride: usize, offset: usize);\n\n unsafe fn set_attribute(self, attr_id: GLuint, data: *const GLvoid);\n\n}\n\n\n", "file_path": "src/glsl/mod.rs", "rank": 12, "score": 60643.53001878985 }, { "content": "pub trait InterfaceBlock<L:BlockLayout, T:Layout<L>+?Sized> {\n\n fn buffer_target() -> IndexedBufferTarget;\n\n fn binding(&self) -> GLuint;\n\n\n\n #[inline]\n\n unsafe fn bind_buffer_range<A:BufferAccess>(&self, buffer: &Buffer<T, A>) {\n\n Self::buffer_target().bind_range(buffer, self.binding());\n\n }\n\n\n\n #[inline] unsafe fn unbind(&self) {Self::buffer_target().unbind(self.binding())}\n\n}\n\n\n\npub struct UniformBlock<L:BlockLayout, T:Layout<L>+Sized> {\n\n id: GLuint,\n\n pid: GLuint,\n\n binding: GLuint,\n\n p: PhantomData<(Box<T>, L)>\n\n}\n\n\n\nimpl<L:BlockLayout, T:Layout<L>+Sized> UniformBlock<L, T> {\n", "file_path": "src/program.rs", "rank": 13, "score": 50853.04292539061 }, { "content": "fn main() {\n\n\n\n let mut glfw = glfw::init(glfw::FAIL_ON_ERRORS).unwrap();\n\n\n\n let width = 640;\n\n let height = 480;\n\n\n\n let mut window = glfw.create_window(width, height, \"Basic gravity demo\", glfw::WindowMode::Windowed).unwrap().0;\n\n\n\n glfw::Context::make_current(&mut window);\n\n window.set_key_polling(true);\n\n 
glfw.set_swap_interval(glfw::SwapInterval::None);\n\n\n\n let gl_provider = unsafe {\n\n GLProvider::load(|s| ::std::mem::transmute(glfw.get_proc_address_raw(s)))\n\n };\n\n let mut context = Context::init(&gl_provider);\n\n let mut shader = ParticleShader::init(&gl_provider).unwrap();\n\n let mut computer = ParticleUpdator::init(&gl_provider).unwrap();\n\n\n", "file_path": "examples/gravity.rs", "rank": 14, "score": 43463.42339080789 }, { "content": "fn main() {\n\n\n\n let mut glfw = glfw::init(glfw::FAIL_ON_ERRORS).unwrap();\n\n\n\n let width = 640;\n\n let height = 480;\n\n\n\n let mut window = glfw.create_window(width, height, \"Compute test\", glfw::WindowMode::Windowed).unwrap().0;\n\n\n\n glfw::Context::make_current(&mut window);\n\n window.set_key_polling(true);\n\n glfw.set_swap_interval(glfw::SwapInterval::Adaptive);\n\n\n\n let gl_provider = unsafe {\n\n GLProvider::load(|s| ::std::mem::transmute(glfw.get_proc_address_raw(s)))\n\n };\n\n let mut context = Context::init(&gl_provider);\n\n let shader = ParticleShader::init(&gl_provider).unwrap();\n\n let computer = ParticleUpdator::init(&gl_provider).unwrap();\n\n\n", "file_path": "examples/compute.rs", "rank": 15, "score": 43463.42339080789 }, { "content": "fn main() {\n\n\n\n //get the runtime params\n\n let args: Vec<String> = env::args().collect();\n\n\n\n let mut gpu = false;\n\n let mut cpu = false;\n\n let mut par_cpu = false;\n\n let mut order = 0;\n\n\n\n //parse the params\n\n let mut i = 0;\n\n while i < args.len() {\n\n if args[i] == \"--gpu\" { gpu = true; }\n\n else if args[i] == \"--cpu\" { cpu = true; }\n\n else if args[i] == \"--par_cpu\" { par_cpu = true; }\n\n else if args[i] == \"--order\" || args[i] == \"--o\" {\n\n i+=1;\n\n if i < args.len() {\n\n order = args[i].parse::<u32>().unwrap();\n", "file_path": "examples/sorting.rs", "rank": 16, "score": 43463.42339080789 }, { "content": "fn main() {\n\n\n\n let mut glfw = glfw::init(glfw::FAIL_ON_ERRORS).unwrap();\n\n\n\n let mut window = 
glfw.create_window(640, 480, \"Simple Render\", glfw::WindowMode::Windowed).unwrap().0;\n\n\n\n glfw::Context::make_current(&mut window);\n\n window.set_key_polling(true);\n\n glfw.set_swap_interval(glfw::SwapInterval::Adaptive);\n\n\n\n let gl_provider = unsafe {\n\n GLProvider::load(|s| ::std::mem::transmute(glfw.get_proc_address_raw(s)))\n\n };\n\n let mut context = Context::init(&gl_provider);\n\n let mut shader = Shaderinator::init(&gl_provider).unwrap();\n\n\n\n\n\n let points = [[-0.5f32,-0.5,0.0],[0.0,0.866,0.0],[0.5,-0.5,0.0]];\n\n // let points = [[-0.5f32,-0.5,0.0],[0.5,-0.5,0.0],[-0.5,0.5,0.0],[-0.5,0.5,0.0],[0.5,-0.5,0.0],[0.5,0.5,0.0]];\n\n let triangle: Buffer<[[f32;3]],_> = Buffer::immut_from(&gl_provider, Box::new(points));\n", "file_path": "examples/simple-render.rs", "rank": 17, "score": 41606.68282385213 }, { "content": "fn main() {\n\n let glfw = glfw::init(glfw::FAIL_ON_ERRORS).unwrap();\n\n let mut window = glfw.create_window(640, 480, \"QR-Decomposition\", glfw::WindowMode::Windowed).unwrap().0;\n\n glfw::Context::make_current(&mut window);\n\n\n\n let gl_provider = unsafe {\n\n GLProvider::load(|s| ::std::mem::transmute(glfw.get_proc_address_raw(s)))\n\n };\n\n\n\n let decomposer = QRDecomp::init(&gl_provider).unwrap();\n\n\n\n fn rf() -> f32 {rand::random::<f32>()}\n\n\n\n let count = 10000;\n\n let mats = (0..count).map(\n\n |_| {\n\n let rand = [[rf(),rf(),rf(),rf()],[rf(),rf(),rf(),rf()],[rf(),rf(),rf(),rf()],[rf(),rf(),rf(),rf()]];\n\n //now make a positive semidefinite matrix\n\n let mut m = [[0.0;4];4];\n\n for i in 0..4 {\n", "file_path": "examples/qr-decomp.rs", "rank": 18, "score": 41606.68282385213 }, { "content": " #[allow(non_camel_case_types)]\n\n pub struct $name {\n\n pub value: $prim\n\n }\n\n\n\n impl From<$prim> for $name { #[inline] fn from(v: $prim) -> Self { $name{value: v} } }\n\n impl From<$name> for $prim { #[inline] fn from(v: $name) -> Self { v.value } }\n\n\n\n impl<G:GLSLType> AttributeData<G> for $name where 
$prim: AttributeData<G> {\n\n #[inline] fn format() -> G::AttributeFormat { <$prim as AttributeData<G>>::format() }\n\n }\n\n ]\n\n [ #[allow(non_camel_case_types)] pub type $name = $prim; ]\n\n @if @quote\n\n }\n\n\n\n unsafe impl GLSLType for $name {\n\n type AttributeFormat = $fmt;\n\n\n\n unsafe fn load_uniforms(id: GLint, data: &[Self]){\n", "file_path": "src/glsl/glsl_type.rs", "rank": 19, "score": 25709.445065563243 }, { "content": "use super::*;\n\n\n\nuse std::mem::transmute;\n\nuse std::ops::*;\n\n\n\n#[repr(align(4))]\n\n#[allow(non_camel_case_types)]\n\n#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug, Hash, Default)]\n\npub struct c_bool(GLuint);\n\n\n\nmacro_rules! impl_c_bool {\n\n ($($Trait:ident.$fun:ident $op:tt),*) => {$(\n\n impl $Trait<Self> for c_bool { type Output = Self; #[inline] fn $fun(self, r:Self) -> Self {c_bool(self.0 $op r.0)} }\n\n impl $Trait<bool> for c_bool { type Output = Self; #[inline] fn $fun(self, r:bool) -> Self {c_bool(self.0 $op r as u32)} }\n\n impl $Trait<c_bool> for bool { type Output = c_bool; #[inline] fn $fun(self, r:c_bool) -> c_bool {c_bool(self as u32 $op r.0)} }\n\n )*}\n\n}\n\n\n\nmacro_rules! 
impl_c_bool_assign {\n\n ($($Trait:ident.$fun:ident $op:tt),*) => {$(\n", "file_path": "src/glsl/glsl_type.rs", "rank": 20, "score": 25708.005702729013 }, { "content": "}\n\n\n\nimpl_attr_data!(@Int bool IntFormat::UByte);\n\nimpl_attr_data!(@Int gl_bool IntFormat::UInt);\n\nimpl_attr_data!(@Int i8 IntFormat::Byte);\n\nimpl_attr_data!(@Int u8 IntFormat::UByte);\n\nimpl_attr_data!(@Int i16 IntFormat::Short);\n\nimpl_attr_data!(@Int u16 IntFormat::UShort);\n\nimpl_attr_data!(@Int i32 IntFormat::Int);\n\nimpl_attr_data!(@Int u32 IntFormat::UInt);\n\n\n\nimpl AttributeData<float> for f32 { fn format() -> FloatFormat { FloatFormat::Float(FloatType::Float) }}\n\nimpl AttributeData<float> for f64 { fn format() -> FloatFormat { FloatFormat::Double }}\n\n\n\nimpl AttributeData<double> for f64 { fn format() -> DoubleFormat {DoubleFormat}}\n\n\n\nimpl_attr_data!(@IVec gl_bool bvec2 bvec3 bvec4);\n\nimpl_attr_data!(@IVec uint uvec2 uvec3 uvec4);\n\nimpl_attr_data!(@IVec int ivec2 ivec3 ivec4);\n\nimpl_attr_data!(@Vec float vec2 vec3 vec4);\n\nimpl_attr_data!(@Mat vec2 mat2 mat3x2 mat4x2);\n\nimpl_attr_data!(@Mat vec3 mat2x3 mat3 mat4x3);\n\nimpl_attr_data!(@Mat vec4 mat2x4 mat3x4 mat4);\n\n\n\nimpl_attr_data!(@DVec double dvec2 dvec3 dvec4);\n\nimpl_attr_data!(@Mat dvec2 dmat2 dmat3x2 dmat4x2);\n\nimpl_attr_data!(@Mat dvec3 dmat2x3 dmat3 dmat4x3);\n\nimpl_attr_data!(@Mat dvec4 dmat2x4 dmat3x4 dmat4);\n", "file_path": "src/glsl/glsl_type.rs", "rank": 21, "score": 25707.05205558973 }, { "content": "\n\n (@Int $prim:ident $value:expr) => {\n\n impl AttributeData<gl_bool> for $prim { fn format() -> IntFormat { $value }}\n\n impl AttributeData<int> for $prim { fn format() -> IntFormat { $value }}\n\n impl AttributeData<uint> for $prim { fn format() -> IntFormat { $value }}\n\n impl AttributeData<float> for $prim { fn format() -> FloatFormat { FloatFormat::FromInt($value, false) }}\n\n };\n\n\n\n (@IVec $F:ident $size:tt) => { IVecFormat::IVecN($F::format(), $size) };\n\n (@Vec 
$F:ident $size:tt) => { VecFormat::VecN($F::format(), $size) };\n\n (@DVec $F:ident $size:tt) => { DVecFormat::DVecN($size) };\n\n (@Mat $F:ident $size:tt) => { [$F::format(); $size] };\n\n\n\n (@$arr:ident $vec1:ident $vec2:ident $vec3:ident $vec4:ident) => {\n\n impl<F:AttributeData<$vec1>> AttributeData<$vec1> for [F; 1] { fn format() -> <$vec1 as GLSLType>::AttributeFormat { F::format()}}\n\n impl<F:AttributeData<$vec1>> AttributeData<$vec2> for [F; 2] { fn format() -> <$vec2 as GLSLType>::AttributeFormat { impl_attr_data!(@$arr F 2) } }\n\n impl<F:AttributeData<$vec1>> AttributeData<$vec3> for [F; 3] { fn format() -> <$vec3 as GLSLType>::AttributeFormat { impl_attr_data!(@$arr F 3) } }\n\n impl<F:AttributeData<$vec1>> AttributeData<$vec4> for [F; 4] { fn format() -> <$vec4 as GLSLType>::AttributeFormat { impl_attr_data!(@$arr F 4) } }\n\n };\n\n\n", "file_path": "src/glsl/glsl_type.rs", "rank": 22, "score": 25706.053649103975 }, { "content": " impl $Trait<Self> for c_bool { #[inline] fn $fun(&mut self, r:Self) {self.0 $op r.0;} }\n\n impl $Trait<bool> for c_bool { #[inline] fn $fun(&mut self, r:bool) {self.0 $op r as u32;} }\n\n impl $Trait<c_bool> for bool { #[inline] fn $fun(&mut self, r:c_bool) {*self $op r.0>0;} }\n\n )*}\n\n}\n\n\n\nimpl_c_bool!(BitAnd.bitand &, BitOr.bitor |, BitXor.bitxor &);\n\nimpl_c_bool_assign!(BitAndAssign.bitand_assign &=, BitOrAssign.bitor_assign |=, BitXorAssign.bitxor_assign &=);\n\n\n\nimpl From<bool> for c_bool { #[inline] fn from(b: bool) -> Self {c_bool(b as GLuint)} }\n\nimpl From<c_bool> for bool { #[inline] fn from(b: c_bool) -> Self {b.0>0} }\n\nimpl From<GLuint> for c_bool { #[inline] fn from(b: GLuint) -> Self {c_bool(b)} }\n\nimpl From<c_bool> for GLuint { #[inline] fn from(b: c_bool) -> Self {b.0} }\n\nimpl From<GLboolean> for c_bool { #[inline] fn from(b: GLboolean) -> Self {c_bool(b as GLuint)} }\n\nimpl From<c_bool> for GLboolean { #[inline] fn from(b: c_bool) -> Self {b.0 as GLboolean} 
}\n\n\n\n#[allow(non_camel_case_types)]\n\npub type void = ();\n\n\n\n//booleans\n", "file_path": "src/glsl/glsl_type.rs", "rank": 23, "score": 25703.706273094795 }, { "content": "glsl_type!({IntFormat} gl_bool = c_bool);\n\nglsl_type!({IVecFormat} bvec2 = [c_bool; 2]);\n\nglsl_type!({IVecFormat} bvec3 = [c_bool; 3]);\n\nglsl_type!({IVecFormat} bvec4 = [c_bool; 4]);\n\n\n\n//integers\n\nglsl_type!({IntFormat} int = GLint);\n\nglsl_type!({IVecFormat} ivec2 = [GLint; 2]);\n\nglsl_type!({IVecFormat} ivec3 = [GLint; 3]);\n\nglsl_type!({IVecFormat} ivec4 = [GLint; 4]);\n\n\n\n//unsigned integers\n\nglsl_type!({IntFormat} uint = GLuint);\n\nglsl_type!({IVecFormat} uvec2 = [GLuint; 2]);\n\nglsl_type!({IVecFormat} uvec3 = [GLuint; 3]);\n\nglsl_type!({IVecFormat} uvec4 = [GLuint; 4]);\n\n\n\n//floats\n\nglsl_type!({FloatFormat} float = GLfloat);\n\nglsl_type!({VecFormat} vec2 = [GLfloat; 2]);\n", "file_path": "src/glsl/glsl_type.rs", "rank": 24, "score": 25701.5553997797 }, { "content": "\n\n//for uniforms defined with an unnamed struct as a type\n\nmacro_rules! 
impl_tuple_type {\n\n ($var:ident @first $T0:ident $($T:ident)*) => {$T0::first_element_name($var)};\n\n ($($T:ident:$t:ident)*) => {\n\n\n\n unsafe impl<$($T:GLSLType),*> GLSLType for ($($T),*) {\n\n\n\n //tuples aren't allowed to be attributes\n\n type AttributeFormat = UnsupportedFormat;\n\n\n\n unsafe fn load_uniforms(id: GLint, data: &[Self]){\n\n let mut i = id;\n\n for ($($t),*) in data {\n\n $(\n\n $T::load_uniform(i, $t);\n\n *(&mut i) = i + $T::uniform_locations() as GLint;\n\n )*\n\n }\n\n }\n", "file_path": "src/glsl/glsl_type.rs", "rank": 25, "score": 25700.26872379432 }, { "content": "glsl_type!({[DVecFormat; 3]} dmat3x2 = [[GLdouble; 2]; 3]);\n\nglsl_type!({[DVecFormat; 3]} dmat3 = [[GLdouble; 3]; 3]);\n\nglsl_type!({[DVecFormat; 3]} dmat3x4 = [[GLdouble; 4]; 3]);\n\nglsl_type!({[DVecFormat; 4]} dmat4x2 = [[GLdouble; 2]; 4]);\n\nglsl_type!({[DVecFormat; 4]} dmat4x3 = [[GLdouble; 3]; 4]);\n\nglsl_type!({[DVecFormat; 4]} dmat4 = [[GLdouble; 4]; 4]);\n\n\n\n\n\nmacro_rules! impl_array_type {\n\n\n\n ($attrib_support:tt $($num:tt)*) => {\n\n $(\n\n unsafe impl<T:GLSLType> GLSLType for [T; $num] {\n\n gl_builder!{\n\n [$attrib_support]\n\n [type AttributeFormat = [T::AttributeFormat; $num];]\n\n [type AttributeFormat = UnsupportedFormat;]\n\n @if @quote\n\n }\n\n\n", "file_path": "src/glsl/glsl_type.rs", "rank": 26, "score": 25699.014661108806 }, { "content": "\n\n unsafe fn get_uniform(p: GLuint, id:GLint) -> Self {\n\n let ($(mut $t),*) = ($(MaybeUninit::<$T>::uninit()),*);\n\n let mut i = id;\n\n $(\n\n $t.write($T::get_uniform(p, i));\n\n *(&mut i) = i + $T::uniform_locations() as GLint;\n\n )*\n\n ($($t.assume_init()),*)\n\n }\n\n\n\n #[inline] fn uniform_locations() -> GLuint { 0 $(+ $T::uniform_locations())* }\n\n #[inline] fn first_element_name(var: String) -> String {impl_tuple_type!(var @first $($T)*)}\n\n\n\n }\n\n\n\n }\n\n}\n\n\n\nmacro_rules! 
impl_tuple_layout {\n", "file_path": "src/glsl/glsl_type.rs", "rank": 27, "score": 25698.368922710724 }, { "content": "\n\n gl_builder!{\n\n [$std140] [unsafe impl Layout<std140> for $name {}] [] @if @quote\n\n }\n\n\n\n gl_builder!{\n\n [$align_vec4] [unsafe impl AlignedVec4 for $name {}] [] @if @quote\n\n }\n\n\n\n\n\n };\n\n\n\n}\n\n\n\n//we want to throw everything in a module since these are kindof really common data type names\n\n//and we're kinda not 100% following rust naming convention, so we don't want literally everything\n\n//to get pulled into the crate module by default\n\n\n\npub use gl::types::*;\n\nuse gl::*;\n", "file_path": "src/glsl/glsl_type.rs", "rank": 28, "score": 25697.785507822806 }, { "content": "\n\n ({$($T:ident:$t0:ident)*} $Last:ident:$last:ident) => {\n\n\n\n //TODO fix to where a tuple is vec4 aligned if at least one of its members is\n\n unsafe impl<$($T:Sized+AlignedVec4, )* $Last:?Sized+AlignedVec4> AlignedVec4 for ($($T,)* $Last) {}\n\n unsafe impl<$($T:Sized+Layout<std140>, )* $Last:?Sized+Layout<std140>> Layout<std140> for ($($T,)* $Last) {}\n\n unsafe impl<$($T:Sized+Layout<std430>, )* $Last:?Sized+Layout<std430>> Layout<std430> for ($($T,)* $Last) {}\n\n\n\n };\n\n}\n\n\n\nimpl_tuple!(impl_tuple_type);\n\nimpl_tuple!(impl_tuple_layout @with_last);\n\n\n\n//\n\n//For specifying which types can be used as data for vertex attributes of the various glsl types\n\n//and what formatting to use\n\n//\n\n\n\nmacro_rules! 
impl_attr_data {\n", "file_path": "src/glsl/glsl_type.rs", "rank": 29, "score": 25697.432936169207 }, { "content": "\n\n (@index $name:ident = $prim:ident) => {};\n\n (@index $name:ident = [$T:ty; $c:tt]) => {\n\n impl Index<usize> for $name {\n\n type Output = $T;\n\n #[inline] fn index(&self, i: usize) -> &$T { &self.value[i] }\n\n }\n\n\n\n impl IndexMut<usize> for $name {\n\n #[inline] fn index_mut(&mut self, i: usize) -> &mut $T { &mut self.value[i] }\n\n }\n\n };\n\n\n\n ({$a:expr} $align_vec4:tt $std140:tt $std430:tt $scalar:tt $mat:tt {$fmt:ty} {$prim:ty} {$set:expr} {$get:expr} $name:ident) => {\n\n\n\n gl_builder! {\n\n [$scalar] @not [\n\n #[repr(C)]\n\n #[repr(align($a))]\n\n #[derive(Clone, Copy, PartialEq, Debug, Default)]\n", "file_path": "src/glsl/glsl_type.rs", "rank": 30, "score": 25696.98621663153 }, { "content": " unsafe impl<T:Layout<std430>> Layout<std430> for [T; $num] {}\n\n\n\n )*\n\n }\n\n\n\n\n\n}\n\n\n\nimpl_array_type!{ true\n\n 01 02 03 04 05 06 07 08 09 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32\n\n}\n\n\n\n//As much as it looks like it, this *actually* isn't overkill.\n\n//It is *very* possible that you may need mat4 uniform arrays for something\n\n//such as storing bone transforms for skeletal animation for example.\n\n//Of course, at some point, the user *could* (and should) just use\n\n//arrays of arrays to get more indices (though that needs GL 4)\n\n#[cfg(any(feature = \"large_uniform_arrays\", feature = \"extra_large_uniform_arrays\"))]\n\nimpl_array_type!{ false\n\n 033 034 035 036 037 038 039 040 041 042 043 044 045 046 047 048 049 050 051 052 053 054 055 056 057 058 059 060 061 062 063 064\n", "file_path": "src/glsl/glsl_type.rs", "rank": 31, "score": 25696.47869026671 }, { "content": " unsafe fn load_uniforms(id: GLint, data: &[Self]){\n\n let flattened = from_raw_parts(&data[0][0] as *const T, data.len() * $num);\n\n T::load_uniforms(id, flattened);\n\n }\n\n\n\n unsafe fn get_uniform(p: GLuint, 
id:GLint) -> Self {\n\n let mut data = MaybeUninit::uninit_array();\n\n for i in 0..$num {\n\n data[i] = MaybeUninit::new(T::get_uniform(p, id + i as GLint));\n\n }\n\n MaybeUninit::array_assume_init(data)\n\n }\n\n\n\n #[inline] fn uniform_locations() -> GLuint { T::uniform_locations() * $num }\n\n #[inline] fn first_element_name(var: String) -> String { T::first_element_name(var + \"[0]\") }\n\n\n\n }\n\n\n\n unsafe impl<T:AlignedVec4> AlignedVec4 for [T; $num] {}\n\n unsafe impl<T:AlignedVec4+Layout<std140>> Layout<std140> for [T; $num] {}\n", "file_path": "src/glsl/glsl_type.rs", "rank": 32, "score": 25696.473427739416 }, { "content": "glsl_type!({VecFormat} vec3 = [GLfloat; 3]);\n\nglsl_type!({VecFormat} vec4 = [GLfloat; 4]);\n\nglsl_type!({[VecFormat; 2]} mat2 = [[GLfloat; 2]; 2]);\n\nglsl_type!({[VecFormat; 2]} mat2x3 = [[GLfloat; 3]; 2]);\n\nglsl_type!({[VecFormat; 2]} mat2x4 = [[GLfloat; 4]; 2]);\n\nglsl_type!({[VecFormat; 3]} mat3x2 = [[GLfloat; 2]; 3]);\n\nglsl_type!({[VecFormat; 3]} mat3 = [[GLfloat; 3]; 3]);\n\nglsl_type!({[VecFormat; 3]} mat3x4 = [[GLfloat; 4]; 3]);\n\nglsl_type!({[VecFormat; 4]} mat4x2 = [[GLfloat; 2]; 4]);\n\nglsl_type!({[VecFormat; 4]} mat4x3 = [[GLfloat; 3]; 4]);\n\nglsl_type!({[VecFormat; 4]} mat4 = [[GLfloat; 4]; 4]);\n\n\n\n//doubles\n\nglsl_type!({DoubleFormat} double = GLdouble);\n\nglsl_type!({DVecFormat} dvec2 = [GLdouble; 2]);\n\nglsl_type!({DVecFormat} dvec3 = [GLdouble; 3]);\n\nglsl_type!({DVecFormat} dvec4 = [GLdouble; 4]);\n\nglsl_type!({[DVecFormat; 2]} dmat2 = [[GLdouble; 2]; 2]);\n\nglsl_type!({[DVecFormat; 2]} dmat2x3 = [[GLdouble; 3]; 2]);\n\nglsl_type!({[DVecFormat; 2]} dmat2x4 = [[GLdouble; 4]; 2]);\n", "file_path": "src/glsl/glsl_type.rs", "rank": 33, "score": 25695.47610096533 }, { "content": " 577 578 579 580 581 582 583 584 585 586 587 588 589 590 591 592 593 594 595 596 597 598 599 600 601 602 603 604 605 606 607 608\n\n 609 610 611 612 613 614 615 616 617 618 619 620 621 622 623 624 625 626 627 628 629 630 
631 632 633 634 635 636 637 638 639 640\n\n 641 642 643 644 645 646 647 648 649 650 651 652 653 654 655 656 657 658 659 660 661 662 663 664 665 666 667 668 669 670 671 672\n\n 673 674 675 676 677 678 679 680 681 682 683 684 685 686 687 688 689 690 691 692 693 694 695 696 697 698 699 700 701 702 703 704\n\n 705 706 707 708 709 710 711 712 713 714 715 716 717 718 719 720 721 722 723 724 725 726 727 728 729 730 731 732 733 734 735 736\n\n 737 738 739 740 741 742 743 744 745 746 747 748 749 750 751 752 753 754 755 756 757 758 759 760 761 762 763 764 765 766 767 768\n\n 769 770 771 772 773 774 775 776 777 778 779 780 781 782 783 784 785 786 787 788 789 790 791 792 793 794 795 796 797 798 799 800\n\n 801 802 803 804 805 806 807 808 809 810 811 812 813 814 815 816 817 818 819 820 821 822 823 824 825 826 827 828 829 830 831 832\n\n 833 834 835 836 837 838 839 840 841 842 843 844 845 846 847 848 849 850 851 852 853 854 855 856 857 858 859 860 861 862 863 864\n\n 865 866 867 868 869 870 871 872 873 874 875 876 877 878 879 880 881 882 883 884 885 886 887 888 889 890 891 892 893 894 895 896\n\n 897 898 899 900 901 902 903 904 905 906 907 908 909 910 911 912 913 914 915 916 917 918 919 920 921 922 923 924 925 926 927 928\n\n 929 930 931 932 933 934 935 936 937 938 939 940 941 942 943 944 945 946 947 948 949 950 951 952 953 954 955 956 957 958 959 960\n\n 961 962 963 964 965 966 967 968 969 970 971 972 973 974 975 976 977 978 979 980 981 982 983 984 985 986 987 988 989 990 991 992\n\n 0993 0994 0995 0996 0997 0998 0999 1000 1001 1002 1003 1004 1005 1006 1007 1008\n\n 1009 1010 1011 1012 1013 1014 1015 1016 1017 1018 1019 1020 1021 1022 1023 1024\n\n}\n\n\n\nunsafe impl<T:Layout<std430>> Layout<std430> for [T] {}\n\nunsafe impl<T:AlignedVec4> Layout<std140> for [T] {}\n\nunsafe impl<T:AlignedVec4> AlignedVec4 for [T] {}\n", "file_path": "src/glsl/glsl_type.rs", "rank": 34, "score": 25694.43342555256 }, { "content": " let f = &$set;\n\n gl_builder!{\n\n [$mat]\n\n [f(id, 
data.len() as GLint, false as GLboolean, transmute(&data[0][0][0]));]\n\n [f(id, data.len() as GLint, transmute(&data[0]));]\n\n @if @quote\n\n }\n\n }\n\n\n\n unsafe fn get_uniform(p: GLuint, id:GLint) -> Self {\n\n let mut data = MaybeUninit::<Self>::uninit();\n\n let f = &$get;\n\n f(p, id, data.as_mut_ptr() as *mut _);\n\n data.assume_init()\n\n }\n\n }\n\n\n\n gl_builder!{\n\n [$std430] [unsafe impl Layout<std430> for $name {}] [] @if @quote\n\n }\n", "file_path": "src/glsl/glsl_type.rs", "rank": 35, "score": 25694.17301152615 }, { "content": " 065 066 067 068 069 070 071 072 073 074 075 076 077 078 079 080 081 082 083 084 085 086 087 088 089 090 091 092 093 094 095 096\n\n 097 098 099 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128\n\n 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160\n\n 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 185 186 187 188 189 190 191 192\n\n 193 194 195 196 197 198 199 200 201 202 203 204 205 206 207 208 209 210 211 212 213 214 215 216 217 218 219 220 221 222 223 224\n\n 225 226 227 228 229 230 231 232 233 234 235 236 237 238 239 240 241 242 243 244 245 246 247 248 249 250 251 252 253 254 255 256\n\n}\n\n\n\n#[cfg(feature = \"extra_large_uniform_arrays\")]\n\nimpl_array_type! 
{ false\n\n 257 258 259 260 261 262 263 264 265 266 267 268 269 270 271 272 273 274 275 276 277 278 279 280 281 282 283 284 285 286 287 288\n\n 289 290 291 292 293 294 295 296 297 298 299 300 301 302 303 304 305 306 307 308 309 310 311 312 313 314 315 316 317 318 319 320\n\n 321 322 323 324 325 326 327 328 329 330 331 332 333 334 335 336 337 338 339 340 341 342 343 344 345 346 347 348 349 350 351 352\n\n 353 354 355 356 357 358 359 360 361 362 363 364 365 366 367 368 369 370 371 372 373 374 375 376 377 378 379 380 381 382 383 384\n\n 385 386 387 388 389 390 391 392 393 394 395 396 397 398 399 400 401 402 403 404 405 406 407 408 409 410 411 412 413 414 415 416\n\n 417 418 419 420 421 422 423 424 425 426 427 428 429 430 431 432 433 434 435 436 437 438 439 440 441 442 443 444 445 446 447 448\n\n 449 450 451 452 453 454 455 456 457 458 459 460 461 462 463 464 465 466 467 468 469 470 471 472 473 474 475 476 477 478 479 480\n\n 481 482 483 484 485 486 487 488 489 490 491 492 493 494 495 496 497 498 499 500 501 502 503 504 505 506 507 508 509 510 511 512\n\n 513 514 515 516 517 518 519 520 521 522 523 524 525 526 527 528 529 530 531 532 533 534 535 536 537 538 539 540 541 542 543 544\n\n 545 546 547 548 549 550 551 552 553 554 555 556 557 558 559 560 561 562 563 564 565 566 567 568 569 570 571 572 573 574 575 576\n", "file_path": "src/glsl/glsl_type.rs", "rank": 36, "score": 25693.327717857966 }, { "content": "\n\n //all vecs are std140 complient\n\n ([$prim:ident; $c:tt] @align $($tail:tt)*) => { glsl_type!($prim @align $c @align_vec true true $($tail)*); };\n\n\n\n //matrices are considered as arrays over their columns, so all matNx2 and matNx4 are std430 and all matNx4 are 140,\n\n //However, since we're storing matNx3's as [[T; 3]; N], the columns are not vec4 aligned, so they aren't std430 OR std140\n\n ([[GLfloat; 2]; $c2:tt] @align $($tail:tt)*) => { glsl_type!({8} false false true $($tail)*); };\n\n ([[$prim:ident; 3]; $c2:tt] @align $($tail:tt)*) => { 
glsl_type!({8} false false false $($tail)*); };\n\n ([[$prim:ident; $c1:tt]; $c2:tt] @align $($tail:tt)*) => { glsl_type!($prim @align $c1 @align_vec true true $($tail)*); };\n\n\n\n //determine if the type is a scalar or matrix or neither\n\n ({$a:expr} $b1:tt $b2:tt $b3:tt $prim:ident @type $($tail:tt)*) => { glsl_type!({$a} $b1 $b2 $b3 true false $($tail)*); };\n\n ({$a:expr} $b1:tt $b2:tt $b3:tt [$prim:ident; $c:tt] @type $($tail:tt)*) => { glsl_type!({$a} $b1 $b2 $b3 false false $($tail)*); };\n\n ({$a:expr} $b1:tt $b2:tt $b3:tt [[$prim:ident; $c1:tt]; $c2:tt] @type $($tail:tt)*) => { glsl_type!({$a} $b1 $b2 $b3 false true $($tail)*); };\n\n\n\n //the initial macro call\n\n ({$fmt:ty} $name:ident = $($ty:tt)*) => {\n\n glsl_type!($($ty)* @align $($ty)* @type {$fmt} {$($ty)*} {gl_builder!(@set $($ty)*)} {gl_builder!(@get $($ty)*)} $name);\n\n glsl_type!(@index $name = $($ty)*);\n\n };\n", "file_path": "src/glsl/glsl_type.rs", "rank": 37, "score": 25691.63484479951 }, { "content": "macro_rules! 
glsl_type {\n\n\n\n //\n\n //In order to make all the primitives and the like fit the std430 layout\n\n //(and allow type checking for std140), we need to set the alignment of each type accordingly\n\n //\n\n\n\n //all scalars besides double (which has 8) require an alignment of 4bytes, fitstd140,\n\n //but they cannot satisfy sdt140 if they are in an array\n\n (GLdouble @align $($tail:tt)*) => { glsl_type!({8} false true true $($tail)*); };\n\n ($prim:ident @align $($tail:tt)*) => { glsl_type!({4} false true true $($tail)*); };\n\n\n\n //for vectors, the alignment is 2N for vec2 and 4N for vec3 and vec4, and hence,\n\n //all BUT vec2/ivec2/uvec2/bvec2 can be put into std140 arrays\n\n ({4} false true true 2 @align_vec $($tail:tt)*) => { glsl_type!({8} false $($tail)*); };\n\n ({8} false true true 2 @align_vec $($tail:tt)*) => { glsl_type!({16} true $($tail)*); }; //dvec2's have an alignent equal to that of vec4\n\n ({4} false true true 3 @align_vec $($tail:tt)*) => { glsl_type!({16} true $($tail)*); };\n\n ({8} false true true 3 @align_vec $($tail:tt)*) => { glsl_type!({32} true $($tail)*); };\n\n ({4} false true true 4 @align_vec $($tail:tt)*) => { glsl_type!({16} true $($tail)*); };\n\n ({8} false true true 4 @align_vec $($tail:tt)*) => { glsl_type!({32} true $($tail)*); };\n", "file_path": "src/glsl/glsl_type.rs", "rank": 38, "score": 25691.541479039228 }, { "content": "\n\nuse std::slice::*;\n\nuse std::mem::*;\n\n\n\n///\n\n///A macro constucting the gl functions for managing uniforms using the\n\n///concat_idents! macro. Note that this can only be used in expressions,\n\n///and as such, you must both import the function name into the current module\n\n///AND borrow the function as a pointer in order to use.\n\n///\n\n///Also, while this only does glUniform* stuff, in the future I may expand it if\n\n///it would be of use.\n\n///\n\nmacro_rules! gl_builder {\n\n\n\n //some control flow patterns. 
Probably would be better practice to redesign to not need these\n\n //but it is what it is for now\n\n ([true] $then:tt $else:tt @if $($tail:tt)*) => { gl_builder!{$then $($tail)*} };\n\n ([false] $then:tt $else:tt @if $($tail:tt)*) => { gl_builder!{$else $($tail)*} };\n\n\n", "file_path": "src/glsl/glsl_type.rs", "rank": 39, "score": 25690.686870638874 }, { "content": " ([true] @not $($tail:tt)*) => { gl_builder!{[false] $($tail)*} };\n\n ([false] @not $($tail:tt)*) => { gl_builder!{[true] $($tail)*} };\n\n\n\n ([1] [1] @eq $($tail:tt)*) => { gl_builder!{[true] $($tail)*} };\n\n ([2] [2] @eq $($tail:tt)*) => { gl_builder!{[true] $($tail)*} };\n\n ([3] [3] @eq $($tail:tt)*) => { gl_builder!{[true] $($tail)*} };\n\n ([4] [4] @eq $($tail:tt)*) => { gl_builder!{[true] $($tail)*} };\n\n ([1] [$b:literal] @eq $($tail:tt)*) => { gl_builder!{[false] $($tail)*} };\n\n ([2] [$b:literal] @eq $($tail:tt)*) => { gl_builder!{[false] $($tail)*} };\n\n ([3] [$b:literal] @eq $($tail:tt)*) => { gl_builder!{[false] $($tail)*} };\n\n ([4] [$b:literal] @eq $($tail:tt)*) => { gl_builder!{[false] $($tail)*} };\n\n // ([$a:lit] [$b:lit] @eq $($tail:tt)*) => { gl_builder!{[false] $($tail)*} };\n\n\n\n ([$($code:tt)*] @quote) => { $($code)* };\n\n ({$($code:tt)*} @eval $($tail:tt)*) => { gl_builder!{$($code)* $($tail)*} };\n\n\n\n\n\n ({$($gl:ident)*} c_bool @ty_suffix $($tail:tt)*) => { gl_builder!{{$($gl)* ui} $($tail)*} };\n\n ({$($gl:ident)*} GLuint @ty_suffix $($tail:tt)*) => { gl_builder!{{$($gl)* ui} $($tail)*} };\n\n ({$($gl:ident)*} GLint @ty_suffix $($tail:tt)*) => { gl_builder!{{$($gl)* i} $($tail)*} };\n", "file_path": "src/glsl/glsl_type.rs", "rank": 40, "score": 25686.607197966896 }, { "content": " (@get $prim:ident) => { gl_builder!{{GetUniform} $prim @ty_suffix @v @concat} };\n\n (@get [$prim:ident; $c:tt]) => { gl_builder!{@get $prim} };\n\n (@get [[$prim:ident; $c1:tt]; $c2:tt]) => { gl_builder!{@get $prim} };\n\n\n\n (@set $prim:ident) => { gl_builder!{@set [$prim; 1]} 
};\n\n (@set [$prim:ident; $c:tt]) => { gl_builder!{{} $c @uni_vec $prim @ty_suffix @v @concat} };\n\n (@set [[$prim:ident; $c1:tt]; $c2:tt]) => {\n\n gl_builder! (\n\n [$c1] [$c2] @eq\n\n { {} $c2 @uni_mat }\n\n { {} $c2 @uni_mat $c1 @mat_xN }\n\n @if @eval\n\n $prim @ty_suffix @v @concat\n\n )\n\n };\n\n\n\n\n\n\n\n}\n\n\n", "file_path": "src/glsl/glsl_type.rs", "rank": 41, "score": 25686.607197966896 }, { "content": " ({$($gl:ident)*} GLfloat @ty_suffix $($tail:tt)*) => { gl_builder!{{$($gl)* f} $($tail)*} };\n\n ({$($gl:ident)*} GLdouble @ty_suffix $($tail:tt)*) => { gl_builder!{{$($gl)* d} $($tail)*} };\n\n\n\n ({$($gl:ident)*} 1 @uni_vec $($tail:tt)*) => { gl_builder!{{$($gl)* Uniform1} $($tail)*} };\n\n ({$($gl:ident)*} 2 @uni_vec $($tail:tt)*) => { gl_builder!{{$($gl)* Uniform2} $($tail)*} };\n\n ({$($gl:ident)*} 3 @uni_vec $($tail:tt)*) => { gl_builder!{{$($gl)* Uniform3} $($tail)*} };\n\n ({$($gl:ident)*} 4 @uni_vec $($tail:tt)*) => { gl_builder!{{$($gl)* Uniform4} $($tail)*} };\n\n\n\n ({$($gl:ident)*} 2 @uni_mat $($tail:tt)*) => { gl_builder!{{$($gl)* UniformMatrix2} $($tail)*} };\n\n ({$($gl:ident)*} 3 @uni_mat $($tail:tt)*) => { gl_builder!{{$($gl)* UniformMatrix3} $($tail)*} };\n\n ({$($gl:ident)*} 4 @uni_mat $($tail:tt)*) => { gl_builder!{{$($gl)* UniformMatrix4} $($tail)*} };\n\n\n\n ({$($gl:ident)*} 2 @mat_xN $($tail:tt)*) => { gl_builder!{{$($gl)* x2} $($tail)*} };\n\n ({$($gl:ident)*} 3 @mat_xN $($tail:tt)*) => { gl_builder!{{$($gl)* x3} $($tail)*} };\n\n ({$($gl:ident)*} 4 @mat_xN $($tail:tt)*) => { gl_builder!{{$($gl)* x4} $($tail)*} };\n\n\n\n ({$($gl:ident)*} @v $($tail:tt)*) => { gl_builder!{{$($gl)* v} $($tail)*} };\n\n\n\n ({$($gl:ident)*} @concat) => { concat_idents!{$($gl),*} };\n\n\n", "file_path": "src/glsl/glsl_type.rs", "rank": 42, "score": 25686.607197966896 }, { "content": "\n\nuse super::*;\n\nuse std::mem::transmute;\n\n\n\npub type IntFormat = IntType;\n\n\n\nunsafe impl AttribFormat for IntFormat {\n\n\n\n #[inline] fn 
size(self) -> usize { self.size_of() }\n\n\n\n #[inline]\n\n unsafe fn bind_attribute(self, attr_id: GLuint, stride: usize, offset: usize){\n\n gl::VertexAttribIPointer(attr_id, 1, self.into(), stride as GLsizei, transmute(offset as GLintptr));\n\n }\n\n\n\n #[inline]\n\n unsafe fn set_attribute(self, attr_id: GLuint, data: *const GLvoid){\n\n FloatFormat::FromInt(self, false).set_attribute(attr_id, data);\n\n }\n\n}\n", "file_path": "src/glsl/attrib_format.rs", "rank": 52, "score": 25552.28714050134 }, { "content": " fn normalized(self) -> bool {\n\n match self {\n\n FloatFormat::FromInt(_, b) => b,\n\n _ => false\n\n }\n\n }\n\n\n\n}\n\n\n\nunsafe impl AttribFormat for FloatFormat {\n\n fn size(self) -> usize {\n\n match self {\n\n FloatFormat::Float(ty) => ty.size_of(),\n\n FloatFormat::Fixed => 4,\n\n FloatFormat::Double => 8,\n\n FloatFormat::FromInt(f, _) => f.size()\n\n }\n\n }\n\n\n\n #[inline]\n", "file_path": "src/glsl/attrib_format.rs", "rank": 55, "score": 25549.902420968887 }, { "content": "impl IVecFormat {\n\n #[inline] fn int_format(self) -> IntFormat { match self { IVecFormat::IVecN(f, _) => f } }\n\n #[inline] fn elements(self) -> usize { match self { IVecFormat::IVecN(_,c) => c } }\n\n #[inline] fn gl_type(self) -> GLenum { self.int_format().into() }\n\n\n\n}\n\n\n\nunsafe impl AttribFormat for IVecFormat {\n\n #[inline] fn size(self) -> usize { self.elements() * self.int_format().size() }\n\n\n\n #[inline]\n\n unsafe fn bind_attribute(self, attr_id: GLuint, stride: usize, offset: usize){\n\n gl::VertexAttribIPointer(attr_id, 4.min(self.elements() as GLint), self.gl_type(), stride as GLsizei, transmute(offset as GLintptr));\n\n }\n\n\n\n #[inline]\n\n unsafe fn set_attribute(self, attr_id: GLuint, data: *const GLvoid){\n\n VecFormat::VecN(FloatFormat::FromInt(self.int_format(), false), self.elements()).set_attribute(attr_id, data);\n\n }\n\n}\n", "file_path": "src/glsl/attrib_format.rs", "rank": 56, "score": 25549.36546434251 }, { "content": 
"\n\n#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]\n\npub enum FloatFormat {\n\n Float(FloatType),\n\n FromInt(IntFormat, bool),\n\n Fixed,\n\n Double\n\n}\n\n\n\nimpl FloatFormat {\n\n fn gl_type(self) -> GLenum {\n\n match self {\n\n FloatFormat::Float(ty) => ty.into(),\n\n FloatFormat::FromInt(f, _) => f.into(),\n\n FloatFormat::Double => gl::DOUBLE,\n\n FloatFormat::Fixed => gl::FIXED\n\n }\n\n }\n\n\n\n #[inline]\n", "file_path": "src/glsl/attrib_format.rs", "rank": 60, "score": 25548.033360973477 }, { "content": "unsafe impl AttribFormat for UnsupportedFormat {\n\n #[inline] fn size(self) -> usize { unimplemented!() }\n\n #[inline] fn attrib_count(self) -> usize { unimplemented!() }\n\n #[inline] unsafe fn bind_attribute(self, _attr_id: GLuint, _stride: usize, _offset: usize){ unimplemented!() }\n\n #[inline] unsafe fn set_attribute(self, _attr_id: GLuint, _data: *const GLvoid){ unimplemented!() }\n\n}\n\n\n\npub type Mat2Format = [VecFormat; 2];\n\npub type Mat3Format = [VecFormat; 3];\n\npub type Mat4Format = [VecFormat; 4];\n\n\n\npub type DMat2Format = [DVecFormat; 2];\n\npub type DMat3Format = [DVecFormat; 3];\n\npub type DMat4Format = [DVecFormat; 4];\n\n\n\nmacro_rules! 
array_format {\n\n ($($num:tt)*) => {\n\n $(\n\n unsafe impl<F:AttribFormat> AttribFormat for [F; $num] {\n\n #[inline] fn size(self) -> usize { self.iter().map(|f| f.size()).sum() }\n", "file_path": "src/glsl/attrib_format.rs", "rank": 62, "score": 25546.18052701811 }, { "content": "\n\n #[inline]\n\n unsafe fn set_attribute(self, attr_id: GLuint, data: *const GLvoid){\n\n DVecFormat::DVecN(1).set_attribute(attr_id, data);\n\n }\n\n}\n\n\n\n#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]\n\n#[allow(non_camel_case_types)]\n\npub enum VecFormat {\n\n VecN(FloatFormat, usize),\n\n Int2_10_10_10Rev(bool),\n\n UInt2_10_10_10Rev(bool),\n\n UInt10F_11F_11FRev(bool)\n\n}\n\n\n\nimpl VecFormat {\n\n fn gl_type(self) -> GLenum {\n\n match self {\n\n VecFormat::VecN(f, _) => f.gl_type(),\n", "file_path": "src/glsl/attrib_format.rs", "rank": 63, "score": 25545.933308337615 }, { "content": " VecFormat::UInt10F_11F_11FRev(b) => b,\n\n }\n\n }\n\n\n\n}\n\n\n\nunsafe impl AttribFormat for VecFormat {\n\n fn size(self) -> usize {\n\n match self {\n\n VecFormat::VecN(f, c) => c * f.size(),\n\n VecFormat::Int2_10_10_10Rev(_) => 4,\n\n VecFormat::UInt2_10_10_10Rev(_) => 4,\n\n VecFormat::UInt10F_11F_11FRev(_) => 4\n\n }\n\n }\n\n\n\n #[inline]\n\n unsafe fn bind_attribute(self, attr_id: GLuint, stride: usize, offset: usize){\n\n gl::VertexAttribPointer(attr_id, 4.min(self.elements() as GLint), self.gl_type(), self.normalized() as GLboolean, stride as GLsizei, transmute(offset as GLintptr));\n\n }\n", "file_path": "src/glsl/attrib_format.rs", "rank": 65, "score": 25544.372263826488 }, { "content": "\n\n#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]\n\npub enum DVecFormat {\n\n DVecN(usize),\n\n}\n\n\n\nimpl DVecFormat {\n\n #[inline] fn elements(self) -> usize { match self { DVecFormat::DVecN(c) => c, } }\n\n}\n\n\n\nunsafe impl AttribFormat for DVecFormat {\n\n #[inline] fn size(self) -> usize { self.elements() * 8}\n\n // #[inline] fn attrib_count(self) -> usize { match self 
{ DVecN(c) => if c>2 {2} else {1} } }\n\n\n\n #[inline]\n\n unsafe fn bind_attribute(self, attr_id: GLuint, stride: usize, offset: usize){\n\n gl::VertexAttribLPointer(attr_id, 4.min(self.elements() as GLint), gl::DOUBLE, stride as GLsizei, transmute(offset as GLintptr));\n\n }\n\n\n\n #[inline]\n", "file_path": "src/glsl/attrib_format.rs", "rank": 66, "score": 25543.565183943738 }, { "content": " VecFormat::Int2_10_10_10Rev(_) => gl::INT_2_10_10_10_REV,\n\n VecFormat::UInt2_10_10_10Rev(_) => gl::UNSIGNED_INT_2_10_10_10_REV,\n\n VecFormat::UInt10F_11F_11FRev(_) => gl::UNSIGNED_INT_10F_11F_11F_REV,\n\n }\n\n }\n\n\n\n fn elements(self) -> usize {\n\n match self {\n\n VecFormat::VecN(_, c) => c,\n\n VecFormat::Int2_10_10_10Rev(_) => 4,\n\n VecFormat::UInt2_10_10_10Rev(_) => 4,\n\n VecFormat::UInt10F_11F_11FRev(_) => 3,\n\n }\n\n }\n\n\n\n fn normalized(self) -> bool {\n\n match self {\n\n VecFormat::VecN(f, _) => f.normalized(),\n\n VecFormat::Int2_10_10_10Rev(b) => b,\n\n VecFormat::UInt2_10_10_10Rev(b) => b,\n", "file_path": "src/glsl/attrib_format.rs", "rank": 68, "score": 25540.36276597555 }, { "content": " unsafe fn bind_attribute(self, attr_id: GLuint, stride: usize, offset: usize){\n\n gl::VertexAttribPointer(attr_id, 1, self.gl_type(), self.normalized() as GLboolean, stride as GLsizei, transmute(offset as GLintptr));\n\n }\n\n\n\n #[inline]\n\n unsafe fn set_attribute(self, attr_id: GLuint, data: *const GLvoid){\n\n VecFormat::VecN(self, 1).set_attribute(attr_id, data);\n\n }\n\n}\n\n\n\n#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]\n\npub struct DoubleFormat;\n\n\n\nunsafe impl AttribFormat for DoubleFormat {\n\n #[inline] fn size(self) -> usize { 8 }\n\n\n\n #[inline]\n\n unsafe fn bind_attribute(self, attr_id: GLuint, stride: usize, offset: usize){\n\n gl::VertexAttribLPointer(attr_id, 1, gl::DOUBLE, stride as GLsizei, transmute(offset as GLintptr));\n\n }\n", "file_path": "src/glsl/attrib_format.rs", "rank": 69, "score": 25540.361784905028 }, { 
"content": "\n\n #[inline]\n\n unsafe fn set_attribute(self, attr_id: GLuint, data: *const GLvoid){\n\n match self {\n\n VecFormat::VecN(f, c) => {\n\n if c==0 {panic!(\"Zero size vecs are invalid\");}\n\n match f {\n\n FloatFormat::Float(FloatType::Half) => {\n\n match c {\n\n 1 => gl::VertexAttrib1sv(attr_id, transmute(data)),\n\n 2 => gl::VertexAttrib2sv(attr_id, transmute(data)),\n\n 3 => gl::VertexAttrib3sv(attr_id, transmute(data)),\n\n _ => gl::VertexAttrib4sv(attr_id, transmute(data)),\n\n }\n\n },\n\n FloatFormat::Float(FloatType::Float) => {\n\n match c {\n\n 1 => gl::VertexAttrib1fv(attr_id, transmute(data)),\n\n 2 => gl::VertexAttrib2fv(attr_id, transmute(data)),\n\n 3 => gl::VertexAttrib3fv(attr_id, transmute(data)),\n", "file_path": "src/glsl/attrib_format.rs", "rank": 70, "score": 25539.602721546984 }, { "content": " arr\n\n }\n\n }\n\n\n\n if normalized {\n\n match z {\n\n IntFormat::Byte => {\n\n let arr = to_vec4::<GLbyte>(data, c, 0, 0);\n\n gl::VertexAttrib4Nbv(attr_id, &arr[0] as *const GLbyte);\n\n },\n\n IntFormat::UByte => {\n\n let arr = to_vec4::<GLubyte>(data, c, 0, 0);\n\n gl::VertexAttrib4Nubv(attr_id, &arr[0] as *const GLubyte);\n\n },\n\n IntFormat::Short => {\n\n let arr = to_vec4::<GLshort>(data, c, 0, 0);\n\n gl::VertexAttrib4Nsv(attr_id, &arr[0] as *const GLshort);\n\n },\n\n IntFormat::UShort => {\n\n let arr = to_vec4::<GLushort>(data, c, 0, 0);\n", "file_path": "src/glsl/attrib_format.rs", "rank": 71, "score": 25538.715264486935 }, { "content": " _ => gl::VertexAttrib4fv(attr_id, transmute(data)),\n\n }\n\n },\n\n FloatFormat::Double => {\n\n match c {\n\n 1 => gl::VertexAttrib1dv(attr_id, transmute(data)),\n\n 2 => gl::VertexAttrib2dv(attr_id, transmute(data)),\n\n 3 => gl::VertexAttrib3dv(attr_id, transmute(data)),\n\n _ => gl::VertexAttrib4dv(attr_id, transmute(data)),\n\n }\n\n },\n\n FloatFormat::Fixed => unimplemented!(),\n\n FloatFormat::FromInt(z, normalized) => {\n\n unsafe fn to_vec4<G:Copy>(ptr: *const GLvoid, 
count:usize, zero: G, one: G) -> [G;4] {\n\n let p: *const G = transmute(ptr);\n\n if count >=4 {\n\n [*p, *p.offset(1), *p.offset(2), *p.offset(3)]\n\n } else {\n\n let mut arr = [zero, zero, zero, one];\n\n for i in 0..count { arr[i] = *p.offset(i as isize);}\n", "file_path": "src/glsl/attrib_format.rs", "rank": 72, "score": 25536.57173700789 }, { "content": " },\n\n IntFormat::Short => {\n\n let arr = to_vec4::<GLshort>(data, c, 0, 0);\n\n gl::VertexAttribI4sv(attr_id, &arr[0] as *const GLshort);\n\n },\n\n IntFormat::UShort => {\n\n let arr = to_vec4::<GLushort>(data, c, 0, 0);\n\n gl::VertexAttribI4usv(attr_id, &arr[0] as *const GLushort);\n\n },\n\n IntFormat::Int => {\n\n match c {\n\n 1 => gl::VertexAttribI1iv(attr_id, transmute(data)),\n\n 2 => gl::VertexAttribI2iv(attr_id, transmute(data)),\n\n 3 => gl::VertexAttribI3iv(attr_id, transmute(data)),\n\n _ => gl::VertexAttribI4iv(attr_id, transmute(data)),\n\n }\n\n },\n\n IntFormat::UInt => {\n\n match c {\n\n 1 => gl::VertexAttribI1uiv(attr_id, transmute(data)),\n", "file_path": "src/glsl/attrib_format.rs", "rank": 73, "score": 25536.086894553722 }, { "content": " gl::VertexAttrib4Nusv(attr_id, &arr[0] as *const GLushort);\n\n },\n\n IntFormat::Int => {\n\n let arr = to_vec4::<GLint>(data, c, 0, 0);\n\n gl::VertexAttrib4Niv(attr_id, &arr[0] as *const GLint);\n\n },\n\n IntFormat::UInt => {\n\n let arr = to_vec4::<GLuint>(data, c, 0, 0);\n\n gl::VertexAttrib4Nuiv(attr_id, &arr[0] as *const GLuint);\n\n },\n\n }\n\n } else {\n\n match z {\n\n IntFormat::Byte => {\n\n let arr = to_vec4::<GLbyte>(data, c, 0, 0);\n\n gl::VertexAttribI4bv(attr_id, &arr[0] as *const GLbyte);\n\n },\n\n IntFormat::UByte => {\n\n let arr = to_vec4::<GLubyte>(data, c, 0, 0);\n\n gl::VertexAttribI4ubv(attr_id, &arr[0] as *const GLubyte);\n", "file_path": "src/glsl/attrib_format.rs", "rank": 74, "score": 25535.383985859226 }, { "content": " 2 => gl::VertexAttribI2uiv(attr_id, transmute(data)),\n\n 3 => gl::VertexAttribI3uiv(attr_id, 
transmute(data)),\n\n _ => gl::VertexAttribI4uiv(attr_id, transmute(data)),\n\n }\n\n },\n\n }\n\n }\n\n }\n\n }\n\n },\n\n _ => gl::VertexAttribP4uiv(attr_id, self.gl_type(), self.normalized() as GLboolean, transmute(data))\n\n }\n\n }\n\n}\n\n\n\n#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]\n\npub enum IVecFormat {\n\n IVecN(IntFormat, usize)\n\n}\n\n\n", "file_path": "src/glsl/attrib_format.rs", "rank": 75, "score": 25534.83085796285 }, { "content": " #[inline] fn attrib_count(self) -> usize { $num }\n\n\n\n #[inline]\n\n unsafe fn bind_attribute(self, attr_id: GLuint, stride: usize, offset: usize) {\n\n for i in 0..$num {\n\n self[i].bind_attribute(attr_id + (i as GLuint)*(self[i].attrib_count() as GLuint), stride, offset + i*self[i].size());\n\n }\n\n }\n\n\n\n #[inline]\n\n unsafe fn set_attribute(self, attr_id: GLuint, data: *const GLvoid){\n\n for i in 0..$num {\n\n self[i].set_attribute(attr_id + (i as GLuint)*(self[i].attrib_count() as GLuint), data.offset((i*self[i].size()) as isize));\n\n }\n\n }\n\n }\n\n )*\n\n }\n\n}\n\n\n\n//lol no... 
it you need attribute arrays longer than 32 elements, you *probably* should try something else...\n\narray_format!{\n\n 01 02 03 04 05 06 07 08 09 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32\n\n}\n", "file_path": "src/glsl/attrib_format.rs", "rank": 76, "score": 25534.54134442948 }, { "content": " unsafe fn set_attribute(self, attr_id: GLuint, data: *const GLvoid){\n\n match self {\n\n DVecFormat::DVecN(c) => {\n\n match c {\n\n 0 => panic!(\"Zero size vecs are invalid\"),\n\n 1 => gl::VertexAttribL1dv(attr_id, transmute(data)),\n\n 2 => gl::VertexAttribL2dv(attr_id, transmute(data)),\n\n 3 => gl::VertexAttribL3dv(attr_id, transmute(data)),\n\n _ => gl::VertexAttribL4dv(attr_id, transmute(data)),\n\n }\n\n }\n\n }\n\n }\n\n}\n\n\n\n#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]\n\npub struct UnsupportedFormat {\n\n _private: ()\n\n}\n\n\n", "file_path": "src/glsl/attrib_format.rs", "rank": 77, "score": 25532.63531955049 }, { "content": " [Float FLOAT \"FLoat\"]\n\n }\n\n}\n\n\n\nimpl IntType {\n\n #[inline]\n\n pub fn size_of(self) -> usize {\n\n match self {\n\n IntType::Byte | IntType::UByte => 1,\n\n IntType::Short |IntType::UShort => 2,\n\n IntType::Int | IntType::UInt => 4\n\n }\n\n }\n\n}\n\n\n\nimpl FloatType {\n\n #[inline]\n\n pub fn size_of(self) -> usize {\n\n match self {\n\n FloatType::Half => 2,\n\n FloatType::Float => 4,\n\n }\n\n }\n\n}\n\n\n\n\n", "file_path": "src/lib.rs", "rank": 78, "score": 37.334992135145306 }, { "content": " fn format_type(&self) -> F;\n\n fn len(&self) -> usize;\n\n\n\n fn bind_pixel_buffer<'a>(&'a self, target:&'a mut BindingLocation<UninitBuf>) -> Option<Binding<'a,UninitBuf>>;\n\n unsafe fn pixels(&self) -> *const GLvoid;\n\n}\n\n\n\npub unsafe trait PixelDataMut<F:PixelFormatType> {\n\n unsafe fn pixels_mut(&mut self) -> *mut GLvoid;\n\n}\n\n\n\npub unsafe trait PixelType<F: PixelFormatType>: Sized+Copy+Clone+PartialEq {\n\n fn format_type() -> F;\n\n fn swap_bytes() -> bool;\n\n fn 
lsb_first() -> bool;\n\n}\n\n\n\nunsafe impl<F:PixelFormatType,T:PixelType<F>> PixelData<F> for [T] {\n\n #[inline] fn swap_bytes(&self) -> bool {T::swap_bytes()}\n\n #[inline] fn lsb_first(&self) -> bool {T::lsb_first()}\n", "file_path": "src/texture/pixel_data.rs", "rank": 79, "score": 27.01640540396354 }, { "content": " pub fn init(_gl: &GLProvider) -> Context {\n\n Context { _private: ::std::marker::PhantomData }\n\n }\n\n}\n\n\n\n// impl !Send for Context {}\n\n// impl !Sync for Context {}\n\n\n\nglenum! {\n\n pub enum IntType {\n\n [Byte BYTE \"Byte\"],\n\n [UByte UNSIGNED_BYTE \"UByte\"],\n\n [Short SHORT \"Short\"],\n\n [UShort UNSIGNED_SHORT \"UShort\"],\n\n [Int INT \"Int\"],\n\n [UInt UNSIGNED_INT \"UInt\"]\n\n }\n\n\n\n pub enum FloatType {\n\n [Half HALF_FLOAT \"Half\"],\n", "file_path": "src/lib.rs", "rank": 80, "score": 26.834218008183193 }, { "content": "\n\nuse super::*;\n\nuse std::slice::*;\n\n\n\npub use self::attrib_format::*;\n\n\n\n#[macro_use]\n\nmod glsl;\n\nmod attrib_format;\n\npub mod glsl_type;\n\n\n\npub unsafe trait GLSLType: Sized + Copy + Debug {\n\n type AttributeFormat: AttribFormat;\n\n\n\n #[inline] fn uniform_locations() -> GLuint {1}\n\n #[inline] fn first_element_name(var: String) -> String { var }\n\n\n\n unsafe fn load_uniform(id: GLint, data: &Self) { Self::load_uniforms(id, from_ref(data)); }\n\n unsafe fn load_uniforms(id: GLint, data: &[Self]);\n\n unsafe fn get_uniform(p: GLuint, id:GLint) -> Self;\n", "file_path": "src/glsl/mod.rs", "rank": 81, "score": 25.26113911586482 }, { "content": "\n\nimpl From<u8> for PixelRowAlignment {\n\n #[inline] fn from(a:u8) -> Self {\n\n let mut shift = a;\n\n let mut count = 0;\n\n while shift!=0 {\n\n shift >>= 1;\n\n count += 1;\n\n }\n\n PixelRowAlignment(1<<count)\n\n }\n\n}\n\n\n\npub unsafe trait PixelData<F:PixelFormatType> {\n\n\n\n #[inline] fn swap_bytes(&self) -> bool {false}\n\n #[inline] fn lsb_first(&self) -> bool {false}\n\n\n\n #[inline] fn alignment(&self) -> 
PixelRowAlignment {ALIGN_4}\n\n\n", "file_path": "src/texture/pixel_data.rs", "rank": 82, "score": 23.734073989829476 }, { "content": "\n\n #[inline]\n\n unsafe fn bind_attribute(attr: GLuint, format: Self::AttributeFormat, stride: usize, offset: usize) {\n\n format.bind_attribute(attr, stride, offset);\n\n }\n\n\n\n #[inline]\n\n unsafe fn set_attribute(attr: GLuint, format: Self::AttributeFormat, data: *const GLvoid) {\n\n format.set_attribute(attr, data);\n\n }\n\n\n\n}\n\n\n\npub unsafe trait GLSLStruct { const SRC: &'static str; }\n\npub unsafe trait GLSLFunction<ReturnType, Params> { const SRC: &'static str; }\n\n\n\npub unsafe trait BlockLayout: Sized + Copy {}\n\npub unsafe trait Layout<B:BlockLayout> {}\n\npub unsafe trait AlignedVec4 {}\n\n\n\n#[derive(Clone, Copy, Debug)] #[allow(non_camel_case_types)] pub struct std140;\n\n#[derive(Clone, Copy, Debug)] #[allow(non_camel_case_types)] pub struct std430;\n\n#[derive(Clone, Copy, Debug)] #[allow(non_camel_case_types)] pub struct shared;\n\n\n\nunsafe impl BlockLayout for std140 {}\n\nunsafe impl BlockLayout for std430 {}\n\nunsafe impl BlockLayout for shared {}\n\n\n", "file_path": "src/glsl/mod.rs", "rank": 83, "score": 23.551716286781637 }, { "content": "}\n\n\n\nimpl_tuple!(impl_as_attrib_arrays);\n\n\n\n\n\nimpl<'a, A:GLSLType> AttribArray<'a,A> {\n\n\n\n #[inline] pub unsafe fn bind(&self) { BufferTarget::ArrayBuffer.bind(&self.buf); }\n\n #[inline] pub unsafe fn unbind() { BufferTarget::ArrayBuffer.unbind() }\n\n\n\n #[inline] pub fn len(&self) -> usize { self.len }\n\n #[inline] pub fn format(&self) -> A::AttributeFormat { self.format }\n\n #[inline] pub fn stride(&self) -> usize { self.stride }\n\n #[inline] pub fn offset(&self) -> usize { self.offset }\n\n\n\n}\n", "file_path": "src/buffer.rs", "rank": 84, "score": 22.061687472435626 }, { "content": " fn width(&self) -> usize {self[0]}\n\n fn height(&self) -> usize {self[1]}\n\n fn depth(&self) -> usize {self[2]}\n\n}\n\n\n\nunsafe fn 
apply_packing_settings<F:PixelFormatType,P:PixelData<F>>(pixels:&P) {\n\n gl::PixelStorei(gl::PACK_SWAP_BYTES, pixels.swap_bytes() as GLint);\n\n gl::PixelStorei(gl::PACK_LSB_FIRST, pixels.lsb_first() as GLint);\n\n gl::PixelStorei(gl::PACK_ALIGNMENT, pixels.alignment().0 as GLint);\n\n}\n\n\n\nunsafe fn apply_unpacking_settings<F:PixelFormatType,P:PixelData<F>>(pixels:&P) {\n\n gl::PixelStorei(gl::UNPACK_SWAP_BYTES, pixels.swap_bytes() as GLint);\n\n gl::PixelStorei(gl::UNPACK_LSB_FIRST, pixels.lsb_first() as GLint);\n\n gl::PixelStorei(gl::UNPACK_ALIGNMENT, pixels.alignment().0 as GLint);\n\n}\n\n\n\npub unsafe trait Texture: Sized {\n\n type InternalFormat: InternalFormat<TypeFormat=Self::PixelFormat>;\n\n type PixelFormat: PixelFormatType;\n", "file_path": "src/texture/mod.rs", "rank": 85, "score": 21.322686633388575 }, { "content": "\n\n #[inline] fn alignment(&self) -> PixelRowAlignment { PixelRowAlignment(align_of::<T>().min(8) as u8) }\n\n\n\n #[inline] fn format_type(&self) -> F {T::format_type()}\n\n #[inline] fn len(&self) -> usize {self.len()}\n\n\n\n #[inline]\n\n fn bind_pixel_buffer<'a>(&'a self, _target:&'a mut BindingLocation<UninitBuf>) -> Option<Binding<'a,UninitBuf>> {\n\n None\n\n }\n\n\n\n #[inline] unsafe fn pixels(&self) -> *const GLvoid {&self[0] as *const T as *const GLvoid}\n\n}\n\n\n\nunsafe impl<F:PixelFormatType,T:PixelType<F>> PixelDataMut<F> for [T] {\n\n #[inline] unsafe fn pixels_mut(&mut self) -> *mut GLvoid {&mut self[0] as *mut T as *mut GLvoid}\n\n}\n", "file_path": "src/texture/pixel_data.rs", "rank": 86, "score": 18.750577129894165 }, { "content": "use std::fmt;\n\nuse std::fmt::{Display, Debug, Formatter};\n\nuse std::hash::Hash;\n\n\n\npub use program::*;\n\npub use glsl::*;\n\npub use buffer::*;\n\n\n\nmacro_rules! 
display_from_debug {\n\n ($name:ty) => {\n\n impl ::std::fmt::Display for $name {\n\n #[inline]\n\n fn fmt(&self,f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {\n\n ::std::fmt::Debug::fmt(self, f)\n\n }\n\n }\n\n }\n\n}\n\n\n\nmacro_rules! glenum {\n", "file_path": "src/lib.rs", "rank": 87, "score": 18.613711670005483 }, { "content": " $($mod)* struct $struct_name {\n\n $(pub $name: $ty),*\n\n }\n\n\n\n unsafe impl AlignedVec4 for $struct_name where ($($ty),*): AlignedVec4 {}\n\n unsafe impl Layout<std140> for $struct_name where Self:AlignedVec4, $($ty: Layout<std140>),* {}\n\n unsafe impl Layout<std430> for $struct_name where $($ty: Layout<std430>),* {}\n\n\n\n //methods for getting attribute arrays from buffers\n\n impl $struct_name where $($ty: GLSLType + GLSLData<$ty>),* {\n\n\n\n #[inline]\n\n pub fn get_attrib_arrays<'a, A:BufferAccess>(buf: &'a Buffer<[Self], A>) -> ($(AttribArray<'a, $ty>),*) {\n\n unsafe {\n\n use std::mem::*;\n\n let uninit = ::std::mem::MaybeUninit::<Self>::uninit();\n\n let start = uninit.as_ptr() as *const u8;\n\n let arrays = (\n\n $(buf.get_attrib_array::<$ty, $ty>(\n\n //get the byte offset from the field to the start of the object\n", "file_path": "src/glsl/glsl.rs", "rank": 88, "score": 17.793770849861435 }, { "content": " gl::DeleteBuffers(1, &b.id);\n\n forget(b);\n\n }\n\n }\n\n }\n\n\n\n #[inline] pub fn id(&self) -> GLuint { self.id }\n\n #[inline] pub fn data_offset(&self) -> usize { self.offset as usize }\n\n #[inline] pub fn data_size(&self) -> usize { self.size as usize }\n\n #[inline] pub fn buffer_size(&self) -> usize { self.capacity as usize }\n\n #[inline] pub fn usage_hint(&self) -> BufferUsage { self.usage }\n\n\n\n #[inline] pub fn gl_provider(&self) -> GLProvider { GLProvider::get_current().unwrap() }\n\n\n\n #[inline]\n\n unsafe fn _from_box(_gl: &GLProvider, data: Box<T>) -> Self {\n\n Self::_from_box_with_hint(_gl, BufferUsage::default(), data)\n\n }\n\n\n\n unsafe fn _from_box_with_hint(_gl: 
&GLProvider, hint: BufferUsage, data: Box<T>) -> Self {\n", "file_path": "src/buffer.rs", "rank": 89, "score": 16.72221292915632 }, { "content": "\n\nuse super::*;\n\nuse buffer_new::{UninitBuf};\n\nuse std::mem::*;\n\n\n\n#[derive(Copy,Clone,PartialEq,Eq,Hash)]\n\npub struct PixelRowAlignment(pub(super) u8);\n\n\n\ndisplay_from_debug!(PixelRowAlignment);\n\nimpl ::std::fmt::Debug for PixelRowAlignment {\n\n #[inline]\n\n fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {\n\n write!(f, \"{}\", self.0)\n\n }\n\n}\n\n\n\npub const ALIGN_1: PixelRowAlignment = PixelRowAlignment(1);\n\npub const ALIGN_2: PixelRowAlignment = PixelRowAlignment(2);\n\npub const ALIGN_4: PixelRowAlignment = PixelRowAlignment(4);\n\npub const ALIGN_8: PixelRowAlignment = PixelRowAlignment(8);\n", "file_path": "src/texture/pixel_data.rs", "rank": 90, "score": 16.09971783973619 }, { "content": "impl<T:GPUCopy+?Sized> Buffer<T,Read> {\n\n #[inline] pub fn readonly_from(gl: &GLProvider, data: Box<T>) -> Self { unsafe { Self::_from_box(gl, data) } }\n\n #[inline] pub fn readonly_from_with_hint(gl: &GLProvider, hint: BufferUsage, data: Box<T>) -> Self {\n\n unsafe { Self::_from_box_with_hint(gl, hint, data)}\n\n }\n\n}\n\n\n\nimpl<T:GPUCopy+Sized> Buffer<T,Read> {\n\n #[inline] pub fn new_readonly(gl: &GLProvider, data: T) -> Self { unsafe { Self::_new(gl, data) } }\n\n #[inline] pub fn readonly_with_hint(gl: &GLProvider, hint: BufferUsage, data: T) -> Self {\n\n unsafe { Self::_with_hint(gl, hint, data) }\n\n }\n\n}\n\n\n\nimpl<T:GPUCopy+?Sized> Buffer<T,Write> {\n\n #[inline] pub fn writeonly_from(gl: &GLProvider, data: Box<T>) -> Self { unsafe { Self::_from_box(gl, data) } }\n\n #[inline] pub fn writeonly_from_with_hint(gl: &GLProvider, hint: BufferUsage, data: Box<T>) -> Self {\n\n unsafe { Self::_from_box_with_hint(gl, hint, data) }\n\n }\n\n}\n", "file_path": "src/buffer.rs", "rank": 91, "score": 16.0353035983597 }, { "content": " \"\".to_owned()\n\n }\n\n }\n\n 
}\n\n\n\n}\n\n\n\nimpl Drop for ProgramID {\n\n fn drop(&mut self) {\n\n unsafe { gl::DeleteProgram(self.id); }\n\n }\n\n}\n\n\n\npub unsafe trait Program: Sized {\n\n fn init(context: &GLProvider) -> Result<Self, GLError>;\n\n}\n\n\n\npub unsafe trait ShaderProgram: Program {}\n\npub unsafe trait ComputeProgram: Program {}\n\n\n", "file_path": "src/program.rs", "rank": 92, "score": 16.03380401537688 }, { "content": " #[inline] pub fn new(gl: &GLProvider, data: T) -> Self { unsafe { Self::_new(gl, data) } }\n\n #[inline] pub fn with_hint(gl: &GLProvider, hint: BufferUsage, data: T) -> Self {\n\n unsafe { Self::_with_hint(gl, hint, data) }\n\n }\n\n}\n\n\n\nimpl<T:GPUCopy+?Sized> Buffer<T,CopyOnly> {\n\n #[inline] pub fn immut_from(gl: &GLProvider, data: Box<T>) -> Self { unsafe { Self::_from_box(gl, data) } }\n\n #[inline] pub fn immut_from_with_hint(gl: &GLProvider, hint: BufferUsage, data: Box<T>) -> Self {\n\n unsafe { Self::_from_box_with_hint(gl, hint, data)}\n\n }\n\n}\n\n\n\nimpl<T:GPUCopy+Sized> Buffer<T,CopyOnly> {\n\n #[inline] pub fn new_immut(gl: &GLProvider, data: T) -> Self { unsafe { Self::_new(gl, data) } }\n\n #[inline] pub fn immut_with_hint(gl: &GLProvider, hint: BufferUsage, data: T) -> Self {\n\n unsafe { Self::_with_hint(gl, hint, data) }\n\n }\n\n}\n\n\n", "file_path": "src/buffer.rs", "rank": 93, "score": 16.029756828897096 }, { "content": " type Dim: TexDim;\n\n\n\n fn target() -> TextureTarget;\n\n fn id(&self) -> GLuint;\n\n fn format(&self) -> Self::InternalFormat;\n\n fn dim(&self) -> Self::Dim;\n\n\n\n fn storage<P:PixelData<Self::PixelFormat>>(\n\n _gl:&GL4,\n\n raw:RawTex,\n\n levels:usize,\n\n internalformat:Self::InternalFormat,\n\n dim:Self::Dim,\n\n _pixels:P\n\n ) -> Self\n\n {\n\n unsafe {\n\n let mut target = Self::target().as_loc();\n\n let binding = target.bind(&raw);\n\n match Self::Dim::dim() {\n", "file_path": "src/texture/mod.rs", "rank": 94, "score": 16.003063984342653 }, { "content": "#![allow(dead_code)]\n\n\n\nuse 
super::*;\n\nuse crate::gl;\n\n\n\nuse super::Target;\n\n\n\nuse std::convert::TryInto;\n\n\n\npub use self::pixel_format::*;\n\npub use self::pixel_data::*;\n\n\n\nmod pixel_format;\n\nmod pixel_data;\n\n\n\nglenum! {\n\n pub enum TextureTarget {\n\n [Texture1D TEXTURE_1D \"Texture 1D\"],\n\n [Texture2D TEXTURE_2D \"Texture 2D\"],\n\n [Texture3D TEXTURE_3D \"Texture 3D\"],\n", "file_path": "src/texture/mod.rs", "rank": 95, "score": 15.8683667042636 }, { "content": "\n\nunsafe impl Target for TextureTarget {\n\n type Resource = RawTex;\n\n #[inline] unsafe fn bind(self, id:GLuint) {gl::BindTexture(self as GLenum, id)}\n\n}\n\n\n\n\n\ngl_resource!{\n\n pub struct RawTex {\n\n gl = GL2,\n\n target = TextureTarget,\n\n gen = GenTextures,\n\n is = IsTexture,\n\n delete = DeleteTextures\n\n }\n\n}\n\n\n\npub unsafe trait TexDim:Copy {\n\n fn dim() -> usize;\n\n fn width(&self) -> usize;\n", "file_path": "src/texture/mod.rs", "rank": 96, "score": 15.768621392220194 }, { "content": " //to get the GLvoid we need\n\n buf.repr.rust = &data as *const T;\n\n let ptr = buf.repr.void;\n\n buf.buffer_storage(hint, size_of::<T>(), ptr);\n\n\n\n //make sure the destructor does not run\n\n forget(data);\n\n\n\n buf\n\n }\n\n}\n\n\n\nimpl<T:Sized, A:BufferAccess> Buffer<[T], A> {\n\n #[inline]\n\n pub unsafe fn uninitialized(_gl: &GLProvider, count: usize) -> Self {\n\n Self::uninitialized_with_hint(_gl, BufferUsage::default(), count)\n\n }\n\n\n\n #[inline]\n\n pub unsafe fn uninitialized_with_hint(_gl: &GLProvider, hint: BufferUsage, count: usize) -> Self {\n", "file_path": "src/buffer.rs", "rank": 97, "score": 15.419377482762783 }, { "content": " };\n\n drop(buf_binding);\n\n }\n\n }\n\n}\n\n\n\n\n\n// pub struct Tex1D<F:InternalFormat> {\n\n// raw: RawTex,\n\n// format: F,\n\n// dim: [usize;1]\n\n// }\n\n//\n\n//\n\n//\n\n// pub struct Tex2D<F:InternalFormat> {\n\n// raw: RawTex,\n\n// format: F,\n\n// dim: [usize;2]\n\n// }\n\n//\n\n// pub struct Tex3D<F:InternalFormat> {\n\n// 
raw: RawTex,\n\n// format: F,\n\n// dim: [usize;3]\n\n// }\n", "file_path": "src/texture/mod.rs", "rank": 98, "score": 15.39609604631436 }, { "content": "impl<T:GPUCopy + Sized, A:BufferAccess> Buffer<T,A> {\n\n //since we have a sized type, there no need for error checking\n\n //or resizing offsets when we copy\n\n #[inline] pub fn copy_to(&self, dest: &mut Self) { unsafe { self.copy_data(dest); } }\n\n #[inline] pub fn copy_from(&mut self, src: &Self) { src.copy_to(self) }\n\n}\n\n\n\n//\n\n//Clone works using the copy methods, and thus, we can only clone buffers that can be copied.\n\n//\n\n\n\nimpl<T:GPUCopy + ?Sized, A:BufferAccess> Clone for Buffer<T, A> {\n\n fn clone(&self) -> Self {\n\n unsafe {\n\n let mut buf = Self::allocate(self.data_size(), self.usage_hint());\n\n self.copy_data(&mut buf);\n\n buf\n\n }\n\n }\n\n}\n", "file_path": "src/buffer.rs", "rank": 99, "score": 15.188627445028775 } ]
Rust
src/lexer.rs
kimhyunkang/r5.rs
cac10a2d3c65e72bcbeac11169fb0eba94baa53e
use std::io::{IoError, IoErrorKind}; use std::string::CowString; use std::borrow::Cow; use std::fmt; use error::{ParserError, ParserErrorKind}; #[derive(PartialEq)] pub enum Token { OpenParen, CloseParen, Dot, Identifier(CowString<'static>), True, False, Character(String), Numeric(String), EOF } impl fmt::Show for Token { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match *self { Token::OpenParen => write!(f, "OpenParen"), Token::CloseParen => write!(f, "CloseParen"), Token::Dot => write!(f, "Dot"), Token::Identifier(ref name) => write!(f, "Identifier({})", name), Token::True => write!(f, "#t"), Token::False => write!(f, "#f"), Token::Character(ref name) => write!(f, "#\\{}", name), Token::Numeric(ref rep) => rep.fmt(f), Token::EOF => write!(f, "EOF"), } } } pub struct TokenWrapper { pub line: usize, pub column: usize, pub token: Token } fn wrap(line: usize, column: usize, t: Token) -> TokenWrapper { TokenWrapper { line: line, column: column, token: t } } fn is_whitespace(c: char) -> bool { match c { '\t' | '\n' | '\x0b' | '\x0c' | '\r' | ' ' => true, _ => false } } fn is_initial(c: char) -> bool { match c { 'a'...'z' | 'A'...'Z' | '!' | '$' | '%' | '&' | '*' | '/' | ':' | '<' | '=' | '>' | '?' | '^' | '_' | '~' => true, _ => false } } fn is_subsequent(c: char) -> bool { if is_initial(c) { true } else { match c { '0'...'9' | '+' | '-' | '.' 
| '@' => true, _ => false } } } pub struct Lexer<'a> { line: usize, column: usize, stream: &'a mut (Buffer+'a), lookahead_buf: Option<char>, } impl <'a> Lexer<'a> { pub fn new<'r>(stream: &'r mut Buffer) -> Lexer<'r> { Lexer { line: 1, column: 1, stream: stream, lookahead_buf: None, } } pub fn lex_token(&mut self) -> Result<TokenWrapper, ParserError> { try!(self.consume_whitespace()); let line = self.line; let col = self.column; let c = match self.consume() { Err(e) => match e.kind { IoErrorKind::EndOfFile => return Ok(wrap(line, col, Token::EOF)), _ => return Err(self.make_error(ParserErrorKind::UnderlyingError(e))) }, Ok(c) => c }; let end_of_token = try!(self.is_end_of_token()); if is_initial(c) { let mut init = String::new(); init.push(c); self.lex_ident(init).map(|s| wrap(line, col, Token::Identifier(Cow::Owned(s)))) } else if c == '+' && end_of_token { Ok(wrap(line, col, Token::Identifier(Cow::Borrowed("+")))) } else if c == '-' { if end_of_token { Ok(wrap(line, col, Token::Identifier(Cow::Borrowed("-")))) } else { match self.lookahead() { Ok('>') => self.lex_ident("-".to_string()).map(|s| wrap(line, col, Token::Identifier(Cow::Owned(s)))), Ok(c) => Err(self.make_error(ParserErrorKind::InvalidCharacter(c))), Err(e) => match e.kind { IoErrorKind::EndOfFile => Ok(wrap(line, col, Token::Identifier(Cow::Borrowed("-")))), _ => Err(self.make_error(ParserErrorKind::UnderlyingError(e))) } } } } else if c == '(' { Ok(wrap(line, col, Token::OpenParen)) } else if c == ')' { Ok(wrap(line, col, Token::CloseParen)) } else if c == '.' 
&& end_of_token { Ok(wrap(line, col, Token::Dot)) } else if c == '#' { let c0 = match self.consume() { Err(e) => return Err(match e.kind { IoErrorKind::EndOfFile => self.make_error(ParserErrorKind::UnexpectedEOF), _ => self.make_error(ParserErrorKind::UnderlyingError(e)) }), Ok(x) => x }; match c0 { 't' | 'T' => Ok(wrap(line, col, Token::True)), 'f' | 'F' => Ok(wrap(line, col, Token::False)), '\\' => self.lex_char().map(|s| wrap(line, col, Token::Character(s))), _ => Err(self.make_error(ParserErrorKind::InvalidCharacter(c))) } } else if c.is_numeric() { self.lex_numeric(c).map(|s| wrap(line, col, Token::Numeric(s))) } else { Err(self.make_error(ParserErrorKind::InvalidCharacter(c))) } } fn is_end_of_token(&mut self) -> Result<bool, ParserError> { match self.lookahead() { Ok(c) => Ok(is_whitespace(c)), Err(e) => match e.kind { IoErrorKind::EndOfFile => Ok(true), _ => Err(self.make_error(ParserErrorKind::UnderlyingError(e))) } } } fn lex_ident(&mut self, initial: String) -> Result<String, ParserError> { let mut s = initial; let sub = try!(self.read_while(is_subsequent)); s.push_str(sub.as_slice()); return Ok(s); } fn lex_char(&mut self) -> Result<String, ParserError> { let c = match self.consume() { Ok(c) => c, Err(e) => return Err(self.make_error(match e.kind { IoErrorKind::EndOfFile => ParserErrorKind::UnexpectedEOF, _ => ParserErrorKind::UnderlyingError(e) })) }; let mut s = String::new(); s.push(c); let sub = try!(self.read_while(|c| c.is_alphanumeric())); s.push_str(sub.as_slice()); return Ok(s); } fn lex_numeric(&mut self, init: char) -> Result<String, ParserError> { let mut s = String::new(); s.push(init); let sub = try!(self.read_while(|c| c.is_numeric())); s.push_str(sub.as_slice()); return Ok(s); } fn make_error(&self, kind: ParserErrorKind) -> ParserError { ParserError { line: self.line, column: self.column, kind: kind } } fn lookahead(&mut self) -> Result<char, IoError> { Ok(match self.lookahead_buf { Some(c) => c, None => { let c = 
try!(self.stream.read_char()); self.lookahead_buf = Some(c); c } }) } fn advance(&mut self, c: char) { if c == '\n' { self.line += 1; self.column = 1; } else { self.column += 1; } } fn read_while<F>(&mut self, f: F) -> Result<String, ParserError> where F: Fn(char) -> bool { let mut s = match self.lookahead_buf { None => String::new(), Some(c) => if f(c) { self.lookahead_buf = None; self.advance(c); let mut s = String::new(); s.push(c); s } else { return Ok(String::new()); } }; loop { match self.stream.read_char() { Ok(c) => if f(c) { self.advance(c); s.push(c); } else { self.lookahead_buf = Some(c); return Ok(s); }, Err(e) => match e.kind { IoErrorKind::EndOfFile => return Ok(s), _ => return Err(self.make_error(ParserErrorKind::UnderlyingError(e))) } } } } fn consume(&mut self) -> Result<char, IoError> { let c = match self.lookahead_buf { Some(c) => { self.lookahead_buf = None; c }, None => try!(self.stream.read_char()) }; self.advance(c); Ok(c) } fn consume_whitespace(&mut self) -> Result<bool, ParserError> { let mut consumed = false; loop { let whitespace = try!(self.read_while(is_whitespace)); consumed = consumed || whitespace.len() > 0; match self.lookahead() { Ok(';') => { consumed = true; try!(self.read_while(|c| c != '\n')); if self.lookahead_buf.is_some() { self.lookahead_buf = None } }, Ok(_) => return Ok(consumed), Err(e) => match e.kind { IoErrorKind::EndOfFile => return Ok(consumed), _ => return Err(self.make_error(ParserErrorKind::UnderlyingError(e))) } } } } }
use std::io::{IoError, IoErrorKind}; use std::string::CowString; use std::borrow::Cow; use std::fmt; use error::{ParserError, ParserErrorKind}; #[derive(PartialEq)] pub enum Token { OpenParen, CloseParen, Dot, Identifier(CowString<'static>), True, False, Character(String), Numeric(String), EOF } impl fmt::Show for Token { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match *self { Token::OpenParen => write!(f, "OpenParen"), Token::CloseParen => write!(f, "CloseParen"), Token::Dot => write!(f, "Dot"), Token::Identifier(ref name) => write!(f, "Identifier({})", name), Token::True => write!(f, "#t"), Token::False => write!(f, "#f"), Token::Character(ref name) => write!(f, "#\\{}", name), Token::Numeric(ref rep) => rep.fmt(f), Token::EOF => write!(f, "EOF"), } } } pub struct TokenWrapper { pub line: usize, pub column: usize, pub token: Token } fn wrap(line: usize, column: usize, t: Token) -> TokenWrapper { TokenWrapper { line: line, column: column, token: t } } fn is_whitespace(c: char) -> bool { match c { '\t' | '\n' | '\x0b' | '\x0c' | '\r' | ' ' => true, _ => false } } fn is_initial(c: char) -> bool { match c { 'a'...'z' | 'A'...'Z' | '!' | '$' | '%' | '&' | '*' | '/' | ':' | '<' | '=' | '>' | '?' | '^' | '_' | '~' => true, _ => false } } fn is_subsequent(c: char) -> bool { if is_initial(c) { true } else { match c { '0'...'9' | '+' | '-' | '.' 
| '@' => true, _ => false } } } pub struct Lexer<'a> { line: usize, column: usize, stream: &'a mut (Buffer+'a), lookahead_buf: Option<char>, } impl <'a> Lexer<'a> { pub fn new<'r>(stream: &'r mut Buffer) -> Lexer<'r> { Lexer { line: 1, column: 1, stream: stream, lookahead_buf: None, } } pub fn lex_token(&mut self) -> Result<TokenWrapper, ParserError> { try!(self.consume_whitespace()); let line = self.line; let col = self.column; let c = match self.consume() { Err(e) => match e.kind { IoErrorKind::EndOfFile => return Ok(wrap(line, col, Token::EOF)), _ => return Err(self.make_error(ParserErrorKind::UnderlyingError(e))) }, Ok(c) => c }; let end_of_token = try!(self.is_end_of_token()); if is_initial(c) { let mut init = String::new(); init.push(c); self.lex_ident(init).map(|s| wrap(line, col, Token::Identifier(Cow::Owned(s)))) } else if c == '+' && end_of_token { Ok(wrap(line, col, Token::Identifier(Cow::Borrowed("+")))) } else if c == '-' { if end_of_token { Ok(wrap(line, col, Token::Identifier(Cow::Borrowed("-")))) } else { match self.lookahead() { Ok('>') => self.lex_ident("-".to_string()).map(|s| wrap(line, col, Token::Identifier(Cow::Owned(s)))), Ok(c) => Err(self.make_error(ParserErrorKind::InvalidCharacter(c))), Err(e) => match e.kind { IoErrorKind::EndOfFile => Ok(wrap(line, col, Token::Identifier(Cow::Borrowed("-")))), _ => Err(self.make_error(ParserErrorKind::UnderlyingError(e))) } } } } else if c == '(' { Ok(wrap(line, col, Token::OpenParen)) } else if c == ')' { Ok(wrap(line, col, Token::CloseParen)) } else if c == '.' 
&& end_of_token { Ok(wrap(line, col, Token::Dot)) } else if c == '#' { let c0 = match self.consume() { Err(e) => return Err(match e.kind { IoErrorKind::EndOfFile => self.make_error(ParserErrorKind::UnexpectedEOF), _ => self.make_error(ParserErrorKind::UnderlyingError(e)) }), Ok(x) => x }; match c0 { 't' | 'T' => Ok(wrap(line, col, Token::True)), 'f' | 'F' => Ok(wrap(line, col, Token::False)), '\\' => self.lex_char().map(|s| wrap(line, col, Token::Character(s))), _ => Err(self.make_error(ParserErrorKind::InvalidCharacter(c))) } } else if c.is_numeric() { self.lex_numeric(c).map(|s| wrap(line, col, Token::Numeric(s))) } else { Err(self.make_error(ParserErrorKind::InvalidCharacter(c))) } } fn is_end_of_token(&mut self) -> Result<bool, ParserError> { match self.lookahead() { Ok(c) => Ok(is_whitespace(c)), Err(e) => match e.kind { IoErrorKind::EndOfFile => Ok(true), _ => Err(self.make_error(ParserErrorKind::UnderlyingError(e))) } } } fn lex_ident(&mut self, initial: String) -> Result<String, ParserError> { let mut s = initial; let sub = try!(self.read_while(is_subsequent)); s.push_str(sub.as_slice()); return Ok(s); } fn lex_char(&mut self) -> Result<String, ParserError> { let c = match self.consume() { Ok(c) => c, Err(e) => return Err(self.make_error(match e.kind { IoErrorKind::EndOfFile => ParserErrorKind::UnexpectedEOF, _ => ParserErrorKind::UnderlyingError(e) })) }; let mut s = String::new(); s.push(c); let sub = try!(self.read_while(|c| c.is_alphanumeric())); s.push_str(sub.as_slice()); return Ok(s); } fn lex_numeric(&mut self, init: char) -> Result<String, ParserError> { let mut s = String::new(); s.push(init); let sub = try!(self.read_while(|c| c.is_numeric())); s.push_str(sub.as_slice()); return Ok(s); } fn make_error(&self, kind: ParserErrorKind) -> ParserError { ParserError { line: self.line, column: self.column, kind: kind } } fn lookahead(&mut self) -> Result<char, IoError> { Ok(match self.lookahead_buf { Some(c) => c, None => { let c = 
try!(self.stream.read_char()); self.lookahead_buf = Some(c); c } }) } fn advance(&mut self, c: char) { if c == '\n' { self.line += 1; self.column = 1; } else { self.column += 1; } } fn read_while<F>(&mut self, f: F) -> Result<String, ParserError> where F: Fn(char) -> bool { let mut s = match self.lookahead_buf { None => String::new(), Some(c) => if f(c) { self.lookahead_buf = None; self.advance(c); let mut s = String::new(); s.push(c); s } else { return Ok(String::new()); } }; loop {
} } fn consume(&mut self) -> Result<char, IoError> { let c = match self.lookahead_buf { Some(c) => { self.lookahead_buf = None; c }, None => try!(self.stream.read_char()) }; self.advance(c); Ok(c) } fn consume_whitespace(&mut self) -> Result<bool, ParserError> { let mut consumed = false; loop { let whitespace = try!(self.read_while(is_whitespace)); consumed = consumed || whitespace.len() > 0; match self.lookahead() { Ok(';') => { consumed = true; try!(self.read_while(|c| c != '\n')); if self.lookahead_buf.is_some() { self.lookahead_buf = None } }, Ok(_) => return Ok(consumed), Err(e) => match e.kind { IoErrorKind::EndOfFile => return Ok(consumed), _ => return Err(self.make_error(ParserErrorKind::UnderlyingError(e))) } } } } }
match self.stream.read_char() { Ok(c) => if f(c) { self.advance(c); s.push(c); } else { self.lookahead_buf = Some(c); return Ok(s); }, Err(e) => match e.kind { IoErrorKind::EndOfFile => return Ok(s), _ => return Err(self.make_error(ParserErrorKind::UnderlyingError(e))) } }
if_condition
[ { "content": "fn format_char(c: char, f: &mut fmt::Formatter) -> fmt::Result {\n\n try!(write!(f, \"#\\\\\"));\n\n match c {\n\n '\\0' => write!(f, \"nul\"),\n\n '\\x08' => write!(f, \"backspace\"),\n\n '\\t' => write!(f, \"tab\"),\n\n '\\x0c' => write!(f, \"page\"),\n\n '\\r' => write!(f, \"return\"),\n\n ' ' => write!(f, \"space\"),\n\n '!'...'~' => write!(f, \"{}\", c),\n\n _ => write!(f, \"x{:x}\", c as usize)\n\n }\n\n}\n\n\n\nimpl<T: fmt::Show> fmt::Show for Datum<T> {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n match *self {\n\n Datum::Sym(ref s) => write!(f, \"{}\", s),\n\n Datum::Bool(true) => write!(f, \"#t\"),\n\n Datum::Bool(false) => write!(f, \"#f\"),\n", "file_path": "src/datum.rs", "rank": 4, "score": 104001.87495807279 }, { "content": "fn unexpected_token(tok: &TokenWrapper, expected: String) -> ParserError {\n\n ParserError {\n\n line: tok.line,\n\n column: tok.column,\n\n kind: ParserErrorKind::UnexpectedToken(format!(\"{:?}\", tok.token), expected)\n\n }\n\n}\n\n\n", "file_path": "src/parser.rs", "rank": 5, "score": 87368.83892498747 }, { "content": "/// Lists all primitive functions with its name\n\npub fn libprimitive() -> Vec<(&'static str, Rc<fn(&[RDatum]) -> Result<RDatum, RuntimeError>>)> {\n\n vec![\n\n (\"+\", Rc::new(PRIM_ADD))\n\n ]\n\n}\n", "file_path": "src/primitive.rs", "rank": 6, "score": 70647.5909651529 }, { "content": "/// Compiles the global env from `base`\n\npub fn libbase() -> HashMap<CowString<'static>, EnvVar> {\n\n let mut lib = HashMap::new();\n\n for &(name, ref func) in libprimitive().iter() {\n\n lib.insert(Cow::Borrowed(name), EnvVar::PrimFunc(name, func.clone()));\n\n }\n\n lib.insert(Cow::Borrowed(\"lambda\"), EnvVar::Syntax(Syntax::Lambda));\n\n return lib;\n\n}\n", "file_path": "src/base.rs", "rank": 7, "score": 66397.83365840753 }, { "content": "fn invalid_token(tok: &TokenWrapper) -> ParserError {\n\n ParserError {\n\n line: tok.line,\n\n column: tok.column,\n\n kind: 
ParserErrorKind::InvalidToken(format!(\"{:?}\", tok.token))\n\n }\n\n}\n\n\n\nstatic CHAR_MAP: phf::Map<&'static str, char> = phf_map! {\n\n \"nul\" => '\\0',\n\n \"backspace\" => '\\x08',\n\n \"tab\" => '\\t',\n\n \"page\" => '\\x0c',\n\n \"return\" => '\\r',\n\n \"space\" => ' ',\n\n};\n\n\n", "file_path": "src/parser.rs", "rank": 8, "score": 64285.12599923823 }, { "content": "fn parse_char(ch: &str) -> Option<char> {\n\n match CHAR_MAP.get(ch) {\n\n Some(c) => return Some(*c),\n\n None => ()\n\n };\n\n\n\n if ch.chars().count() == 1 {\n\n Some(ch.char_at(0))\n\n } else if ch.starts_with(\"x\") {\n\n from_str_radix(&ch[1..], 16).and_then(|c| unicode::char::from_u32(c))\n\n } else {\n\n None\n\n }\n\n}\n\n\n", "file_path": "src/parser.rs", "rank": 9, "score": 63442.00119970827 }, { "content": "fn parse_numeric(rep: &str) -> Option<isize> {\n\n rep.parse()\n\n}\n\n\n\nimpl <'a> Parser<'a> {\n\n /// Create new parser from io::Buffer\n\n pub fn new<'r>(stream: &'r mut Buffer) -> Parser<'r> {\n\n Parser {\n\n lexer: Lexer::new(stream),\n\n token_buf: None\n\n }\n\n }\n\n\n\n /// Parse next datum\n\n pub fn parse_datum<T>(&mut self) -> Result<Datum<T>, ParserError> {\n\n let tok = try!(self.consume_token());\n\n match tok.token {\n\n Token::Identifier(ident) => Ok(Datum::Sym(ident)),\n\n Token::OpenParen => self.parse_list(),\n\n Token::True => Ok(Datum::Bool(true)),\n", "file_path": "src/parser.rs", "rank": 10, "score": 57093.42978722793 }, { "content": "fn write_cons<T: fmt::Show>(tail: &Datum<T>, f: &mut fmt::Formatter) -> fmt::Result {\n\n match *tail {\n\n Datum::Nil => {\n\n write!(f, \")\")\n\n },\n\n Datum::Cons(ref ht, ref tt) => {\n\n try!(write!(f, \" {:?}\", ht.borrow()));\n\n write_cons(tt.borrow().deref(), f)\n\n },\n\n _ => {\n\n write!(f, \" . 
{:?})\", tail)\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/datum.rs", "rank": 11, "score": 49285.69205797382 }, { "content": "fn add(args: &[RDatum]) -> Result<RDatum, RuntimeError> {\n\n let mut sum:isize = 0;\n\n for arg in args.iter() {\n\n let a = try!(DatumCast::unwrap(arg));\n\n sum += a;\n\n }\n\n return Ok(sum.wrap());\n\n}\n\n\n", "file_path": "src/primitive.rs", "rank": 12, "score": 46082.20212149631 }, { "content": "/// `cons` the values into a pair\n\npub fn cons<T>(head: Datum<T>, tail: Datum<T>) -> Datum<T> {\n\n Datum::Cons(Rc::new(RefCell::new(head)), Rc::new(RefCell::new(tail)))\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::{Datum, cons};\n\n use std::borrow::Cow;\n\n use std::rc::Rc;\n\n use std::cell::RefCell;\n\n\n\n fn compare_fmt(s: &str, datum: Datum<()>) {\n\n assert_eq!(s.to_string(), format!(\"{:?}\", datum))\n\n }\n\n\n\n #[test]\n\n fn test_fmt() {\n\n compare_fmt(\"a\", sym!(\"a\"));\n\n compare_fmt(\"()\", list!());\n\n compare_fmt(\"(a)\", list!(sym!(\"a\")));\n", "file_path": "src/datum.rs", "rank": 13, "score": 43453.13911018076 }, { "content": "/// Types with implementing DatumCast trait can cast from/to Datum\n\npub trait DatumCast {\n\n /// Casts Datum into Self, possibly raising error\n\n fn unwrap(datum: &RDatum) -> Result<Self, RuntimeError>;\n\n /// Casts Self into Datum\n\n fn wrap(&self) -> RDatum;\n\n}\n\n\n\nimpl DatumCast for isize {\n\n fn unwrap(datum: &RDatum) -> Result<isize, RuntimeError> {\n\n match datum {\n\n &Datum::Num(n) => Ok(n),\n\n _ => Err(RuntimeError {\n\n kind: RuntimeErrorKind::InvalidType,\n\n desc: format!(\"expected Num, but received {:?}\", DatumType::get_type(datum))\n\n })\n\n }\n\n }\n\n\n\n fn wrap(&self) -> RDatum{\n\n Datum::Num(*self)\n", "file_path": "src/runtime.rs", "rank": 14, "score": 37226.38214475599 }, { "content": "#[test]\n\nfn lexical_scoping() {\n\n // (\\y f -> f 2) #f ((\\y -> (\\x -> y)) #t)\n\n // If it's dynamic scope, it should return 0\n\n // If it's static scope, 
it should return 1\n\n assert_evaluates_to!(\"((lambda (y f) (f 2)) #f ((lambda (y) (lambda (x) y)) #t))\", \"#t\")\n\n}\n", "file_path": "tests/eval_test.rs", "rank": 15, "score": 30740.611828867783 }, { "content": " Token::False => Ok(Datum::Bool(false)),\n\n Token::Character(ref ch) => match parse_char(ch.as_slice()) {\n\n Some(c) => Ok(Datum::Char(c)),\n\n None => Err(invalid_token(&tok))\n\n },\n\n Token::Numeric(ref rep) => match parse_numeric(rep.as_slice()) {\n\n Some(n) => Ok(Datum::Num(n)),\n\n None => Err(invalid_token(&tok))\n\n },\n\n _ => Err(unexpected_token(&tok, \"Datum or OpenParen\".to_string()))\n\n }\n\n }\n\n\n\n fn consume_token(&mut self) -> Result<TokenWrapper, ParserError> {\n\n let mut tok = None;\n\n mem::swap(&mut self.token_buf, &mut tok);\n\n match tok {\n\n Some(t) => Ok(t),\n\n None => self.lexer.lex_token()\n\n }\n", "file_path": "src/parser.rs", "rank": 30, "score": 24.397465628255134 }, { "content": "#[derive(Show, PartialEq)]\n\npub struct ParserError {\n\n pub line: usize,\n\n pub column: usize,\n\n pub kind: ParserErrorKind,\n\n}\n\n\n\nimpl Error for ParserError {\n\n fn description(&self) -> &str {\n\n \"\"\n\n }\n\n\n\n fn detail(&self) -> Option<String> {\n\n None\n\n }\n\n\n\n fn cause(&self) -> Option<&Error> {\n\n match self.kind {\n\n ParserErrorKind::UnderlyingError(ref e) => Some(e as &Error),\n\n _ => None\n", "file_path": "src/error.rs", "rank": 31, "score": 20.286708501429935 }, { "content": "use std::io::IoError;\n\nuse std::error::{Error, FromError};\n\n\n\n/// Possible parser errors\n\n#[derive(Show, PartialEq)]\n\npub enum ParserErrorKind {\n\n /// Parser met EOF before parsing a proper datum\n\n UnexpectedEOF,\n\n /// Unexpected token: the first string describes expected token, and the second describes\n\n /// actual token\n\n UnexpectedToken(String, String),\n\n /// Lexer met character not allowed in source code\n\n InvalidCharacter(char),\n\n /// Parser met un-parseable token\n\n InvalidToken(String),\n\n 
/// Parser met IoError while reading the underlying stream\n\n UnderlyingError(IoError)\n\n}\n\n\n\n/// Parser error\n", "file_path": "src/error.rs", "rank": 32, "score": 16.703340671823465 }, { "content": " code.push_all(try!(self.compile_expr(static_scope, args, link_size, &d)).as_slice());\n\n arg_count += 1;\n\n },\n\n Err(()) => return Err(CompileError { kind: CompileErrorKind::DottedEval })\n\n }\n\n }\n\n\n\n if arg_count == 0 {\n\n Err(CompileError { kind: CompileErrorKind::NullEval })\n\n } else {\n\n code.push(Inst::Call(arg_count - 1));\n\n Ok(code)\n\n }\n\n }\n\n\n\n fn compile_expr(&mut self, static_scope: &[Vec<CowString<'static>>], args: &[CowString<'static>],\n\n link_size: &mut usize, datum: &RDatum)\n\n -> Result<Vec<Inst>, CompileError>\n\n {\n\n match datum {\n", "file_path": "src/compiler.rs", "rank": 33, "score": 16.18470736382224 }, { "content": " }\n\n\n\n fn lookahead_token<'t>(&'t mut self) -> Result<&'t TokenWrapper, ParserError> {\n\n if self.token_buf.is_none() {\n\n self.token_buf = Some(try!(self.lexer.lex_token()));\n\n }\n\n\n\n Ok(self.token_buf.as_ref().unwrap())\n\n }\n\n\n\n fn consume_if(&mut self, tok: &Token) -> Result<bool, ParserError> {\n\n let res = self.lookahead_token().map(|t| t.token == *tok);\n\n if res == Ok(true) {\n\n self.token_buf = None;\n\n }\n\n res\n\n }\n\n\n\n fn expect(&mut self, tok: &Token) -> Result<(), ParserError> {\n\n let t = try!(self.consume_token());\n", "file_path": "src/parser.rs", "rank": 34, "score": 15.850022471931192 }, { "content": "use std::mem;\n\nuse std::num::from_str_radix;\n\n\n\nuse phf;\n\nuse unicode;\n\n\n\nuse datum::{Datum, cons};\n\nuse lexer::{Token, TokenWrapper, Lexer};\n\nuse error::{ParserError, ParserErrorKind};\n\n\n\n/// Parser parses character stream into a Datum\n\npub struct Parser<'a> {\n\n lexer: Lexer<'a>,\n\n token_buf: Option<TokenWrapper>\n\n}\n\n\n", "file_path": "src/parser.rs", "rank": 35, "score": 15.187075112867438 }, { "content": " }\n\n }\n\n\n\n /// 
Compiles the datum into a bytecode evaluates it\n\n pub fn compile(&mut self, datum: &RDatum) -> Result<Vec<Inst>, CompileError> {\n\n let mut link_size = 0;\n\n let mut code = try!(self.compile_expr(&[], &[], &mut link_size, datum));\n\n code.push(Inst::Return);\n\n return Ok(code);\n\n }\n\n\n\n fn compile_call(&mut self, static_scope: &[Vec<CowString<'static>>], args: &[CowString<'static>],\n\n link_size: &mut usize, datum: &RDatum)\n\n -> Result<Vec<Inst>, CompileError>\n\n {\n\n let mut code = Vec::new();\n\n let mut arg_count = 0;\n\n for d in datum.iter() {\n\n match d {\n\n Ok(d) => {\n", "file_path": "src/compiler.rs", "rank": 36, "score": 14.77145095275476 }, { "content": " }\n\n }\n\n}\n\n\n\nimpl FromError<IoError> for ParserError {\n\n fn from_error(err: IoError) -> ParserError {\n\n ParserError {\n\n line: 0,\n\n column: 0,\n\n kind: ParserErrorKind::UnderlyingError(err)\n\n }\n\n }\n\n}\n\n\n\n/// Possible compiler errors\n\n#[derive(Show, PartialEq, Copy)]\n\npub enum CompileErrorKind {\n\n /// The syntax is not implemented yet\n\n NotImplemented,\n\n /// Trying to evaluate `()`\n", "file_path": "src/error.rs", "rank": 37, "score": 12.629126655039869 }, { "content": " *link_size = i+1;\n\n }\n\n return Ok(vec![Inst::PushArg(MemRef::UpValue(i, j))]);\n\n }\n\n }\n\n }\n\n\n\n match self.global_env.get(sym) {\n\n Some(data) => match data {\n\n &EnvVar::Syntax(_) =>\n\n Err(CompileError { kind: CompileErrorKind::SyntaxReference }),\n\n &EnvVar::PrimFunc(ref name, ref func) =>\n\n Ok(vec![Inst::PushArg(MemRef::Const(Datum::Ext(RuntimeData::PrimFunc(name.clone(), func.clone()))))]),\n\n &EnvVar::Closure(ref closure) =>\n\n Ok(vec![Inst::PushArg(MemRef::Const(Datum::Ext(RuntimeData::Closure(closure.clone()))))])\n\n },\n\n None => \n\n Err(CompileError { kind: CompileErrorKind::UnboundVariable })\n\n }\n\n },\n", "file_path": "src/compiler.rs", "rank": 38, "score": 11.688353696382183 }, { "content": "/// If it's not a list, returns Err(()) when the 
iterator meets non-null cdr\n\npub struct DatumIter<T> {\n\n ptr: Datum<T>\n\n}\n\n\n\nimpl<T: Clone> Iterator for DatumIter<T> {\n\n type Item = Result<Datum<T>, ()>;\n\n\n\n fn next(&mut self) -> Option<Result<Datum<T>, ()>> {\n\n let (val, next) = match self.ptr {\n\n Datum::Nil => return None,\n\n Datum::Cons(ref h, ref t) => (h.borrow().deref().clone(), t.borrow().deref().clone()),\n\n _ => return Some(Err(()))\n\n };\n\n\n\n self.ptr = next;\n\n\n\n Some(Ok(val))\n\n }\n\n}\n", "file_path": "src/datum.rs", "rank": 39, "score": 11.547198877253226 }, { "content": " // The lexical environment directly enclosing the code\n\n static_link: Option<StaticLink>\n\n}\n\n\n\n/// Type representation of RDatum\n\n#[derive(Show, Copy)]\n\npub enum DatumType {\n\n Sym,\n\n Bool,\n\n Char,\n\n Num,\n\n List,\n\n Callable,\n\n Syntax\n\n}\n\n\n\nimpl DatumType {\n\n /// Get the type of datum\n\n fn get_type(datum: &RDatum) -> DatumType {\n\n match datum {\n", "file_path": "src/runtime.rs", "rank": 40, "score": 11.467039036352006 }, { "content": " };\n\n\n\n let mut new_link_size = 0;\n\n let mut code = Vec::new();\n\n for expr in body.borrow().iter() {\n\n if let Ok(e) = expr {\n\n let piece = try!(self.compile_expr(\n\n new_stackenv.as_slice(),\n\n new_args.as_slice(),\n\n &mut new_link_size,\n\n &e));\n\n code.push_all(piece.as_slice());\n\n } else {\n\n return Err(CompileError { kind: CompileErrorKind::DottedBody });\n\n }\n\n }\n\n\n\n code.push(Inst::Return);\n\n\n\n return Ok(vec![Inst::PushArg(MemRef::Closure(Rc::new(code), new_link_size))]);\n", "file_path": "src/compiler.rs", "rank": 41, "score": 11.354110621950735 }, { "content": " if t.token == *tok {\n\n Ok(())\n\n } else {\n\n Err(unexpected_token(&t, format!(\"{:?}\", tok)))\n\n }\n\n }\n\n\n\n fn parse_list<T>(&mut self) -> Result<Datum<T>, ParserError> {\n\n if try!(self.consume_if(&Token::CloseParen)) {\n\n return Ok(Datum::Nil);\n\n }\n\n\n\n let head = try!(self.parse_datum());\n\n\n\n if 
try!(self.consume_if(&Token::Dot)) {\n\n let tail = try!(self.parse_datum());\n\n try!(self.expect(&Token::CloseParen));\n\n Ok(cons(head, tail))\n\n } else {\n\n let tail = try!(self.parse_list());\n", "file_path": "src/parser.rs", "rank": 42, "score": 11.149937654245935 }, { "content": "use std::string::CowString;\n\nuse std::fmt;\n\nuse std::collections::HashMap;\n\nuse std::rc::Rc;\n\nuse std::ops::Deref;\n\n\n\nuse error::{CompileError, CompileErrorKind, RuntimeError};\n\nuse datum::Datum;\n\nuse runtime::{Inst, MemRef, RDatum, RuntimeData, Closure};\n\n\n\n/// Syntax variables\n\n#[derive(Copy, Clone, PartialEq)]\n\npub enum Syntax {\n\n /// `lambda` syntax\n\n Lambda\n\n}\n\n\n\n/// Environment variables in the global environment\n\npub enum EnvVar {\n\n /// Syntax variables\n", "file_path": "src/compiler.rs", "rank": 43, "score": 10.51302232224688 }, { "content": " }\n\n}\n\n\n\nimpl PartialEq for RuntimeData {\n\n fn eq(&self, other: &RuntimeData) -> bool {\n\n match self {\n\n &RuntimeData::PrimFunc(ref n0, _) =>\n\n if let &RuntimeData::PrimFunc(ref n1, _) = other {\n\n *n0 == *n1\n\n } else {\n\n false\n\n },\n\n &RuntimeData::Closure(ref c0) =>\n\n if let &RuntimeData::Closure(ref c1) = other {\n\n *c0 == *c1\n\n } else {\n\n false\n\n },\n\n }\n\n }\n\n}\n\n\n\n/// RDatum contains RuntimeData in addition to normal Datum\n\npub type RDatum = Datum<RuntimeData>;\n\n\n\n/// Types with implementing DatumCast trait can cast from/to Datum\n", "file_path": "src/runtime.rs", "rank": 44, "score": 10.44286226504827 }, { "content": "use std::string::CowString;\n\nuse std::rc::Rc;\n\nuse std::cell::RefCell;\n\nuse std::iter::FromIterator;\n\nuse std::ops::Deref;\n\nuse std::fmt;\n\n\n\n/// Datum is the primary data type of Scheme\n\n/// Datum is a generic type here to make parser somewhat independent from runtime\n\n/// Ext can hold runtime data not representable in datum syntax, such as primitive function or I/O\n\n/// ports\n\n#[derive(PartialEq, Clone)]\n\npub 
enum Datum<T> {\n\n /// Symbol\n\n Sym(CowString<'static>),\n\n /// Boolean\n\n Bool(bool),\n\n /// Character\n\n Char(char),\n\n /// Numeric value\n\n Num(isize),\n\n /// `()`\n\n Nil,\n\n /// Pair\n\n Cons(Rc<RefCell<Datum<T>>>, Rc<RefCell<Datum<T>>>),\n\n /// Extra values\n\n Ext(T)\n\n}\n\n\n", "file_path": "src/datum.rs", "rank": 45, "score": 10.314583321651659 }, { "content": "\n\n/// Errors raised in runtime\n\n#[derive(Show, PartialEq, Copy, Clone)]\n\npub enum RuntimeErrorKind {\n\n /// Number of arguments did not match\n\n NumArgs,\n\n /// Argument type did not match \n\n InvalidType\n\n}\n\n\n\n/// Errors raised in runtime\n\n#[derive(Show, PartialEq, Clone)]\n\npub struct RuntimeError {\n\n pub kind: RuntimeErrorKind,\n\n pub desc: String\n\n}\n", "file_path": "src/error.rs", "rank": 46, "score": 10.074361420682084 }, { "content": " &Datum::Sym(_) => DatumType::Sym,\n\n &Datum::Bool(_) => DatumType::Bool,\n\n &Datum::Char(_) => DatumType::Char,\n\n &Datum::Num(_) => DatumType::Num,\n\n &Datum::Nil => DatumType::List,\n\n &Datum::Cons(_, _) => DatumType::List,\n\n &Datum::Ext(RuntimeData::PrimFunc(_, _)) => DatumType::Callable,\n\n &Datum::Ext(RuntimeData::Closure(_)) => DatumType::Callable\n\n }\n\n }\n\n}\n\n\n\nimpl fmt::Show for RuntimeData {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n match self {\n\n &RuntimeData::PrimFunc(name, _) =>\n\n write!(f, \"<primitive: {:?}>\", name),\n\n &RuntimeData::Closure(ref closure) =>\n\n write!(f, \"<procedure {:?}: {:?}>\", closure.static_link, closure.code)\n\n }\n", "file_path": "src/runtime.rs", "rank": 47, "score": 9.718785630935647 }, { "content": "extern crate r6;\n\n\n\nuse std::collections::HashMap;\n\nuse std::rc::Rc;\n\nuse std::io::BufReader;\n\nuse std::borrow::Cow;\n\nuse r6::runtime::Runtime;\n\nuse r6::compiler::{Compiler, EnvVar, Syntax};\n\nuse r6::primitive::PRIM_ADD;\n\nuse r6::parser::Parser;\n\n\n\nmacro_rules! 
assert_evaluates_to {\n\n ($src:expr, $expected:expr) => (\n\n {\n\n let mut src_reader = BufReader::new($src.as_bytes());\n\n let mut src_parser = Parser::new(&mut src_reader);\n\n let sourcecode = match src_parser.parse_datum() {\n\n Ok(code) => code,\n\n Err(e) => panic!(\"failed to parse source: {:?}\", e)\n\n };\n", "file_path": "tests/eval_test.rs", "rank": 48, "score": 9.187441669298641 }, { "content": " }\n\n}\n\n\n\n/// Pointer referring to memory locations in the VM\n\n#[derive(Clone, Show, PartialEq)]\n\npub enum MemRef {\n\n RetVal,\n\n Arg(usize),\n\n UpValue(usize, usize),\n\n Const(RDatum),\n\n Closure(Rc<Vec<Inst>>, usize),\n\n}\n\n\n\n/// The instruction of the bytecode\n\n#[derive(Clone, Show, PartialEq)]\n\npub enum Inst {\n\n PushArg(MemRef),\n\n Call(usize),\n\n DropArg(usize),\n\n Return\n", "file_path": "src/runtime.rs", "rank": 49, "score": 9.166859469042855 }, { "content": " }\n\n\n\n fn up_scope(&self, link: Option<StaticLink>) -> Option<StaticLink> {\n\n match link {\n\n None => None,\n\n Some(link) => match *link.borrow() {\n\n ScopePtr::Heap(ref data) => data.static_link.clone(),\n\n ScopePtr::Stack(n) => if n == self.call_stack.len() {\n\n self.frame.closure.static_link.clone()\n\n } else {\n\n self.call_stack[n].closure.static_link.clone()\n\n },\n\n }\n\n }\n\n }\n\n\n\n fn get_upvalue(&self, link_cnt: usize, arg_idx: usize) -> RDatum {\n\n let mut link = self.frame.closure.static_link.clone();\n\n for _ in range(0, link_cnt) {\n\n link = self.up_scope(link);\n", "file_path": "src/runtime.rs", "rank": 50, "score": 9.121548703804674 }, { "content": "\n\n self.call_stack.push(new_frame);\n\n mem::swap(&mut self.frame, self.call_stack.last_mut().unwrap());\n\n }\n\n\n\n pub fn push_stack(&mut self, val: RDatum) {\n\n self.arg_stack.push(val)\n\n }\n\n\n\n pub fn pop_stack(&mut self) -> Option<RDatum> {\n\n self.arg_stack.pop()\n\n }\n\n\n\n fn step(&mut self) -> bool {\n\n debug!(\"STEP\");\n\n let value = self.fetch();\n\n for (i, 
frame) in self.call_stack.iter().enumerate() {\n\n debug!(\"call_stack[{:?}]: {:?}\", i, frame);\n\n }\n\n debug!(\"frame: {:?}\", self.frame);\n", "file_path": "src/runtime.rs", "rank": 51, "score": 8.998294273905321 }, { "content": " res\n\n }\n\n }\n\n }\n\n\n\n pub fn run(&mut self) -> RDatum {\n\n while self.step() {\n\n ()\n\n }\n\n\n\n return self.arg_stack.pop().unwrap()\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use std::rc::Rc;\n\n use super::{Inst, MemRef, Runtime, RuntimeData};\n\n use datum::Datum;\n\n use primitive::PRIM_ADD;\n", "file_path": "src/runtime.rs", "rank": 52, "score": 8.904479169379975 }, { "content": "use std::rc::Rc;\n\nuse std::cell::RefCell;\n\nuse std::mem;\n\nuse std::fmt;\n\n\n\nuse error::{RuntimeErrorKind, RuntimeError};\n\nuse datum::Datum;\n\n\n\n/// RuntimeData contains runtime values not representable in standard syntax\n\n#[derive(Clone)]\n\npub enum RuntimeData {\n\n PrimFunc(&'static str, Rc<fn(&[RDatum]) -> Result<RDatum, RuntimeError>>),\n\n Closure(Closure)\n\n}\n\n\n\n/// Compiled closure object \n\n#[derive(Show, Clone, PartialEq)]\n\npub struct Closure {\n\n // Pointer to the bytecode\n\n code: Rc<Vec<Inst>>,\n", "file_path": "src/runtime.rs", "rank": 53, "score": 8.71196435443413 }, { "content": "use std::rc::Rc;\n\n\n\nuse error::RuntimeError;\n\nuse runtime::{DatumCast, RDatum};\n\n\n\n/// (+ n0 n1 ...)\n\npub static PRIM_ADD:fn(&[RDatum]) -> Result<RDatum, RuntimeError> = add;\n\n\n", "file_path": "src/primitive.rs", "rank": 54, "score": 8.33656838598815 }, { "content": " _ => Ok(vec![Inst::PushArg(MemRef::Const(datum.clone()))])\n\n }\n\n }\n\n}\n\n\n\nimpl fmt::Show for Syntax {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n write!(f, \"lambda\")\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use std::borrow::Cow;\n\n use std::rc::Rc;\n\n use datum::Datum;\n\n use runtime::{Inst, MemRef, RuntimeData};\n\n use base::libbase;\n\n use primitive::PRIM_ADD;\n\n use super::Compiler;\n", 
"file_path": "src/compiler.rs", "rank": 55, "score": 7.981339330565126 }, { "content": " Syntax(Syntax),\n\n\n\n /// Primitive functions\n\n PrimFunc(&'static str, Rc<fn(&[RDatum]) -> Result<RDatum, RuntimeError>>),\n\n\n\n /// Compiled library functions\n\n Closure(Closure)\n\n}\n\n\n\n/// Compiler compiles Datum into a bytecode evaluates it\n\npub struct Compiler<'g> {\n\n /// Global environment\n\n global_env: &'g HashMap<CowString<'static>, EnvVar>\n\n}\n\n\n\nimpl<'g> Compiler<'g> {\n\n /// Creates a new compiler with given environment\n\n pub fn new<'a>(global_env: &'a HashMap<CowString<'static>, EnvVar>) -> Compiler<'a> {\n\n Compiler {\n\n global_env: global_env\n", "file_path": "src/compiler.rs", "rank": 56, "score": 7.898452121708276 }, { "content": "\n\n let mut res_reader = BufReader::new($expected.as_bytes());\n\n let mut res_parser = Parser::new(&mut res_reader);\n\n let expected = match res_parser.parse_datum() {\n\n Ok(val) => val,\n\n Err(e) => panic!(\"failed to parse result: {:?}\", e)\n\n };\n\n\n\n let mut glob = HashMap::new();\n\n glob.insert(Cow::Borrowed(\"lambda\"), EnvVar::Syntax(Syntax::Lambda));\n\n glob.insert(Cow::Borrowed(\"+\"), EnvVar::PrimFunc(\"+\", Rc::new(PRIM_ADD)));\n\n let mut compiler = Compiler::new(&glob);\n\n let bytecode = match compiler.compile(&sourcecode) {\n\n Ok(code) => code,\n\n Err(e) => panic!(\"compile failure: {:?}\", e)\n\n };\n\n let mut runtime = Runtime::new(bytecode);\n\n let result = runtime.run();\n\n if !((result == expected) && (expected == result)) {\n\n panic!(\"test failed: expected `{:?}` but got `{:?}`\", expected, result);\n\n }\n\n }\n\n )\n\n}\n\n\n\n#[test]\n", "file_path": "tests/eval_test.rs", "rank": 57, "score": 7.871416918603131 }, { "content": " Ok(cons(head, tail))\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use std::io::BufReader;\n\n use std::borrow::Cow;\n\n use super::Parser;\n\n use super::super::datum::{Datum, cons};\n\n\n\n fn test_parse(source: &str, result: 
Datum<()>) {\n\n let mut reader = BufReader::new(source.as_bytes());\n\n let mut parser = Parser::new(&mut reader);\n\n\n\n assert_eq!(parser.parse_datum(), Ok(result))\n\n }\n\n\n\n #[test]\n", "file_path": "src/parser.rs", "rank": 58, "score": 7.681758320801971 }, { "content": " },\n\n Inst::DropArg(n) => {\n\n for _ in range(0, n) {\n\n self.pop_stack();\n\n }\n\n true\n\n },\n\n Inst::Return => {\n\n let n = self.frame.arg_size;\n\n let top = self.arg_stack.len();\n\n let res = self.pop_call_stack();\n\n let retval = match self.arg_stack.pop() {\n\n Some(val) => val,\n\n None => panic!(\"arg_stack empty!\")\n\n };\n\n self.arg_stack.truncate(top - n - 2);\n\n self.push_stack(retval);\n\n if res {\n\n self.frame.pc += 1;\n\n }\n", "file_path": "src/runtime.rs", "rank": 59, "score": 7.1335587114422845 }, { "content": " for (i, val) in self.arg_stack.iter().enumerate() {\n\n debug!(\"stack[{:?}]: {:?}\", i, val);\n\n }\n\n\n\n debug!(\"fetch: {:?}\", value);\n\n match value {\n\n Inst::Call(n) => {\n\n let top = self.arg_stack.len();\n\n let datum = self.arg_stack[top - n - 1].clone();\n\n match datum {\n\n Datum::Ext(RuntimeData::PrimFunc(_, f)) => {\n\n let dummy_closure = Closure {\n\n code: Rc::new(Vec::new()),\n\n static_link: None\n\n };\n\n self.push_call_stack(n, dummy_closure);\n\n let res = match (*f)(&self.arg_stack[top - n ..]) {\n\n Ok(x) => x,\n\n Err(e) => panic!(e)\n\n };\n", "file_path": "src/runtime.rs", "rank": 60, "score": 7.113712366668848 }, { "content": " NullEval,\n\n /// Trying to evaluate non-proper list, such as `(a b c . d)`\n\n DottedEval,\n\n /// Expression body is non-proper list, such as `(a b c . 
d)`\n\n DottedBody,\n\n /// Invalid lambda syntax\n\n BadLambdaSyntax,\n\n /// Trying to apply non-function constant\n\n NotCallable,\n\n /// Trying to refer a syntax variable\n\n SyntaxReference,\n\n /// Trying to refer an unbound variable\n\n UnboundVariable\n\n}\n\n\n\n/// Compiler error\n\n#[derive(Show, PartialEq, Copy)]\n\npub struct CompileError {\n\n pub kind: CompileErrorKind\n\n}\n", "file_path": "src/error.rs", "rank": 61, "score": 6.779444986848008 }, { "content": " &Datum::Cons(ref h, ref t) =>\n\n if let &Datum::Sym(ref n) = h.borrow().deref() {\n\n if let Some(&EnvVar::Syntax(Syntax::Lambda)) = self.global_env.get(n) {\n\n if let &Datum::Cons(ref cur_args, ref body) = t.borrow().deref() {\n\n let new_stackenv = {\n\n let mut nenv = static_scope.to_vec();\n\n nenv.push(args.to_vec());\n\n nenv\n\n };\n\n\n\n let new_args = {\n\n let mut nargs = Vec::new();\n\n for arg in cur_args.borrow().iter() {\n\n if let Ok(Datum::Sym(s)) = arg {\n\n nargs.push(s)\n\n } else {\n\n return Err(CompileError { kind: CompileErrorKind::BadLambdaSyntax });\n\n }\n\n }\n\n nargs\n", "file_path": "src/compiler.rs", "rank": 62, "score": 6.774688182403451 }, { "content": "use std::collections::HashMap;\n\nuse std::string::CowString;\n\nuse std::borrow::Cow;\n\n\n\nuse compiler::{EnvVar, Syntax};\n\nuse primitive::libprimitive;\n\n\n\n/// Compiles the global env from `base`\n", "file_path": "src/base.rs", "rank": 63, "score": 6.407230440258049 }, { "content": " } else {\n\n return Err(CompileError { kind: CompileErrorKind::BadLambdaSyntax })\n\n }\n\n } else {\n\n self.compile_call(static_scope, args, link_size, datum)\n\n }\n\n } else {\n\n self.compile_call(static_scope, args, link_size, datum)\n\n },\n\n &Datum::Nil => Err(CompileError { kind: CompileErrorKind::NullEval }),\n\n &Datum::Sym(ref sym) => {\n\n if let Some(i) = range(0, args.len()).find(|&i| args[i] == *sym) {\n\n return Ok(vec![Inst::PushArg(MemRef::Arg(i))])\n\n }\n\n\n\n // (0, static_scope[-1]), (1, 
static_scope[-2]), (2, static_scope[-3]), ...\n\n for (i, up_args) in static_scope.iter().rev().enumerate() {\n\n for (j, arg) in up_args.iter().enumerate() {\n\n if *arg == *sym {\n\n if *link_size < i+1 {\n", "file_path": "src/compiler.rs", "rank": 64, "score": 5.894914177655728 }, { "content": " Runtime {\n\n ret_val: Datum::Nil,\n\n arg_stack: Vec::new(),\n\n call_stack: Vec::new(),\n\n frame: StackFrame {\n\n closure: Closure { code: Rc::new(code), static_link: None},\n\n pc: 0,\n\n stack_bottom: 0,\n\n arg_size: 0,\n\n self_link: Rc::new(RefCell::new(ScopePtr::Stack(0)))\n\n }\n\n }\n\n }\n\n\n\n fn fetch(&self) -> Inst {\n\n self.frame.closure.code[self.frame.pc].clone()\n\n }\n\n\n\n pub fn get_stack_val(&self, idx: usize) -> RDatum {\n\n self.arg_stack[self.frame.stack_bottom + idx].clone()\n", "file_path": "src/runtime.rs", "rank": 65, "score": 5.872438974396113 }, { "content": " Datum::Char(c) => format_char(c, f),\n\n Datum::Num(n) => n.fmt(f),\n\n Datum::Ext(ref x) => x.fmt(f),\n\n Datum::Nil => write!(f, \"()\"),\n\n Datum::Cons(ref h, ref t) => {\n\n try!(write!(f, \"({:?}\", h.borrow()));\n\n write_cons(t.borrow().deref(), f)\n\n }\n\n }\n\n }\n\n}\n\n\n\nimpl<T: Clone> Datum<T> {\n\n /// Iterate the values if it's a proper list\n\n pub fn iter(&self) -> DatumIter<T> {\n\n DatumIter { ptr: self.clone() }\n\n }\n\n}\n\n\n\n/// If the datum is a proper list, iterate the values in the list.\n", "file_path": "src/datum.rs", "rank": 66, "score": 5.733069589899722 }, { "content": " }\n\n match link {\n\n None => panic!(\"get_upvalue({:?}, {:?}) failed!\", link_cnt, arg_idx),\n\n Some(link) => match *link.borrow() {\n\n ScopePtr::Heap(ref data) => data.args[arg_idx].clone(),\n\n ScopePtr::Stack(n) => {\n\n let frame_ref = if n == self.call_stack.len() {\n\n &self.frame\n\n } else {\n\n &self.call_stack[n]\n\n };\n\n let bot = frame_ref.stack_bottom;\n\n self.arg_stack[bot + arg_idx].clone()\n\n }\n\n }\n\n }\n\n }\n\n\n\n fn fetch_mem(&self, ptr: MemRef) 
-> RDatum {\n\n match ptr {\n", "file_path": "src/runtime.rs", "rank": 67, "score": 5.6687509432835315 }, { "content": " MemRef::RetVal => self.ret_val.clone(),\n\n MemRef::Arg(idx) => self.get_stack_val(idx),\n\n MemRef::UpValue(i, j) => self.get_upvalue(i, j),\n\n MemRef::Const(val) => val.clone(),\n\n MemRef::Closure(code, _) => Datum::Ext(RuntimeData::Closure(\n\n Closure {\n\n code: code.clone(),\n\n static_link: Some(self.frame.self_link.clone())\n\n }\n\n ))\n\n }\n\n }\n\n\n\n fn pop_call_stack(&mut self) -> bool {\n\n match self.call_stack.pop() {\n\n None => false,\n\n Some(f) => {\n\n let bottom = self.frame.stack_bottom;\n\n let top = bottom + self.frame.arg_size;\n\n let heap = HeapClosure {\n", "file_path": "src/runtime.rs", "rank": 68, "score": 5.488866375815336 }, { "content": "}\n\n\n\n/// Shared link to the ScopePtr\n\npub type StaticLink = Rc<RefCell<ScopePtr>>;\n\n\n\n/// StackFrame represents frame in the main stack\n\n#[derive(Show)]\n\npub struct StackFrame {\n\n // Current running code\n\n closure: Closure,\n\n\n\n // Program counter\n\n pc: usize,\n\n\n\n // Bottom of the current frame\n\n stack_bottom: usize,\n\n\n\n // Number of function arguments of the current frame\n\n arg_size: usize,\n\n\n", "file_path": "src/runtime.rs", "rank": 69, "score": 5.2187946726117795 }, { "content": "}\n\n\n\n/// Error values returned from parser, compiler or runtime\n\npub mod error;\n\n/// Basic datum types\n\npub mod datum;\n\npub mod parser;\n\npub mod lexer;\n\n/// Virtual machine running the bytecode\n\npub mod runtime;\n\n/// Primitive functions\n\npub mod primitive;\n\n/// Compiles datum into a bytecode\n\npub mod compiler;\n\n/// R6RS `base` library\n\npub mod base;\n", "file_path": "src/lib.rs", "rank": 70, "score": 5.197706410587966 }, { "content": " // Pointer link to this frame. When this frame is out of scope, other closures enclosed by\n\n // this scope loses reference to upvalues. 
To prevent such situation, when the frame is out of\n\n // scope, the VM copies this frame into a newly allocated heap memory. However, doing that\n\n // requires searching entire stack and heap memory looking for the pointers pointing to this\n\n // frame. To avoid that, VM just changes self_link pointing to ClosureHeap when the frame goes\n\n // out of scope.\n\n self_link: StaticLink\n\n}\n\n\n\n/// The virtual machine running the bytecode\n\npub struct Runtime {\n\n ret_val: RDatum,\n\n arg_stack: Vec<RDatum>,\n\n call_stack: Vec<StackFrame>,\n\n frame: StackFrame\n\n}\n\n\n\nimpl Runtime {\n\n /// Create the new virtual machine with given code\n\n pub fn new(code: Vec<Inst>) -> Runtime {\n", "file_path": "src/runtime.rs", "rank": 71, "score": 4.965843816884398 }, { "content": " args: self.arg_stack[bottom .. top].to_vec(),\n\n static_link: self.frame.closure.static_link.clone()\n\n };\n\n *self.frame.self_link.borrow_mut() = ScopePtr::Heap(heap);\n\n self.frame = f;\n\n true\n\n }\n\n }\n\n }\n\n\n\n fn push_call_stack(&mut self, arg_size: usize, closure: Closure) {\n\n let idx = self.call_stack.len();\n\n let stack_bottom = self.arg_stack.len() - arg_size; \n\n let new_frame = StackFrame {\n\n closure: closure,\n\n pc: 0,\n\n stack_bottom: stack_bottom,\n\n arg_size: arg_size,\n\n self_link: Rc::new(RefCell::new(ScopePtr::Stack(idx+1)))\n\n };\n", "file_path": "src/runtime.rs", "rank": 72, "score": 4.789749849799067 }, { "content": "}\n\n\n\n/// When the enclosing lexical env goes out of scope of the closure, the env is copied into heap\n\n/// memory. HeapClosure represents the env in heap memory\n\n#[derive(Show, PartialEq)]\n\npub struct HeapClosure {\n\n args: Vec<RDatum>,\n\n static_link: Option<StaticLink>\n\n}\n\n\n\n/// ScopePtr points to the directly enclosing lexical env of the frame. 
It might be live in stack,\n\n/// or residing in heap\n\n#[derive(Show, PartialEq)]\n\npub enum ScopePtr {\n\n // Stack(n) refers to the n-th element of the main call stack\n\n // if n == runtime.call_stack.len(), this refers to the runtime.frame\n\n Stack(usize),\n\n\n\n // refers to the heap environment\n\n Heap(HeapClosure)\n", "file_path": "src/runtime.rs", "rank": 73, "score": 4.638653929348223 }, { "content": "\n\n #[test]\n\n fn test_simple_expr() {\n\n let env = libbase();\n\n let mut compiler = Compiler::new(&env);\n\n let expected = Ok(vec![\n\n Inst::PushArg(MemRef::Const(Datum::Ext(RuntimeData::PrimFunc(\"+\", Rc::new(PRIM_ADD))))),\n\n Inst::PushArg(MemRef::Const(Datum::Num(1))),\n\n Inst::PushArg(MemRef::Const(Datum::Num(2))),\n\n Inst::Call(2),\n\n Inst::Return\n\n ]);\n\n let code = compiler.compile(&list![sym!(\"+\"), num!(1), num!(2)]);\n\n assert_eq!(expected, code);\n\n }\n\n\n\n #[test]\n\n fn test_nested_expr() {\n\n let env = libbase();\n\n let mut compiler = Compiler::new(&env);\n", "file_path": "src/compiler.rs", "rank": 74, "score": 4.520685185973383 }, { "content": " fn test_upvalue() {\n\n let env = libbase();\n\n let mut compiler = Compiler::new(&env);\n\n\n\n let f = vec![\n\n Inst::PushArg(MemRef::Const(Datum::Ext(RuntimeData::PrimFunc(\"+\", Rc::new(PRIM_ADD))))),\n\n Inst::PushArg(MemRef::UpValue(0, 0)),\n\n Inst::PushArg(MemRef::Arg(0)),\n\n Inst::Call(2),\n\n Inst::Return\n\n ];\n\n let g = vec![\n\n Inst::PushArg(MemRef::Closure(Rc::new(f), 1)),\n\n Inst::Return\n\n ];\n\n let expected = Ok(vec![\n\n Inst::PushArg(MemRef::Closure(Rc::new(g), 0)),\n\n Inst::PushArg(MemRef::Const(Datum::Num(2))),\n\n Inst::Call(1),\n\n Inst::PushArg(MemRef::Const(Datum::Num(3))),\n", "file_path": "src/compiler.rs", "rank": 75, "score": 4.282056520360834 }, { "content": "\n\nimpl<T> FromIterator<Datum<T>> for Datum<T> {\n\n fn from_iter<Iter: Iterator<Item=Datum<T>> >(iterator: Iter) -> Datum<T> {\n\n let list:Vec<Datum<T>> = 
FromIterator::from_iter(iterator);\n\n let mut res = Datum::Nil;\n\n for d in list.into_iter().rev() {\n\n res = cons(d, res);\n\n }\n\n return res;\n\n }\n\n}\n\n\n", "file_path": "src/datum.rs", "rank": 76, "score": 3.725755409848386 }, { "content": " ];\n\n let g = vec![\n\n Inst::PushArg(MemRef::Closure(Rc::new(f), 1)),\n\n Inst::Return\n\n ];\n\n\n\n // ((\n\n // (lambda (x) # = g\n\n // (lambda (y) (+ x y)) # = f\n\n // ) 2) 3)\n\n let code = vec![\n\n Inst::PushArg(MemRef::Closure(Rc::new(g), 0)),\n\n Inst::PushArg(MemRef::Const(Datum::Num(2))),\n\n Inst::Call(1),\n\n Inst::PushArg(MemRef::Const(Datum::Num(3))),\n\n Inst::Call(1),\n\n Inst::Return\n\n ];\n\n\n\n let mut runtime = Runtime::new(code);\n\n assert_eq!(runtime.run(), Datum::Num(5));\n\n }\n\n}\n", "file_path": "src/runtime.rs", "rank": 77, "score": 3.71969793754704 }, { "content": " fn test_sym() {\n\n test_parse(\"lambda\", sym!(\"lambda\"));\n\n test_parse(\"list->vector\", sym!(\"list->vector\"));\n\n test_parse(\"->vector\", sym!(\"->vector\"));\n\n test_parse(\"+\", sym!(\"+\"));\n\n }\n\n\n\n #[test]\n\n fn test_list() {\n\n test_parse(\"()\", list!());\n\n test_parse(\"(a)\", list!(sym!(\"a\")));\n\n test_parse(\"(a b)\", list!(sym!(\"a\"), sym!(\"b\")));\n\n test_parse(\"(a . b)\", cons(sym!(\"a\"), sym!(\"b\")));\n\n test_parse(\"(a; comment!\\nb)\", list!(sym!(\"a\"), sym!(\"b\")));\n\n }\n\n\n\n #[test]\n\n fn test_simple_datum() {\n\n test_parse(\"#t\", Datum::Bool(true));\n\n test_parse(\"#f\", Datum::Bool(false));\n", "file_path": "src/parser.rs", "rank": 78, "score": 3.699075484286616 }, { "content": "#![crate_name = \"r6\"]\n\n\n\n//! 
r6.rs is an attempt to implement R6RS Scheme in Rust language\n\n\n\n#![feature(plugin)]\n\n#![feature(slicing_syntax)]\n\n#![feature(box_syntax)]\n\n//TODO: Allow unstable items until Rust hits 1.0\n\n#![allow(unstable)]\n\n\n\n#[plugin]\n\nextern crate phf_mac;\n\nextern crate phf;\n\nextern crate unicode;\n\n\n\n#[macro_use]\n\nextern crate log;\n\n\n\nmacro_rules! list{\n\n ($($x:expr),*) => (\n", "file_path": "src/lib.rs", "rank": 79, "score": 3.444791341231151 }, { "content": " test_parse(r##\"#\\f\"##, Datum::Char('f'));\n\n test_parse(r##\"#\\x3f\"##, Datum::Char('\\x3f'));\n\n test_parse(r##\"#\\space\"##, Datum::Char(' '));\n\n test_parse(r##\"#\\nul\"##, Datum::Char('\\0'));\n\n }\n\n\n\n #[test]\n\n fn test_numeric() {\n\n test_parse(\"2\", Datum::Num(2));\n\n }\n\n}\n", "file_path": "src/parser.rs", "rank": 80, "score": 3.3965830844502864 }, { "content": " let expected = Ok(vec![\n\n Inst::PushArg(MemRef::Const(Datum::Ext(RuntimeData::PrimFunc(\"+\", Rc::new(PRIM_ADD))))),\n\n Inst::PushArg(MemRef::Const(Datum::Num(3))),\n\n Inst::PushArg(MemRef::Const(Datum::Ext(RuntimeData::PrimFunc(\"+\", Rc::new(PRIM_ADD))))),\n\n Inst::PushArg(MemRef::Const(Datum::Num(1))),\n\n Inst::PushArg(MemRef::Const(Datum::Num(2))),\n\n Inst::Call(2),\n\n Inst::Call(2),\n\n Inst::Return\n\n ]);\n\n let code = compiler.compile(&list![sym!(\"+\"), num!(3), list![sym!(\"+\"), num!(1), num!(2)]]);\n\n assert_eq!(expected, code);\n\n }\n\n\n\n #[test]\n\n fn test_lambda() {\n\n let env = libbase();\n\n let mut compiler = Compiler::new(&env);\n\n\n\n let f = vec![\n", "file_path": "src/compiler.rs", "rank": 81, "score": 3.395919591962669 }, { "content": " ];\n\n let code = vec![\n\n Inst::PushArg(MemRef::Closure(Rc::new(f), 0)),\n\n Inst::PushArg(MemRef::Const(Datum::Num(1))),\n\n Inst::Call(1),\n\n Inst::Return\n\n ];\n\n\n\n let mut runtime = Runtime::new(code);\n\n assert_eq!(runtime.run(), Datum::Num(3));\n\n }\n\n\n\n #[test]\n\n fn test_closure() {\n\n let f = vec![\n\n 
Inst::PushArg(MemRef::Const(Datum::Ext(RuntimeData::PrimFunc(\"+\", Rc::new(PRIM_ADD))))),\n\n Inst::PushArg(MemRef::UpValue(0, 0)),\n\n Inst::PushArg(MemRef::Arg(0)),\n\n Inst::Call(2),\n\n Inst::Return\n", "file_path": "src/runtime.rs", "rank": 82, "score": 3.382690431071222 }, { "content": " Inst::PushArg(MemRef::Const(Datum::Ext(RuntimeData::PrimFunc(\"+\", Rc::new(PRIM_ADD))))),\n\n Inst::PushArg(MemRef::Arg(0)),\n\n Inst::PushArg(MemRef::Const(Datum::Num(2))),\n\n Inst::Call(2),\n\n Inst::Return\n\n ];\n\n let expected = Ok(vec![\n\n Inst::PushArg(MemRef::Closure(Rc::new(f), 0)),\n\n Inst::PushArg(MemRef::Const(Datum::Num(1))),\n\n Inst::Call(1),\n\n Inst::Return\n\n ]);\n\n let code = compiler.compile(&list![\n\n list![sym!(\"lambda\"), list![sym!(\"x\")],\n\n list![sym!(\"+\"), sym!(\"x\"), num!(2)]],\n\n num!(1)]);\n\n assert_eq!(expected, code);\n\n }\n\n\n\n #[test]\n", "file_path": "src/compiler.rs", "rank": 83, "score": 3.2581900452501324 }, { "content": " Inst::PushArg(MemRef::Const(Datum::Ext(RuntimeData::PrimFunc(\"+\", Rc::new(PRIM_ADD))))),\n\n Inst::PushArg(MemRef::Const(Datum::Num(1))),\n\n Inst::PushArg(MemRef::Const(Datum::Num(2))),\n\n Inst::Call(2),\n\n Inst::Call(2),\n\n Inst::Return\n\n ];\n\n\n\n let mut runtime = Runtime::new(code);\n\n assert_eq!(runtime.run(), Datum::Num(6));\n\n }\n\n\n\n #[test]\n\n fn test_lambda() {\n\n let f = vec![\n\n Inst::PushArg(MemRef::Const(Datum::Ext(RuntimeData::PrimFunc(\"+\", Rc::new(PRIM_ADD))))),\n\n Inst::PushArg(MemRef::Arg(0)),\n\n Inst::PushArg(MemRef::Const(Datum::Num(2))),\n\n Inst::Call(2),\n\n Inst::Return\n", "file_path": "src/runtime.rs", "rank": 84, "score": 3.007080630369584 }, { "content": " self.pop_call_stack();\n\n self.arg_stack.truncate(top - n - 1);\n\n self.push_stack(res);\n\n self.frame.pc += 1;\n\n true\n\n }, \n\n Datum::Ext(RuntimeData::Closure(closure)) => {\n\n self.push_call_stack(n, closure);\n\n true\n\n },\n\n _ => {\n\n panic!(\"Not callable\")\n\n }\n\n }\n\n },\n\n 
Inst::PushArg(ptr) => {\n\n let val = self.fetch_mem(ptr);\n\n self.arg_stack.push(val);\n\n self.frame.pc += 1;\n\n true\n", "file_path": "src/runtime.rs", "rank": 85, "score": 2.982071903864799 }, { "content": "\n\n #[test]\n\n fn test_runtime() {\n\n let code = vec![\n\n Inst::PushArg(MemRef::Const(Datum::Ext(RuntimeData::PrimFunc(\"+\", Rc::new(PRIM_ADD))))),\n\n Inst::PushArg(MemRef::Const(Datum::Num(1))),\n\n Inst::PushArg(MemRef::Const(Datum::Num(2))),\n\n Inst::Call(2),\n\n Inst::Return\n\n ];\n\n\n\n let mut runtime = Runtime::new(code);\n\n assert_eq!(runtime.run(), Datum::Num(3));\n\n }\n\n\n\n #[test]\n\n fn test_nested_call() {\n\n let code = vec![\n\n Inst::PushArg(MemRef::Const(Datum::Ext(RuntimeData::PrimFunc(\"+\", Rc::new(PRIM_ADD))))),\n\n Inst::PushArg(MemRef::Const(Datum::Num(3))),\n", "file_path": "src/runtime.rs", "rank": 86, "score": 2.4186220660484072 }, { "content": " Inst::Call(1),\n\n Inst::Return\n\n ]);\n\n\n\n // ((\n\n // (lambda (x) # = g\n\n // (lambda (y) (+ x y)) # = f\n\n // ) 2) 3)\n\n let code = compiler.compile(&list![\n\n list![\n\n list![sym!(\"lambda\"), list![sym!(\"x\")],\n\n list![sym!(\"lambda\"), list![sym!(\"y\")],\n\n list![sym!(\"+\"), sym!(\"x\"), sym!(\"y\")]]],\n\n num!(2)],\n\n num!(3)\n\n ]);\n\n assert_eq!(expected, code)\n\n }\n\n}\n", "file_path": "src/compiler.rs", "rank": 87, "score": 2.046774357273938 }, { "content": " compare_fmt(\"(a b)\", list!(sym!(\"a\"), sym!(\"b\")));\n\n compare_fmt(\"(a . b)\", cons(sym!(\"a\"), sym!(\"b\")));\n\n }\n\n\n\n #[test]\n\n fn test_iter() {\n\n let list: Datum<()> = Datum::Cons(\n\n Rc::new(RefCell::new(Datum::Num(1))),\n\n Rc::new(RefCell::new(Datum::Cons(\n\n Rc::new(RefCell::new(Datum::Num(2))),\n\n Rc::new(RefCell::new(Datum::Nil))\n\n )\n\n )));\n\n\n\n assert_eq!(Ok(vec![Datum::Num(1), Datum::Num(2)]), list.iter().collect());\n\n }\n\n}\n", "file_path": "src/datum.rs", "rank": 88, "score": 1.462280638446118 } ]